mirror of https://github.com/philomena-dev/philomena.git
synced 2024-11-23 20:18:00 +01:00

Merge pull request #166 from philomena-dev/s3: Migrate to object storage

commit 743699c6af
26 changed files with 894 additions and 376 deletions
.gitignore (vendored): 1 change

@@ -35,6 +35,7 @@ npm-debug.log
 # we ignore priv/static. You may want to comment
 # this depending on your deployment strategy.
 /priv/static/
+/priv/s3

 # Intellij IDEA
 .idea
@@ -17,7 +17,6 @@ config :philomena,
   elasticsearch_url: System.get_env("ELASTICSEARCH_URL", "http://localhost:9200"),
   advert_file_root: System.fetch_env!("ADVERT_FILE_ROOT"),
   avatar_file_root: System.fetch_env!("AVATAR_FILE_ROOT"),
-  channel_url_root: System.fetch_env!("CHANNEL_URL_ROOT"),
   badge_file_root: System.fetch_env!("BADGE_FILE_ROOT"),
   password_pepper: System.fetch_env!("PASSWORD_PEPPER"),
   avatar_url_root: System.fetch_env!("AVATAR_URL_ROOT"),

@@ -67,6 +66,40 @@ if is_nil(System.get_env("START_WORKER")) do
   config :exq, queues: []
 end

+# S3/Object store config
+config :philomena, :s3_primary_options,
+  region: System.get_env("S3_REGION", "us-east-1"),
+  scheme: System.fetch_env!("S3_SCHEME"),
+  host: System.fetch_env!("S3_HOST"),
+  port: System.fetch_env!("S3_PORT"),
+  access_key_id: System.fetch_env!("AWS_ACCESS_KEY_ID"),
+  secret_access_key: System.fetch_env!("AWS_SECRET_ACCESS_KEY"),
+  http_opts: [timeout: 180_000, recv_timeout: 180_000]
+
+config :philomena, :s3_primary_bucket, System.fetch_env!("S3_BUCKET")
+
+config :philomena, :s3_secondary_options,
+  region: System.get_env("ALT_S3_REGION", "us-east-1"),
+  scheme: System.get_env("ALT_S3_SCHEME"),
+  host: System.get_env("ALT_S3_HOST"),
+  port: System.get_env("ALT_S3_PORT"),
+  access_key_id: System.get_env("ALT_AWS_ACCESS_KEY_ID"),
+  secret_access_key: System.get_env("ALT_AWS_SECRET_ACCESS_KEY"),
+  http_opts: [timeout: 180_000, recv_timeout: 180_000]
+
+config :philomena, :s3_secondary_bucket, System.get_env("ALT_S3_BUCKET")
+
+config :ex_aws, :hackney_opts,
+  timeout: 180_000,
+  recv_timeout: 180_000,
+  use_default_pool: false,
+  pool: false
+
+config :ex_aws, :retries,
+  max_attempts: 20,
+  base_backoff_in_ms: 10,
+  max_backoff_in_ms: 10_000
+
 if config_env() != :test do
   # Database config
   config :philomena, Philomena.Repo,
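These option blocks are consumed later by Philomena.Objects as per-request config overrides rather than as global ExAws configuration. A minimal sketch of issuing one request against the primary backend with them (the object key and local file name are hypothetical):

    opts = Application.fetch_env!(:philomena, :s3_primary_options)
    bucket = Application.fetch_env!(:philomena, :s3_primary_bucket)

    # Upload a single object, passing the options as overrides that
    # apply to this request only.
    ExAws.S3.put_object(bucket, "images/example/full.png", File.read!("full.png"))
    |> ExAws.request!(opts)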
@@ -17,17 +17,16 @@ services:
       - PASSWORD_PEPPER=dn2e0EpZrvBLoxUM3gfQveBhjf0bG/6/bYhrOyq3L3hV9hdo/bimJ+irbDWsuXLP
       - TUMBLR_API_KEY=fuiKNFp9vQFvjLNvx4sUwti4Yb5yGutBN4Xh10LXZhhRKjWlV4
       - OTP_SECRET_KEY=Wn7O/8DD+qxL0X4X7bvT90wOkVGcA90bIHww4twR03Ci//zq7PnMw8ypqyyT/b/C
-      - ADVERT_FILE_ROOT=priv/static/system/images/adverts
-      - AVATAR_FILE_ROOT=priv/static/system/images/avatars
-      - BADGE_FILE_ROOT=priv/static/system/images
-      - IMAGE_FILE_ROOT=priv/static/system/images
-      - TAG_FILE_ROOT=priv/static/system/images
-      - CHANNEL_URL_ROOT=/media
+      - ADVERT_FILE_ROOT=adverts
+      - AVATAR_FILE_ROOT=avatars
+      - BADGE_FILE_ROOT=badges
+      - IMAGE_FILE_ROOT=images
+      - TAG_FILE_ROOT=tags
       - AVATAR_URL_ROOT=/avatars
       - ADVERT_URL_ROOT=/spns
       - IMAGE_URL_ROOT=/img
-      - BADGE_URL_ROOT=/media
-      - TAG_URL_ROOT=/media
+      - BADGE_URL_ROOT=/badge-img
+      - TAG_URL_ROOT=/tag-img
       - ELASTICSEARCH_URL=http://elasticsearch:9200
       - REDIS_HOST=redis
       - DATABASE_URL=ecto://postgres:postgres@postgres/philomena_dev

@@ -35,6 +34,12 @@ services:
       - MAILER_ADDRESS=noreply@philomena.local
       - START_ENDPOINT=true
       - SITE_DOMAINS=localhost
+      - S3_SCHEME=http
+      - S3_HOST=files
+      - S3_PORT=80
+      - S3_BUCKET=philomena
+      - AWS_ACCESS_KEY_ID=local-identity
+      - AWS_SECRET_ACCESS_KEY=local-credential
     working_dir: /srv/philomena
     tty: true
     volumes:

@@ -71,12 +76,28 @@ services:
     logging:
       driver: "none"

+  files:
+    image: andrewgaul/s3proxy:sha-ba0fd6d
+    environment:
+      - JCLOUDS_FILESYSTEM_BASEDIR=/srv/philomena/priv/s3
+    volumes:
+      - .:/srv/philomena
+
   web:
     build:
       context: .
       dockerfile: ./docker/web/Dockerfile
       args:
         - APP_DIR=/srv/philomena
+        - S3_SCHEME=http
+        - S3_HOST=files
+        - S3_PORT=80
+        - S3_BUCKET=philomena
     volumes:
       - .:/srv/philomena
+    environment:
+      - AWS_ACCESS_KEY_ID=local-identity
+      - AWS_SECRET_ACCESS_KEY=local-credential
     logging:
       driver: "none"
     depends_on:
@@ -1,5 +1,14 @@
 #!/usr/bin/env sh

+# Create S3 dirs
+mkdir -p /srv/philomena/priv/static/system/images/thumbs
+mkdir -p /srv/philomena/priv/s3/philomena
+ln -sf /srv/philomena/priv/static/system/images/thumbs /srv/philomena/priv/s3/philomena/images
+ln -sf /srv/philomena/priv/static/system/images /srv/philomena/priv/s3/philomena/adverts
+ln -sf /srv/philomena/priv/static/system/images /srv/philomena/priv/s3/philomena/avatars
+ln -sf /srv/philomena/priv/static/system/images /srv/philomena/priv/s3/philomena/badges
+ln -sf /srv/philomena/priv/static/system/images /srv/philomena/priv/s3/philomena/tags
+
 # For compatibility with musl libc
 export CARGO_FEATURE_DISABLE_INITIAL_EXEC_TLS=1
 export CARGO_HOME=/srv/philomena/.cargo
@@ -1,6 +1,16 @@
-FROM nginx:1.23.2-alpine
-ENV APP_DIR /srv/philomena
+FROM openresty/openresty:1.21.4.1-4-alpine
+ARG APP_DIR
+ARG S3_SCHEME
+ARG S3_HOST
+ARG S3_PORT
+ARG S3_BUCKET
+
+RUN apk add --no-cache gettext curl perl && opm get jkeys089/lua-resty-hmac=0.06 && mkdir -p /etc/nginx/lua
+COPY docker/web/aws-signature.lua /etc/nginx/lua
 COPY docker/web/nginx.conf /tmp/docker.nginx
-RUN envsubst '$APP_DIR' < /tmp/docker.nginx > /etc/nginx/conf.d/default.conf
+RUN envsubst '$APP_DIR $S3_SCHEME $S3_HOST $S3_PORT $S3_BUCKET' < /tmp/docker.nginx > /etc/nginx/conf.d/default.conf && \
+    mkdir -p /var/www/cache/tmp && \
+    echo 'env AWS_ACCESS_KEY_ID;' >> /usr/local/openresty/nginx/conf/nginx.conf && \
+    echo 'env AWS_SECRET_ACCESS_KEY;' >> /usr/local/openresty/nginx/conf/nginx.conf
 EXPOSE 80
-CMD ["nginx", "-g", "daemon off;"]
+CMD ["openresty", "-g", "daemon off;"]
docker/web/aws-signature.lua (new file, 149 lines)

@@ -0,0 +1,149 @@
--[[
Copyright 2018 JobTeaser

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

  http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
--]]

local cjson = require('cjson')
local resty_hmac = require('resty.hmac')
local resty_sha256 = require('resty.sha256')
local str = require('resty.string')

local _M = { _VERSION = '0.1.2' }

local function get_credentials ()
  local access_key = os.getenv('AWS_ACCESS_KEY_ID')
  local secret_key = os.getenv('AWS_SECRET_ACCESS_KEY')

  return {
    access_key = access_key,
    secret_key = secret_key
  }
end

local function get_iso8601_basic(timestamp)
  return os.date('!%Y%m%dT%H%M%SZ', timestamp)
end

local function get_iso8601_basic_short(timestamp)
  return os.date('!%Y%m%d', timestamp)
end

local function get_derived_signing_key(keys, timestamp, region, service)
  local h_date = resty_hmac:new('AWS4' .. keys['secret_key'], resty_hmac.ALGOS.SHA256)
  h_date:update(get_iso8601_basic_short(timestamp))
  k_date = h_date:final()

  local h_region = resty_hmac:new(k_date, resty_hmac.ALGOS.SHA256)
  h_region:update(region)
  k_region = h_region:final()

  local h_service = resty_hmac:new(k_region, resty_hmac.ALGOS.SHA256)
  h_service:update(service)
  k_service = h_service:final()

  local h = resty_hmac:new(k_service, resty_hmac.ALGOS.SHA256)
  h:update('aws4_request')
  return h:final()
end

local function get_cred_scope(timestamp, region, service)
  return get_iso8601_basic_short(timestamp)
    .. '/' .. region
    .. '/' .. service
    .. '/aws4_request'
end

local function get_signed_headers()
  return 'host;x-amz-content-sha256;x-amz-date'
end

local function get_sha256_digest(s)
  local h = resty_sha256:new()
  h:update(s or '')
  return str.to_hex(h:final())
end

local function get_hashed_canonical_request(timestamp, host, uri)
  local digest = get_sha256_digest(ngx.var.request_body)
  local canonical_request = ngx.var.request_method .. '\n'
    .. uri .. '\n'
    .. '\n'
    .. 'host:' .. host .. '\n'
    .. 'x-amz-content-sha256:' .. digest .. '\n'
    .. 'x-amz-date:' .. get_iso8601_basic(timestamp) .. '\n'
    .. '\n'
    .. get_signed_headers() .. '\n'
    .. digest
  return get_sha256_digest(canonical_request)
end

local function get_string_to_sign(timestamp, region, service, host, uri)
  return 'AWS4-HMAC-SHA256\n'
    .. get_iso8601_basic(timestamp) .. '\n'
    .. get_cred_scope(timestamp, region, service) .. '\n'
    .. get_hashed_canonical_request(timestamp, host, uri)
end

local function get_signature(derived_signing_key, string_to_sign)
  local h = resty_hmac:new(derived_signing_key, resty_hmac.ALGOS.SHA256)
  h:update(string_to_sign)
  return h:final(nil, true)
end

local function get_authorization(keys, timestamp, region, service, host, uri)
  local derived_signing_key = get_derived_signing_key(keys, timestamp, region, service)
  local string_to_sign = get_string_to_sign(timestamp, region, service, host, uri)
  local auth = 'AWS4-HMAC-SHA256 '
    .. 'Credential=' .. keys['access_key'] .. '/' .. get_cred_scope(timestamp, region, service)
    .. ', SignedHeaders=' .. get_signed_headers()
    .. ', Signature=' .. get_signature(derived_signing_key, string_to_sign)
  return auth
end

local function get_service_and_region(host)
  local patterns = {
    {'s3.amazonaws.com', 's3', 'us-east-1'},
    {'s3-external-1.amazonaws.com', 's3', 'us-east-1'},
    {'s3%-([a-z0-9-]+)%.amazonaws%.com', 's3', nil}
  }

  for i, data in ipairs(patterns) do
    local region = host:match(data[1])
    if region ~= nil and data[3] == nil then
      return data[2], region
    elseif region ~= nil then
      return data[2], data[3]
    end
  end

  return 's3', 'auto'
end

function _M.aws_set_headers(host, uri)
  local creds = get_credentials()
  local timestamp = tonumber(ngx.time())
  local service, region = get_service_and_region(host)
  local auth = get_authorization(creds, timestamp, region, service, host, uri)

  ngx.req.set_header('Authorization', auth)
  ngx.req.set_header('Host', host)
  ngx.req.set_header('x-amz-date', get_iso8601_basic(timestamp))
end

function _M.s3_set_headers(host, uri)
  _M.aws_set_headers(host, uri)
  ngx.req.set_header('x-amz-content-sha256', get_sha256_digest(ngx.var.request_body))
end

return _M
@@ -2,6 +2,51 @@ upstream philomena {
     server app:4000 fail_timeout=0;
 }

+map $uri $custom_content_type {
+    default        "text/html";
+    ~(.*\.png)$    "image/png";
+    ~(.*\.jpe?g)$  "image/jpeg";
+    ~(.*\.gif)$    "image/gif";
+    ~(.*\.svg)$    "image/svg+xml";
+    ~(.*\.mp4)$    "video/mp4";
+    ~(.*\.webm)$   "video/webm";
+}
+
+lua_package_path '/etc/nginx/lua/?.lua;;';
+resolver 1.1.1.1 ipv6=off;
+
+init_by_lua_block {
+    aws_sig = require('aws-signature')
+
+    function clear_request()
+        -- Get rid of any client state that could cause
+        -- issues for the proxied request
+        for h, _ in pairs(ngx.req.get_headers()) do
+            if string.lower(h) ~= 'range' then
+                ngx.req.clear_header(h)
+            end
+        end
+
+        ngx.req.set_uri_args({})
+        ngx.req.discard_body()
+    end
+
+    function sign_aws_request()
+        -- The API token used should not allow writing, but
+        -- sanitize this anyway to stop an upstream error
+        if ngx.req.get_method() ~= 'GET' then
+            ngx.status = ngx.HTTP_UNAUTHORIZED
+            ngx.say('Unauthorized')
+            return ngx.exit(ngx.HTTP_UNAUTHORIZED)
+        end
+
+        clear_request()
+        aws_sig.s3_set_headers("$S3_HOST", ngx.var.uri)
+    end
+}
+
+proxy_cache_path /var/www/cache levels=1:2 keys_zone=s3-cache:8m max_size=1000m inactive=600m;
+
 server {
     listen 80 default;
     listen [::]:80;

@@ -11,41 +56,63 @@ server {
     client_max_body_size 125000000;
     client_body_buffer_size 128k;

-    location ~ ^/img/view/(.+)/([0-9]+).*\.([A-Za-z0-9]+)$ {
-        expires max;
-        add_header Cache-Control public;
-        alias "$APP_DIR/priv/static/system/images/thumbs/$1/$2/full.$3";
+    location ~ ^/$S3_BUCKET {
+        internal;
+
+        access_by_lua "sign_aws_request()";
+        proxy_pass "$S3_SCHEME://$S3_HOST:$S3_PORT";
+        proxy_cache s3-cache;
+        proxy_cache_valid 1h;
+        proxy_hide_header Content-Type;
+        proxy_ssl_server_name on;
+
+        expires max;
+        add_header Cache-Control public;
+        add_header Content-Type $custom_content_type;
     }

-    location ~ ^/img/download/(.+)/([0-9]+).*\.([A-Za-z0-9]+)$ {
-        add_header Content-Disposition "attachment";
-        expires max;
-        add_header Cache-Control public;
-        alias "$APP_DIR/priv/static/system/images/thumbs/$1/$2/full.$3";
+    location ~ ^/img/download/(.+)/([0-9]+).*\.([A-Za-z0-9]+)$ {
+        rewrite ^/img/download/(.+)/([0-9]+).*\.([A-Za-z0-9]+)$ "/$S3_BUCKET/images/$1/$2/full.$3" break;
+
+        access_by_lua "sign_aws_request()";
+        proxy_pass "$S3_SCHEME://$S3_HOST:$S3_PORT";
+        proxy_cache s3-cache;
+        proxy_cache_valid 1h;
+        proxy_hide_header Content-Type;
+        proxy_ssl_server_name on;
+
+        expires max;
+        add_header Cache-Control public;
+        add_header Content-Type $custom_content_type;
+        add_header Content-Disposition "attachment";
     }

-    location ~ ^/img/(.+) {
-        expires max;
-        add_header Cache-Control public;
-        alias $APP_DIR/priv/static/system/images/thumbs/$1;
+    location ~ ^/img/view/(.+)/([0-9]+).*\.([A-Za-z0-9]+)$ {
+        rewrite ^/img/view/(.+)/([0-9]+).*\.([A-Za-z0-9]+)$ "/$S3_BUCKET/images/$1/$2/full.$3" last;
     }

-    location ~ ^/spns/(.+) {
-        expires max;
-        add_header Cache-Control public;
-        alias $APP_DIR/priv/static/system/images/adverts/$1;
+    location ~ ^/img/(.+)$ {
+        rewrite ^/img/(.+)$ "/$S3_BUCKET/images/$1" last;
     }

-    location ~ ^/avatars/(.+) {
-        expires max;
-        add_header Cache-Control public;
-        alias $APP_DIR/priv/static/system/images/avatars/$1;
+    location ~ ^/spns/(.+) {
+        rewrite ^/spns/(.+)$ "/$S3_BUCKET/adverts/$1" last;
     }

-    location ~ ^/media/(.+) {
-        expires max;
-        add_header Cache-Control public;
-        alias $APP_DIR/priv/static/system/images/$1;
+    location ~ ^/avatars/(.+) {
+        rewrite ^/avatars/(.+)$ "/$S3_BUCKET/avatars/$1" last;
+    }
+
+    # The following two location blocks use an -img suffix to avoid
+    # conflicting with the application routes. In production, this
+    # is not necessary since assets will be on a distinct domain.
+
+    location ~ ^/badge-img/(.+) {
+        rewrite ^/badge-img/(.+)$ "/$S3_BUCKET/badges/$1" last;
+    }
+
+    location ~ ^/tag-img/(.+) {
+        rewrite ^/tag-img/(.+)$ "/$S3_BUCKET/tags/$1" last;
     }

     location / {
(deleted file, 53 lines)

@@ -1,53 +0,0 @@
defmodule Mix.Tasks.RecalculateIntensities do
  use Mix.Task

  alias Philomena.Images.{Image, Thumbnailer}
  alias Philomena.ImageIntensities.ImageIntensity
  alias Philomena.Batch
  alias Philomena.Repo

  import Ecto.Query

  @shortdoc "Recalculates all intensities for reverse search."
  @requirements ["app.start"]
  @impl Mix.Task
  def run(_args) do
    Batch.record_batches(Image, fn batch ->
      batch
      |> Stream.with_index()
      |> Stream.each(fn {image, i} ->
        image_file =
          cond do
            image.image_mime_type in ["image/png", "image/jpeg"] ->
              Thumbnailer.image_file(image)

            true ->
              Path.join(Thumbnailer.image_thumb_dir(image), "rendered.png")
          end

        case System.cmd("image-intensities", [image_file]) do
          {output, 0} ->
            [nw, ne, sw, se] =
              output
              |> String.trim()
              |> String.split("\t")
              |> Enum.map(&String.to_float/1)

            ImageIntensity
            |> where(image_id: ^image.id)
            |> Repo.update_all(set: [nw: nw, ne: ne, sw: sw, se: se])

          _ ->
            :err
        end

        if rem(i, 100) == 0 do
          IO.write("\r#{image.id}")
        end
      end)
      |> Stream.run()
    end)

    IO.puts("\nDone")
  end
end
lib/mix/tasks/upload_to_s3.ex (new file, 171 lines)

@@ -0,0 +1,171 @@
defmodule Mix.Tasks.UploadToS3 do
  use Mix.Task

  alias Philomena.{
    Adverts.Advert,
    Badges.Badge,
    Images.Image,
    Tags.Tag,
    Users.User
  }

  alias Philomena.Images.Thumbnailer
  alias Philomena.Objects
  alias Philomena.Batch
  import Ecto.Query

  @shortdoc "Dumps existing image files to S3 storage backend"
  @requirements ["app.start"]
  @impl Mix.Task
  def run(args) do
    {args, rest} =
      OptionParser.parse_head!(args,
        strict: [
          concurrency: :integer,
          adverts: :boolean,
          avatars: :boolean,
          badges: :boolean,
          tags: :boolean,
          images: :boolean
        ]
      )

    concurrency = Keyword.get(args, :concurrency, 4)

    time =
      with [time] <- rest,
           {:ok, time, _} <- DateTime.from_iso8601(time) do
        time
      else
        _ -> raise ArgumentError, "Must provide a RFC3339 start time, like 1970-01-01T00:00:00Z"
      end

    if args[:adverts] do
      file_root = System.get_env("OLD_ADVERT_FILE_ROOT", "priv/static/system/images/adverts")
      new_file_root = Application.fetch_env!(:philomena, :advert_file_root)

      IO.puts("\nAdverts:")

      upload_typical(
        where(Advert, [a], not is_nil(a.image) and a.updated_at >= ^time),
        concurrency,
        file_root,
        new_file_root,
        :image
      )
    end

    if args[:avatars] do
      file_root = System.get_env("OLD_AVATAR_FILE_ROOT", "priv/static/system/images/avatars")
      new_file_root = Application.fetch_env!(:philomena, :avatar_file_root)

      IO.puts("\nAvatars:")

      upload_typical(
        where(User, [u], not is_nil(u.avatar) and u.updated_at >= ^time),
        concurrency,
        file_root,
        new_file_root,
        :avatar
      )
    end

    if args[:badges] do
      file_root = System.get_env("OLD_BADGE_FILE_ROOT", "priv/static/system/images")
      new_file_root = Application.fetch_env!(:philomena, :badge_file_root)

      IO.puts("\nBadges:")

      upload_typical(
        where(Badge, [b], not is_nil(b.image) and b.updated_at >= ^time),
        concurrency,
        file_root,
        new_file_root,
        :image
      )
    end

    if args[:tags] do
      file_root = System.get_env("OLD_TAG_FILE_ROOT", "priv/static/system/images")
      new_file_root = Application.fetch_env!(:philomena, :tag_file_root)

      IO.puts("\nTags:")

      upload_typical(
        where(Tag, [t], not is_nil(t.image) and t.updated_at >= ^time),
        concurrency,
        file_root,
        new_file_root,
        :image
      )
    end

    if args[:images] do
      file_root =
        Path.join(System.get_env("OLD_IMAGE_FILE_ROOT", "priv/static/system/images"), "thumbs")

      new_file_root = Application.fetch_env!(:philomena, :image_file_root)

      # Temporarily set file root to empty path so we can get the proper prefix
      Application.put_env(:philomena, :image_file_root, "")

      IO.puts("\nImages:")

      upload_images(
        where(Image, [i], not is_nil(i.image) and i.updated_at >= ^time),
        concurrency,
        file_root,
        new_file_root
      )
    end
  end

  defp upload_typical(queryable, batch_size, file_root, new_file_root, field_name) do
    Batch.record_batches(queryable, [batch_size: batch_size], fn models ->
      models
      |> Task.async_stream(&upload_typical_model(&1, file_root, new_file_root, field_name),
        timeout: :infinity
      )
      |> Stream.run()

      IO.write("\r#{hd(models).id} (#{DateTime.to_iso8601(hd(models).updated_at)})")
    end)
  end

  defp upload_typical_model(model, file_root, new_file_root, field_name) do
    field = Map.fetch!(model, field_name)
    path = Path.join(file_root, field)

    if File.regular?(path) do
      put_file(path, Path.join(new_file_root, field))
    end
  end

  defp upload_images(queryable, batch_size, file_root, new_file_root) do
    Batch.record_batches(queryable, [batch_size: batch_size], fn models ->
      models
      |> Task.async_stream(&upload_image_model(&1, file_root, new_file_root), timeout: :infinity)
      |> Stream.run()

      IO.write("\r#{hd(models).id} (#{DateTime.to_iso8601(hd(models).updated_at)})")
    end)
  end

  defp upload_image_model(model, file_root, new_file_root) do
    path_prefix = Thumbnailer.image_thumb_prefix(model)

    Thumbnailer.all_versions(model)
    |> Enum.map(fn version ->
      path = Path.join([file_root, path_prefix, version])
      new_path = Path.join([new_file_root, path_prefix, version])

      if File.regular?(path) do
        put_file(path, new_path)
      end
    end)
  end

  defp put_file(path, uploaded_path) do
    Objects.put(uploaded_path, path)
  end
end
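The trailing argument must parse with DateTime.from_iso8601/1, which is what gives the "RFC3339 start time" error message its shape; for example:

    iex> DateTime.from_iso8601("1970-01-01T00:00:00Z")
    {:ok, ~U[1970-01-01 00:00:00Z], 0}

A run over only images might then look like `mix upload_to_s3 --images --concurrency 8 1970-01-01T00:00:00Z` (flag values are illustrative, not from the commit).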
@@ -8,8 +8,7 @@ defmodule Philomena.Filename do
     [
       time_identifier(DateTime.utc_now()),
       "/",
-      usec_identifier(),
-      pid_identifier(),
+      UUID.uuid1(),
       ".",
       extension
     ]

@@ -19,17 +18,4 @@ defmodule Philomena.Filename do
   defp time_identifier(time) do
     Enum.join([time.year, time.month, time.day], "/")
   end
-
-  defp usec_identifier do
-    DateTime.utc_now()
-    |> DateTime.to_unix(:microsecond)
-    |> to_string()
-  end
-
-  defp pid_identifier do
-    self()
-    |> :erlang.pid_to_list()
-    |> to_string()
-    |> String.replace(~r/[^0-9]/, "")
-  end
 end
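With the usec/pid identifiers gone, a generated name is the day-grouped date path, a time-based UUID, and the extension. A sketch of the resulting shape (the UUID value is hypothetical):

    # time_identifier <> "/" <> UUID.uuid1() <> "." <> extension
    "2022/11/23/0c2b9e7e-6b0d-11ed-9d3c-0242ac120002.png"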
@@ -4,6 +4,7 @@ defmodule Philomena.Images do
   """

   import Ecto.Query, warn: false
+  require Logger

   alias Ecto.Multi
   alias Philomena.Repo

@@ -13,7 +14,6 @@ defmodule Philomena.Images do
   alias Philomena.ImagePurgeWorker
   alias Philomena.DuplicateReports.DuplicateReport
   alias Philomena.Images.Image
-  alias Philomena.Images.Hider
   alias Philomena.Images.Uploader
   alias Philomena.Images.Tagging
   alias Philomena.Images.Thumbnailer

@@ -109,10 +109,7 @@ defmodule Philomena.Images do
     |> Repo.transaction()
     |> case do
       {:ok, %{image: image}} = result ->
-        Uploader.persist_upload(image)
-        Uploader.unpersist_old_upload(image)
-
-        repair_image(image)
+        async_upload(image, attrs["image"])
         reindex_image(image)
         Tags.reindex_tags(image.added_tags)
         maybe_approve_image(image, attribution[:user])

@@ -124,6 +121,44 @@ defmodule Philomena.Images do
     end
   end

+  defp async_upload(image, plug_upload) do
+    linked_pid =
+      spawn(fn ->
+        # Make sure task will finish before VM exit
+        Process.flag(:trap_exit, true)
+
+        # Wait to be freed up by the caller
+        receive do
+          :ready -> nil
+        end
+
+        # Start trying to upload
+        try_upload(image, 0)
+      end)
+
+    # Give the upload to the linked process
+    Plug.Upload.give_away(plug_upload, linked_pid, self())
+
+    # Free up the linked process
+    send(linked_pid, :ready)
+  end
+
+  defp try_upload(image, retry_count) when retry_count < 100 do
+    try do
+      Uploader.persist_upload(image)
+      repair_image(image)
+    rescue
+      e ->
+        Logger.error("Upload failed: #{inspect(e)} [try ##{retry_count}]")
+        Process.sleep(5000)
+        try_upload(image, retry_count + 1)
+    end
+  end
+
+  defp try_upload(image, retry_count) do
+    Logger.error("Aborting upload of #{image.id} after #{retry_count} retries")
+  end
+
   defp maybe_create_subscription_on_upload(multi, %User{watch_on_upload: true} = user) do
     multi
     |> Multi.run(:subscribe, fn _repo, %{image: image} ->

@@ -196,9 +231,8 @@ defmodule Philomena.Images do
     |> Repo.update()
     |> case do
       {:ok, image} ->
-        Uploader.unpersist_old_upload(image)
         purge_files(image, image.hidden_image_key)
-        Hider.destroy_thumbnails(image)
+        Thumbnailer.destroy_thumbnails(image)

         {:ok, image}

@@ -263,7 +297,6 @@ defmodule Philomena.Images do
     |> case do
       {:ok, image} ->
         Uploader.persist_upload(image)
-        Uploader.unpersist_old_upload(image)

         repair_image(image)
         purge_files(image, image.hidden_image_key)

@@ -539,14 +572,16 @@ defmodule Philomena.Images do
   defp process_after_hide(result) do
     case result do
       {:ok, %{image: image, tags: tags, reports: {_count, reports}} = result} ->
-        Hider.hide_thumbnails(image, image.hidden_image_key)
+        spawn(fn ->
+          Thumbnailer.hide_thumbnails(image, image.hidden_image_key)
+          purge_files(image, image.hidden_image_key)
+        end)

         Comments.reindex_comments(image)
         Reports.reindex_reports(reports)
         Tags.reindex_tags(tags)
         reindex_image(image)
         reindex_copied_tags(result)
-        purge_files(image, image.hidden_image_key)

         {:ok, result}

@@ -590,7 +625,9 @@ defmodule Philomena.Images do
     |> Repo.transaction()
     |> case do
       {:ok, %{image: image, tags: tags}} ->
-        Hider.unhide_thumbnails(image, key)
+        spawn(fn ->
+          Thumbnailer.unhide_thumbnails(image, key)
+        end)

         reindex_image(image)
         purge_files(image, image.hidden_image_key)

@@ -774,7 +811,9 @@ defmodule Philomena.Images do
   end

   def perform_purge(files) do
-    Hider.purge_cache(files)
+    {_out, 0} = System.cmd("purge-cache", [Jason.encode!(%{files: files})])
+
+    :ok
   end

   alias Philomena.Images.Subscription
@ -1,54 +0,0 @@
|
|||
defmodule Philomena.Images.Hider do
|
||||
@moduledoc """
|
||||
Hiding logic for images.
|
||||
"""
|
||||
|
||||
alias Philomena.Images.Image
|
||||
|
||||
# sobelow_skip ["Traversal.FileModule"]
|
||||
def hide_thumbnails(image, key) do
|
||||
source = image_thumb_dir(image)
|
||||
target = image_thumb_dir(image, key)
|
||||
|
||||
File.rm_rf(target)
|
||||
File.rename(source, target)
|
||||
end
|
||||
|
||||
# sobelow_skip ["Traversal.FileModule"]
|
||||
def unhide_thumbnails(image, key) do
|
||||
source = image_thumb_dir(image, key)
|
||||
target = image_thumb_dir(image)
|
||||
|
||||
File.rm_rf(target)
|
||||
File.rename(source, target)
|
||||
end
|
||||
|
||||
# sobelow_skip ["Traversal.FileModule"]
|
||||
def destroy_thumbnails(image) do
|
||||
hidden = image_thumb_dir(image, image.hidden_image_key)
|
||||
normal = image_thumb_dir(image)
|
||||
|
||||
File.rm_rf(hidden)
|
||||
File.rm_rf(normal)
|
||||
end
|
||||
|
||||
def purge_cache(files) do
|
||||
{_out, 0} = System.cmd("purge-cache", [Jason.encode!(%{files: files})])
|
||||
|
||||
:ok
|
||||
end
|
||||
|
||||
# fixme: these are copied from the thumbnailer
|
||||
defp image_thumb_dir(%Image{created_at: created_at, id: id}),
|
||||
do: Path.join([image_thumbnail_root(), time_identifier(created_at), to_string(id)])
|
||||
|
||||
defp image_thumb_dir(%Image{created_at: created_at, id: id}, key),
|
||||
do:
|
||||
Path.join([image_thumbnail_root(), time_identifier(created_at), to_string(id) <> "-" <> key])
|
||||
|
||||
defp time_identifier(time),
|
||||
do: Enum.join([time.year, time.month, time.day], "/")
|
||||
|
||||
defp image_thumbnail_root,
|
||||
do: Application.get_env(:philomena, :image_file_root) <> "/thumbs"
|
||||
end
|
|
@@ -8,6 +8,8 @@ defmodule Philomena.Images.Thumbnailer do
   alias Philomena.Images.Image
   alias Philomena.Processors
   alias Philomena.Analyzers
+  alias Philomena.Uploader
+  alias Philomena.Objects
   alias Philomena.Sha512
   alias Philomena.Repo
@@ -18,30 +20,63 @@ defmodule Philomena.Images.Thumbnailer do
     small: {320, 240},
     medium: {800, 600},
     large: {1280, 1024},
-    tall: {1024, 4096},
-    full: nil
+    tall: {1024, 4096}
   ]

   def thumbnail_versions do
-    Enum.filter(@versions, fn {_name, dimensions} ->
-      not is_nil(dimensions)
-    end)
+    @versions
+  end
+
+  # A list of version sizes that should be generated for the image,
+  # based on its dimensions. The processor can generate a list of paths.
+  def generated_sizes(%{image_width: image_width, image_height: image_height}) do
+    Enum.filter(@versions, fn
+      {_name, {width, height}} -> image_width > width or image_height > height
+    end)
   end

   def thumbnail_urls(image, hidden_key) do
-    Path.join([image_thumb_dir(image), "*"])
-    |> Path.wildcard()
-    |> Enum.map(fn version_name ->
-      Path.join([image_url_base(image, hidden_key), Path.basename(version_name)])
+    image
+    |> all_versions()
+    |> Enum.map(fn name ->
+      Path.join(image_url_base(image, hidden_key), name)
     end)
   end

+  def hide_thumbnails(image, key) do
+    moved_files = all_versions(image)
+
+    source_prefix = visible_image_thumb_prefix(image)
+    target_prefix = hidden_image_thumb_prefix(image, key)
+
+    bulk_rename(moved_files, source_prefix, target_prefix)
+  end
+
+  def unhide_thumbnails(image, key) do
+    moved_files = all_versions(image)
+
+    source_prefix = hidden_image_thumb_prefix(image, key)
+    target_prefix = visible_image_thumb_prefix(image)
+
+    bulk_rename(moved_files, source_prefix, target_prefix)
+  end
+
+  def destroy_thumbnails(image) do
+    affected_files = all_versions(image)
+
+    hidden_prefix = hidden_image_thumb_prefix(image, image.hidden_image_key)
+    visible_prefix = visible_image_thumb_prefix(image)
+
+    bulk_delete(affected_files, hidden_prefix)
+    bulk_delete(affected_files, visible_prefix)
+  end
+
   def generate_thumbnails(image_id) do
     image = Repo.get!(Image, image_id)
-    file = image_file(image)
+    file = download_image_file(image)
     {:ok, analysis} = Analyzers.analyze(file)

-    apply_edit_script(image, Processors.process(analysis, file, @versions))
+    apply_edit_script(image, Processors.process(analysis, file, generated_sizes(image)))
     generate_dupe_reports(image)
     recompute_meta(image, file, &Image.thumbnail_changeset/2)
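Given the @versions table above, generated_sizes/1 keeps only the versions an image actually exceeds. A sketch (the smaller thumb entries elided from the hunk above are omitted here):

    # A 900x700 image is wider than :small and :medium but fits within
    # :large and :tall, so only the smaller versions are generated.
    generated_sizes(%{image_width: 900, image_height: 700})
    #=> [..., small: {320, 240}, medium: {800, 600}]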
@@ -56,16 +91,13 @@ defmodule Philomena.Images.Thumbnailer do
     do: ImageIntensities.create_image_intensity(image, intensities)

   defp apply_change(image, {:replace_original, new_file}),
-    do: copy(new_file, image_file(image))
+    do: upload_file(image, new_file, "full.#{image.image_format}")

   defp apply_change(image, {:thumbnails, thumbnails}),
-    do: Enum.map(thumbnails, &apply_thumbnail(image, image_thumb_dir(image), &1))
+    do: Enum.map(thumbnails, &apply_thumbnail(image, &1))

-  defp apply_thumbnail(_image, thumb_dir, {:copy, new_file, destination}),
-    do: copy(new_file, Path.join(thumb_dir, destination))
-
-  defp apply_thumbnail(image, thumb_dir, {:symlink_original, destination}),
-    do: symlink(image_file(image), Path.join(thumb_dir, destination))
+  defp apply_thumbnail(image, {:copy, new_file, destination}),
+    do: upload_file(image, new_file, destination)

   defp generate_dupe_reports(image) do
     if not image.duplication_checked do
@@ -86,65 +118,66 @@ defmodule Philomena.Images.Thumbnailer do
     |> Repo.update!()
   end

-  # Copy from source to destination, creating parent directories along
-  # the way and setting the appropriate permission bits when necessary.
-  #
-  # sobelow_skip ["Traversal.FileModule"]
-  defp copy(source, destination) do
-    prepare_dir(destination)
+  defp download_image_file(image) do
+    tempfile = Briefly.create!(extname: ".#{image.image_format}")
+    path = Path.join(image_thumb_prefix(image), "full.#{image.image_format}")

-    File.rm(destination)
-    File.cp!(source, destination)
+    Objects.download_file(path, tempfile)

-    set_perms(destination)
+    tempfile
   end

-  # Try to handle filesystems that don't support symlinks
-  # by falling back to a copy.
-  #
-  # sobelow_skip ["Traversal.FileModule"]
-  defp symlink(source, destination) do
-    source = Path.absname(source)
+  def upload_file(image, file, version_name) do
+    path = Path.join(image_thumb_prefix(image), version_name)

-    prepare_dir(destination)
-
-    case File.ln_s(source, destination) do
-      :ok ->
-        set_perms(destination)
-
-      _err ->
-        copy(source, destination)
-    end
+    Uploader.persist_file(path, file)
   end

-  # 0o644 = (S_IRUSR | S_IWUSR) | S_IRGRP | S_IROTH
-  #
-  # sobelow_skip ["Traversal.FileModule"]
-  defp set_perms(destination),
-    do: File.chmod(destination, 0o644)
-
-  # Prepare the directory by creating it if it does not yet exist.
-  #
-  # sobelow_skip ["Traversal.FileModule"]
-  defp prepare_dir(destination) do
-    destination
-    |> Path.dirname()
-    |> File.mkdir_p!()
+  defp bulk_rename(file_names, source_prefix, target_prefix) do
+    file_names
+    |> Task.async_stream(
+      fn name ->
+        source = Path.join(source_prefix, name)
+        target = Path.join(target_prefix, name)
+        Objects.copy(source, target)
+
+        name
+      end,
+      timeout: :infinity
+    )
+    |> Stream.map(fn {:ok, name} -> name end)
+    |> bulk_delete(source_prefix)
   end

-  def image_file(%Image{image: image}),
-    do: Path.join(image_file_root(), image)
+  defp bulk_delete(file_names, prefix) do
+    file_names
+    |> Enum.map(&Path.join(prefix, &1))
+    |> Objects.delete_multiple()
+  end

-  def image_thumb_dir(%Image{
-        created_at: created_at,
-        id: id,
-        hidden_from_users: true,
-        hidden_image_key: key
-      }),
-      do: Path.join([image_thumbnail_root(), time_identifier(created_at), "#{id}-#{key}"])
+  def all_versions(image) do
+    generated = Processors.versions(image.image_mime_type, generated_sizes(image))
+    full = ["full.#{image.image_format}"]

-  def image_thumb_dir(%Image{created_at: created_at, id: id}),
-    do: Path.join([image_thumbnail_root(), time_identifier(created_at), to_string(id)])
+    generated ++ full
+  end
+
+  # This method wraps the following two for code that doesn't care
+  # and just wants the files (most code should take this path)
+
+  def image_thumb_prefix(%{hidden_from_users: true} = image),
+    do: hidden_image_thumb_prefix(image, image.hidden_image_key)
+
+  def image_thumb_prefix(image),
+    do: visible_image_thumb_prefix(image)
+
+  # These methods handle the actual distinction between the two
+
+  defp hidden_image_thumb_prefix(%Image{created_at: created_at, id: id}, key),
+    do: Path.join([image_file_root(), time_identifier(created_at), "#{id}-#{key}"])
+
+  defp visible_image_thumb_prefix(%Image{created_at: created_at, id: id}),
+    do: Path.join([image_file_root(), time_identifier(created_at), to_string(id)])

   defp image_url_base(%Image{created_at: created_at, id: id}, nil),
     do: Path.join([image_url_root(), time_identifier(created_at), to_string(id)])
@@ -156,11 +189,8 @@ defmodule Philomena.Images.Thumbnailer do
     do: Enum.join([time.year, time.month, time.day], "/")

   defp image_file_root,
-    do: Application.get_env(:philomena, :image_file_root)
-
-  defp image_thumbnail_root,
-    do: Application.get_env(:philomena, :image_file_root) <> "/thumbs"
+    do: Application.fetch_env!(:philomena, :image_file_root)

   defp image_url_root,
-    do: Application.get_env(:philomena, :image_url_root)
+    do: Application.fetch_env!(:philomena, :image_url_root)
 end
@@ -3,6 +3,7 @@ defmodule Philomena.Images.Uploader do
   Upload and processing callback logic for Images.
   """

+  alias Philomena.Images.Thumbnailer
   alias Philomena.Images.Image
   alias Philomena.Uploader

@@ -11,14 +12,6 @@ defmodule Philomena.Images.Uploader do
   end

   def persist_upload(image) do
-    Uploader.persist_upload(image, image_file_root(), "image")
-  end
-
-  def unpersist_old_upload(image) do
-    Uploader.unpersist_old_upload(image, image_file_root(), "image")
-  end
-
-  defp image_file_root do
-    Application.get_env(:philomena, :image_file_root)
+    Thumbnailer.upload_file(image, image.uploaded_image, "full.#{image.image_format}")
   end
 end
lib/philomena/objects.ex (new file, 154 lines)

@@ -0,0 +1,154 @@
defmodule Philomena.Objects do
  @moduledoc """
  Replication wrapper for object storage backends.
  """
  alias Philomena.Mime
  require Logger

  #
  # Fetch a key from the storage backend and
  # write it into the destination file.
  #
  # sobelow_skip ["Traversal.FileModule"]
  @spec download_file(String.t(), String.t()) :: any()
  def download_file(key, file_path) do
    contents =
      backends()
      |> Enum.find_value(fn opts ->
        ExAws.S3.get_object(opts[:bucket], key)
        |> ExAws.request(opts[:config_overrides])
        |> case do
          {:ok, result} -> result
          _ -> nil
        end
      end)

    File.write!(file_path, contents.body)
  end

  #
  # Upload a file using a single API call, writing the
  # contents from the given path to storage.
  #
  # sobelow_skip ["Traversal.FileModule"]
  @spec put(String.t(), String.t()) :: any()
  def put(key, file_path) do
    {_, mime} = Mime.file(file_path)
    contents = File.read!(file_path)

    run_all(fn opts ->
      ExAws.S3.put_object(opts[:bucket], key, contents, content_type: mime)
      |> ExAws.request!(opts[:config_overrides])
    end)
  end

  #
  # Upload a file using multiple API calls, writing the
  # contents from the given path to storage.
  #
  @spec upload(String.t(), String.t()) :: any()
  def upload(key, file_path) do
    {_, mime} = Mime.file(file_path)

    run_all(fn opts ->
      file_path
      |> ExAws.S3.Upload.stream_file()
      |> ExAws.S3.upload(opts[:bucket], key, content_type: mime, max_concurrency: 2)
      |> ExAws.request!(opts[:config_overrides])
    end)
  end

  #
  # Copies a key from the source to the destination,
  # overwriting the destination object if it exists.
  #
  @spec copy(String.t(), String.t()) :: any()
  def copy(source_key, dest_key) do
    # Potential workaround for inconsistent PutObjectCopy on R2
    #
    # run_all(fn opts ->
    #   ExAws.S3.put_object_copy(opts[:bucket], dest_key, opts[:bucket], source_key)
    #   |> ExAws.request!(opts[:config_overrides])
    # end)

    try do
      file_path = Briefly.create!()
      download_file(source_key, file_path)
      upload(dest_key, file_path)
    catch
      _kind, _value -> Logger.warn("Failed to copy #{source_key} -> #{dest_key}")
    end
  end

  #
  # Removes the key from storage.
  #
  @spec delete(String.t()) :: any()
  def delete(key) do
    run_all(fn opts ->
      ExAws.S3.delete_object(opts[:bucket], key)
      |> ExAws.request!(opts[:config_overrides])
    end)
  end

  #
  # Removes all given keys from storage.
  #
  @spec delete_multiple([String.t()]) :: any()
  def delete_multiple(keys) do
    run_all(fn opts ->
      ExAws.S3.delete_multiple_objects(opts[:bucket], keys)
      |> ExAws.request!(opts[:config_overrides])
    end)
  end

  defp run_all(wrapped) do
    fun = fn opts ->
      try do
        wrapped.(opts)
        :ok
      catch
        _kind, _value -> :error
      end
    end

    backends()
    |> Task.async_stream(fun, timeout: :infinity)
    |> Enum.any?(fn {_, v} -> v == :error end)
    |> case do
      true ->
        Logger.warn("Failed to operate on all backends")

      _ ->
        :ok
    end
  end

  defp backends do
    primary_opts() ++ replica_opts()
  end

  defp primary_opts do
    [
      %{
        config_overrides: Application.fetch_env!(:philomena, :s3_primary_options),
        bucket: Application.fetch_env!(:philomena, :s3_primary_bucket)
      }
    ]
  end

  defp replica_opts do
    replica_bucket = Application.get_env(:philomena, :s3_secondary_bucket)

    if not is_nil(replica_bucket) do
      [
        %{
          config_overrides: Application.fetch_env!(:philomena, :s3_secondary_options),
          bucket: replica_bucket
        }
      ]
    else
      []
    end
  end
end
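In use, every write funnels through run_all/1 so the primary and, if configured, secondary buckets stay in sync, while reads try backends in order until one succeeds. A brief sketch (keys and paths are hypothetical):

    # Mirror a local file to all configured backends under the given key.
    Philomena.Objects.put("images/2022/11/23/1/full.png", "/tmp/full.png")

    # Fetch it back from the first backend that has it.
    Philomena.Objects.download_file("images/2022/11/23/1/full.png", "/tmp/check.png")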
@@ -40,6 +40,15 @@ defmodule Philomena.Processors do
   def processor("video/webm"), do: Webm
   def processor(_content_type), do: nil

+  @doc """
+  Takes a MIME type and version list and generates a list of versions to be
+  generated (e.g., ["thumb.png"]). List contents differ based on file type.
+  """
+  @spec versions(String.t(), keyword) :: [String.t()]
+  def versions(mime_type, valid_sizes) do
+    processor(mime_type).versions(valid_sizes)
+  end
+
   @doc """
   Takes an analyzer, file path, and version list and runs the appropriate
   processor's process/3.
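For instance, with the PNG processor selected by MIME type (the sizes here are hypothetical):

    iex> Philomena.Processors.versions("image/png", thumb: {250, 250}, medium: {800, 600})
    ["thumb.png", "medium.png"]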
@@ -1,19 +1,25 @@
 defmodule Philomena.Processors.Gif do
   alias Philomena.Intensities

+  def versions(sizes) do
+    sizes
+    |> Enum.map(fn {name, _} -> "#{name}.gif" end)
+    |> Kernel.++(["full.webm", "full.mp4", "rendered.png"])
+  end
+
   def process(analysis, file, versions) do
-    dimensions = analysis.dimensions
     duration = analysis.duration
     preview = preview(duration, file)
     palette = palette(file)

     {:ok, intensities} = Intensities.file(preview)

-    scaled = Enum.flat_map(versions, &scale_if_smaller(palette, file, dimensions, &1))
+    scaled = Enum.flat_map(versions, &scale(palette, file, &1))
+    videos = generate_videos(file)

     %{
       intensities: intensities,
-      thumbnails: scaled ++ [{:copy, preview, "rendered.png"}]
+      thumbnails: scaled ++ videos ++ [{:copy, preview, "rendered.png"}]
     }
   end

@@ -60,27 +66,7 @@ defmodule Philomena.Processors.Gif do
     palette
   end

-  # Generate full version, and WebM and MP4 previews
-  defp scale_if_smaller(_palette, file, _dimensions, {:full, _target_dim}) do
-    [{:symlink_original, "full.gif"}] ++ generate_videos(file)
-  end
-
-  defp scale_if_smaller(
-         palette,
-         file,
-         {width, height},
-         {thumb_name, {target_width, target_height}}
-       ) do
-    if width > target_width or height > target_height do
-      scaled = scale(palette, file, {target_width, target_height})
-
-      [{:copy, scaled, "#{thumb_name}.gif"}]
-    else
-      [{:symlink_original, "#{thumb_name}.gif"}]
-    end
-  end
-
-  defp scale(palette, file, {width, height}) do
+  defp scale(palette, file, {thumb_name, {width, height}}) do
     scaled = Briefly.create!(extname: ".gif")

     scale_filter = "scale=w=#{width}:h=#{height}:force_original_aspect_ratio=decrease"

@@ -104,7 +90,7 @@ defmodule Philomena.Processors.Gif do
       scaled
     ])

-    scaled
+    [{:copy, scaled, "#{thumb_name}.gif"}]
   end

   defp generate_videos(file) do
@@ -1,13 +1,16 @@
 defmodule Philomena.Processors.Jpeg do
   alias Philomena.Intensities

-  def process(analysis, file, versions) do
-    dimensions = analysis.dimensions
+  def versions(sizes) do
+    Enum.map(sizes, fn {name, _} -> "#{name}.jpg" end)
+  end
+
+  def process(_analysis, file, versions) do
     stripped = optimize(strip(file))

     {:ok, intensities} = Intensities.file(stripped)

-    scaled = Enum.flat_map(versions, &scale_if_smaller(stripped, dimensions, &1))
+    scaled = Enum.flat_map(versions, &scale(stripped, &1))

     %{
       replace_original: stripped,

@@ -68,21 +71,7 @@ defmodule Philomena.Processors.Jpeg do
     optimized
   end

-  defp scale_if_smaller(_file, _dimensions, {:full, _target_dim}) do
-    [{:symlink_original, "full.jpg"}]
-  end
-
-  defp scale_if_smaller(file, {width, height}, {thumb_name, {target_width, target_height}}) do
-    if width > target_width or height > target_height do
-      scaled = scale(file, {target_width, target_height})
-
-      [{:copy, scaled, "#{thumb_name}.jpg"}]
-    else
-      [{:symlink_original, "#{thumb_name}.jpg"}]
-    end
-  end
-
-  defp scale(file, {width, height}) do
+  defp scale(file, {thumb_name, {width, height}}) do
     scaled = Briefly.create!(extname: ".jpg")
     scale_filter = "scale=w=#{width}:h=#{height}:force_original_aspect_ratio=decrease"

@@ -102,7 +91,7 @@ defmodule Philomena.Processors.Jpeg do

     {_output, 0} = System.cmd("jpegtran", ["-optimize", "-outfile", scaled, scaled])

-    scaled
+    [{:copy, scaled, "#{thumb_name}.jpg"}]
   end

   defp srgb_profile do
@@ -1,13 +1,16 @@
 defmodule Philomena.Processors.Png do
   alias Philomena.Intensities

+  def versions(sizes) do
+    Enum.map(sizes, fn {name, _} -> "#{name}.png" end)
+  end
+
   def process(analysis, file, versions) do
-    dimensions = analysis.dimensions
     animated? = analysis.animated?

     {:ok, intensities} = Intensities.file(file)

-    scaled = Enum.flat_map(versions, &scale_if_smaller(file, animated?, dimensions, &1))
+    scaled = Enum.flat_map(versions, &scale(file, animated?, &1))

     %{
       intensities: intensities,

@@ -43,26 +46,7 @@ defmodule Philomena.Processors.Png do
     optimized
   end

-  defp scale_if_smaller(_file, _animated?, _dimensions, {:full, _target_dim}) do
-    [{:symlink_original, "full.png"}]
-  end
-
-  defp scale_if_smaller(
-         file,
-         animated?,
-         {width, height},
-         {thumb_name, {target_width, target_height}}
-       ) do
-    if width > target_width or height > target_height do
-      scaled = scale(file, animated?, {target_width, target_height})
-
-      [{:copy, scaled, "#{thumb_name}.png"}]
-    else
-      [{:symlink_original, "#{thumb_name}.png"}]
-    end
-  end
-
-  defp scale(file, animated?, {width, height}) do
+  defp scale(file, animated?, {thumb_name, {width, height}}) do
     scaled = Briefly.create!(extname: ".png")

     scale_filter =

@@ -92,6 +76,6 @@ defmodule Philomena.Processors.Png do

     System.cmd("optipng", ["-i0", "-o1", "-quiet", "-clobber", scaled])

-    scaled
+    [{:copy, scaled, "#{thumb_name}.png"}]
   end
 end
@@ -1,16 +1,23 @@
 defmodule Philomena.Processors.Svg do
   alias Philomena.Intensities

-  def process(analysis, file, versions) do
+  def versions(sizes) do
+    sizes
+    |> Enum.map(fn {name, _} -> "#{name}.png" end)
+    |> Kernel.++(["rendered.png", "full.png"])
+  end
+
+  def process(_analysis, file, versions) do
     preview = preview(file)

     {:ok, intensities} = Intensities.file(preview)

-    scaled = Enum.flat_map(versions, &scale_if_smaller(file, analysis.dimensions, preview, &1))
+    scaled = Enum.flat_map(versions, &scale(preview, &1))
+    full = [{:copy, preview, "full.png"}]

     %{
       intensities: intensities,
-      thumbnails: scaled ++ [{:copy, preview, "rendered.png"}]
+      thumbnails: scaled ++ full ++ [{:copy, preview, "rendered.png"}]
     }
   end

@@ -29,26 +36,7 @@ defmodule Philomena.Processors.Svg do
     preview
   end

-  defp scale_if_smaller(_file, _dimensions, preview, {:full, _target_dim}) do
-    [{:symlink_original, "full.svg"}, {:copy, preview, "full.png"}]
-  end
-
-  defp scale_if_smaller(
-         _file,
-         {width, height},
-         preview,
-         {thumb_name, {target_width, target_height}}
-       ) do
-    if width > target_width or height > target_height do
-      scaled = scale(preview, {target_width, target_height})
-
-      [{:copy, scaled, "#{thumb_name}.png"}]
-    else
-      [{:copy, preview, "#{thumb_name}.png"}]
-    end
-  end
-
-  defp scale(preview, {width, height}) do
+  defp scale(preview, {thumb_name, {width, height}}) do
     scaled = Briefly.create!(extname: ".png")
     scale_filter = "scale=w=#{width}:h=#{height}:force_original_aspect_ratio=decrease"

@@ -57,6 +45,6 @@ defmodule Philomena.Processors.Svg do

     {_output, 0} = System.cmd("optipng", ["-i0", "-o1", "-quiet", "-clobber", scaled])

-    scaled
+    [{:copy, scaled, "#{thumb_name}.png"}]
   end
 end
@@ -2,6 +2,18 @@ defmodule Philomena.Processors.Webm do
   alias Philomena.Intensities
   import Bitwise

+  def versions(sizes) do
+    webm_versions = Enum.map(sizes, fn {name, _} -> "#{name}.webm" end)
+    mp4_versions = Enum.map(sizes, fn {name, _} -> "#{name}.mp4" end)
+
+    gif_versions =
+      sizes
+      |> Enum.filter(fn {name, _} -> name in [:thumb_tiny, :thumb_small, :thumb] end)
+      |> Enum.map(fn {name, _} -> "#{name}.gif" end)
+
+    webm_versions ++ mp4_versions ++ gif_versions
+  end
+
   def process(analysis, file, versions) do
     dimensions = analysis.dimensions
     duration = analysis.duration

@@ -12,13 +24,13 @@ defmodule Philomena.Processors.Webm do

     {:ok, intensities} = Intensities.file(preview)

-    scaled =
-      Enum.flat_map(versions, &scale_if_smaller(stripped, mp4, palette, duration, dimensions, &1))
+    scaled = Enum.flat_map(versions, &scale(stripped, palette, duration, dimensions, &1))
+    mp4 = [{:copy, mp4, "full.mp4"}]

     %{
       replace_original: stripped,
       intensities: intensities,
-      thumbnails: scaled ++ [{:copy, preview, "rendered.png"}]
+      thumbnails: scaled ++ mp4 ++ [{:copy, preview, "rendered.png"}]
     }
   end

@@ -59,31 +71,12 @@ defmodule Philomena.Processors.Webm do
     stripped
   end

-  defp scale_if_smaller(_file, mp4, _palette, _duration, _dimensions, {:full, _target_dim}) do
-    [
-      {:symlink_original, "full.webm"},
-      {:copy, mp4, "full.mp4"}
-    ]
-  end
-
-  defp scale_if_smaller(
-         file,
-         mp4,
-         palette,
-         duration,
-         {width, height},
-         {thumb_name, {target_width, target_height}}
-       ) do
-    {webm, mp4} =
-      if width > target_width or height > target_height do
-        scale_videos(file, {width, height}, {target_width, target_height})
-      else
-        {file, mp4}
-      end
+  defp scale(file, palette, duration, dimensions, {thumb_name, target_dimensions}) do
+    {webm, mp4} = scale_videos(file, dimensions, target_dimensions)

     cond do
       thumb_name in [:thumb, :thumb_small, :thumb_tiny] ->
-        gif = scale_gif(file, palette, duration, {target_width, target_height})
+        gif = scale_gif(file, palette, duration, target_dimensions)

         [
           {:copy, webm, "#{thumb_name}.webm"},
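Because GIF previews are only produced for the three thumb sizes, versions/1 fans out asymmetrically. For example (sizes hypothetical):

    iex> Philomena.Processors.Webm.versions(thumb: {250, 250}, medium: {800, 600})
    ["thumb.webm", "medium.webm", "thumb.mp4", "medium.mp4", "thumb.gif"]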
@@ -5,6 +5,7 @@ defmodule Philomena.Uploader do

   alias Philomena.Filename
   alias Philomena.Analyzers
+  alias Philomena.Objects
   alias Philomena.Sha512
   import Ecto.Changeset

@@ -58,18 +59,20 @@ defmodule Philomena.Uploader do
   in the transaction.
   """
   @spec persist_upload(any(), String.t(), String.t()) :: any()
-
-  # sobelow_skip ["Traversal"]
   def persist_upload(model, file_root, field_name) do
     source = Map.get(model, field(upload_key(field_name)))
     dest = Map.get(model, field(field_name))
     target = Path.join(file_root, dest)
-    dir = Path.dirname(target)

-    # Create the target directory if it doesn't exist yet,
-    # then write the file.
-    File.mkdir_p!(dir)
-    File.cp!(source, target)
+    persist_file(target, source)
+  end
+
+  @doc """
+  Persist an arbitrary file to storage at the given path with the correct
+  content type and permissions.
+  """
+  def persist_file(path, file) do
+    Objects.upload(path, file)
   end

   @doc """

@@ -107,8 +110,9 @@ defmodule Philomena.Uploader do
   defp try_remove("", _file_root), do: nil
   defp try_remove(nil, _file_root), do: nil

-  # sobelow_skip ["Traversal.FileModule"]
-  defp try_remove(file, file_root), do: File.rm(Path.join(file_root, file))
+  defp try_remove(file, file_root) do
+    Objects.delete(Path.join(file_root, file))
+  end

   defp prefix_attributes(map, prefix),
     do: Map.new(map, fn {key, value} -> {"#{prefix}_#{key}", value} end)
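persist_file/2 is now the single choke point through which every upload reaches storage, so callers never touch the filesystem directly. A sketch of calling it (paths hypothetical):

    # Store a local file under a storage key on all configured backends.
    Philomena.Uploader.persist_file("avatars/2022/11/23/example.png", "/tmp/example.png")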
@@ -10,6 +10,8 @@ defmodule PhilomenaWeb.Image.FileController do
   plug PhilomenaWeb.ScraperPlug, params_name: "image", params_key: "image"

   def update(conn, %{"image" => image_params}) do
+    Images.remove_hash(conn.assigns.image)
+
     case Images.update_file(conn.assigns.image, image_params) do
       {:ok, image} ->
         conn
@@ -34,7 +34,7 @@ defmodule PhilomenaWeb.ScraperPlug do
     params_name = Keyword.get(opts, :params_name, "image")
     params_key = Keyword.get(opts, :params_key, "image")
     name = extract_filename(url, headers)
-    file = Briefly.create!()
+    file = Plug.Upload.random_file!(UUID.uuid1())

     File.write!(file, body)
mix.exs: 4 changes

@@ -69,6 +69,10 @@ defmodule Philomena.MixProject do
       {:castore, "~> 0.1"},
       {:mint, "~> 1.2"},
       {:exq, "~> 0.14"},
+      {:ex_aws, "~> 2.0",
+       github: "liamwhite/ex_aws", ref: "a340859dd8ac4d63bd7a3948f0994e493e49bda4", override: true},
+      {:ex_aws_s3, "~> 2.0"},
+      {:sweet_xml, "~> 0.7"},

       # Markdown
       {:rustler, "~> 0.22"},
mix.lock: 3 changes

@@ -27,6 +27,8 @@
   "elixir_make": {:hex, :elixir_make, "0.6.3", "bc07d53221216838d79e03a8019d0839786703129599e9619f4ab74c8c096eac", [:mix], [], "hexpm", "f5cbd651c5678bcaabdbb7857658ee106b12509cd976c2c2fca99688e1daf716"},
   "elixir_uuid": {:hex, :elixir_uuid, "1.2.1", "dce506597acb7e6b0daeaff52ff6a9043f5919a4c3315abb4143f0b00378c097", [:mix], [], "hexpm", "f7eba2ea6c3555cea09706492716b0d87397b88946e6380898c2889d68585752"},
   "erlex": {:hex, :erlex, "0.2.6", "c7987d15e899c7a2f34f5420d2a2ea0d659682c06ac607572df55a43753aa12e", [:mix], [], "hexpm", "2ed2e25711feb44d52b17d2780eabf998452f6efda104877a3881c2f8c0c0c75"},
+  "ex_aws": {:git, "https://github.com/liamwhite/ex_aws.git", "a340859dd8ac4d63bd7a3948f0994e493e49bda4", [ref: "a340859dd8ac4d63bd7a3948f0994e493e49bda4"]},
+  "ex_aws_s3": {:hex, :ex_aws_s3, "2.3.3", "61412e524616ea31d3f31675d8bc4c73f277e367dee0ae8245610446f9b778aa", [:mix], [{:ex_aws, "~> 2.0", [hex: :ex_aws, repo: "hexpm", optional: false]}, {:sweet_xml, ">= 0.0.0", [hex: :sweet_xml, repo: "hexpm", optional: true]}], "hexpm", "0044f0b6f9ce925666021eafd630de64c2b3404d79c85245cc7c8a9a32d7f104"},
   "exq": {:hex, :exq, "0.16.2", "601c0486ce5eec5bcbda882b989a1d65a3611b729d8a92e402a77c87a0c367d8", [:mix], [{:elixir_uuid, ">= 1.2.0", [hex: :elixir_uuid, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:poison, ">= 1.2.0 and < 5.0.0", [hex: :poison, repo: "hexpm", optional: true]}, {:redix, ">= 0.9.0", [hex: :redix, repo: "hexpm", optional: false]}], "hexpm", "7a0c5ff3d305c4dfb5a02d4c49f13a528e82039059716c70085ad10dfce7d018"},
   "file_system": {:hex, :file_system, "0.2.10", "fb082005a9cd1711c05b5248710f8826b02d7d1784e7c3451f9c1231d4fc162d", [:mix], [], "hexpm", "41195edbfb562a593726eda3b3e8b103a309b733ad25f3d642ba49696bf715dc"},
   "gen_smtp": {:hex, :gen_smtp, "1.1.1", "bf9303c31735100631b1d708d629e4c65944319d1143b5c9952054f4a1311d85", [:rebar3], [{:hut, "1.3.0", [hex: :hut, repo: "hexpm", optional: false]}, {:ranch, ">= 1.7.0", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm", "51bc50cc017efd4a4248cbc39ea30fb60efa7d4a49688986fafad84434ff9ab7"},

@@ -75,6 +77,7 @@
   "slime": {:hex, :slime, "1.3.0", "153cebb4a837efaf55fb09dff0d79374ad74af835a0288feccbfd9cf606446f9", [:mix], [{:neotoma, "~> 1.7", [hex: :neotoma, repo: "hexpm", optional: false]}], "hexpm", "303b58f05d740a5fe45165bcadfe01da174f1d294069d09ebd7374cd36990a27"},
   "sobelow": {:hex, :sobelow, "0.11.1", "23438964486f8112b41e743bbfd402da3e5b296fdc9eacab29914b79c48916dd", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "9897363a7eff96f4809304a90aad819e2ad5e5d24db547af502885146746a53c"},
   "ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.6", "cf344f5692c82d2cd7554f5ec8fd961548d4fd09e7d22f5b62482e5aeaebd4b0", [:make, :mix, :rebar3], [], "hexpm", "bdb0d2471f453c88ff3908e7686f86f9be327d065cc1ec16fa4540197ea04680"},
+  "sweet_xml": {:hex, :sweet_xml, "0.7.3", "debb256781c75ff6a8c5cbf7981146312b66f044a2898f453709a53e5031b45b", [:mix], [], "hexpm", "e110c867a1b3fe74bfc7dd9893aa851f0eed5518d0d7cad76d7baafd30e4f5ba"},
   "telemetry": {:hex, :telemetry, "0.4.3", "a06428a514bdbc63293cd9a6263aad00ddeb66f608163bdec7c8995784080818", [:rebar3], [], "hexpm", "eb72b8365ffda5bed68a620d1da88525e326cb82a75ee61354fc24b844768041"},
   "tesla": {:hex, :tesla, "1.4.4", "bb89aa0c9745190930366f6a2ac612cdf2d0e4d7fff449861baa7875afd797b2", [:mix], [{:castore, "~> 0.1", [hex: :castore, repo: "hexpm", optional: true]}, {:exjsx, ">= 3.0.0", [hex: :exjsx, repo: "hexpm", optional: true]}, {:finch, "~> 0.3", [hex: :finch, repo: "hexpm", optional: true]}, {:fuse, "~> 2.4", [hex: :fuse, repo: "hexpm", optional: true]}, {:gun, "~> 1.3", [hex: :gun, repo: "hexpm", optional: true]}, {:hackney, "~> 1.6", [hex: :hackney, repo: "hexpm", optional: true]}, {:ibrowse, "4.4.0", [hex: :ibrowse, repo: "hexpm", optional: true]}, {:jason, ">= 1.0.0", [hex: :jason, repo: "hexpm", optional: true]}, {:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.0", [hex: :mint, repo: "hexpm", optional: true]}, {:poison, ">= 1.0.0", [hex: :poison, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: true]}], "hexpm", "d5503a49f9dec1b287567ea8712d085947e247cb11b06bc54adb05bfde466457"},
   "toml": {:hex, :toml, "0.7.0", "fbcd773caa937d0c7a02c301a1feea25612720ac3fa1ccb8bfd9d30d822911de", [:mix], [], "hexpm", "0690246a2478c1defd100b0c9b89b4ea280a22be9a7b313a8a058a2408a2fa70"},