feat: Add object_storage engine and social media preview image generation

This commit is contained in:
Robert Prehn 2021-10-19 19:28:25 +00:00
parent 1f0a27161c
commit 9f407e91d3
98 changed files with 5075 additions and 109 deletions

3
.gitignore vendored
View file

@ -33,6 +33,9 @@ node_modules
# this depending on your deployment strategy.
/priv/static/
# Temp files generated by tests
/apps/*/priv/test/
# Mnesia DBs
/apps/*/priv/mnesia*
/priv/mnesia*

View file

@ -4,16 +4,19 @@ defmodule Legendary.Admin.Kaffy.EditorExtension do
markdown editor library.
"""
import Phoenix.HTML.Tag, only: [tag: 2]
# Stylesheets injected into the Kaffy admin head, plus an og:site_name meta
# tag — the admin preview-image builder reads the site title from that tag.
def stylesheets(_conn) do
  [
    {:safe, ~s(<link rel="stylesheet" href="/css/content-editor.css" />)},
    {:safe, ~s(<link rel="stylesheet" href="/css/admin.css" />)},
    {:safe, ~s(<link rel="stylesheet" href="/css/app.css" />)},
    tag(:meta, property: "og:site_name", content: Legendary.I18n.t!("en", "site.title"))
  ]
end
# Script tags injected into the Kaffy admin layout (content editor plus the
# shared admin and app bundles).
def javascripts(_conn) do
  [
    {:safe, ~s(<script src="/js/content-editor.js"></script>)},
    {:safe, ~s(<script src="/js/admin.js"></script>)},
    {:safe, ~s(<script src="/js/app.js"></script>)},
  ]
end

View file

@ -12,7 +12,7 @@ defmodule Legendary.Admin.MixProject do
config_path: "../../config/config.exs",
deps_path: "../../deps",
lockfile: "../../mix.lock",
elixir: "~> 1.7",
elixir: "~> 1.10",
elixirc_paths: elixirc_paths(Mix.env()),
compilers: [:phoenix, :gettext] ++ Mix.compilers(),
start_permanent: Mix.env() == :prod,

View file

@ -0,0 +1,5 @@
@import "content-editor-overrides";
.social-media-preview-image.social-media-preview-image {
border-radius: 16px !important;
}

View file

@ -0,0 +1,4 @@
import "../css/admin.css";
import "./admin/content-editor";
import "./admin/preview-image";

View file

@ -1,7 +1,7 @@
import { ready } from "./utils";
import { ready } from "../utils";
import SimpleMDE from "simplemde";
import "simplemde/dist/simplemde.min.css";
import "../css/content-editor-overrides.css";
import "../../css/content-editor-overrides.css";
const requestPreview = (plainText, previewContainer) => {
let request = new XMLHttpRequest();

View file

@ -0,0 +1,140 @@
import { ready } from "../utils";
import { fabric } from "fabric";
fabric.Object.prototype.objectCaching = false;
const textboxDefaults = {
lockMovementX: true,
lockMovementY: true,
lockScalingX: true,
lockScalingY: true,
lockSkewingX: true,
lockSkewingY: true,
lockRotation: true,
lockUniScaling: true,
hasControls: false,
selectable: true,
fontFamily:
"system-ui,-apple-system,'Segoe UI',Roboto,Helvetica,Arial,sans-serif,'Apple Color Emoji','Segoe UI Emoji'",
};
// Re-render the canvas and mirror its exported PNG data URI into the hidden
// form input, so the preview image is submitted along with the post.
const updateDataURL = (canvas) => {
  canvas.renderAll();
  const data = canvas.toDataURL({
    enableRetinaScaling: true,
  });
  canvas.dataURLInput.value = data;
};

// Replace a textbox's text and sync the canvas back into the form input.
const setTextboxValue = (canvas, textbox, value) => {
  textbox.set({ text: value });
  updateDataURL(canvas);
};

// DOM "input" event handler: copy the field's current value into the textbox.
const setTextboxFromEvent = (canvas, textbox, { target }) => {
  setTextboxValue(canvas, textbox, target.value);
};

// Create a canvas textbox whose contents track the form field matched by
// `selector` (e.g. the post title input). The initial value is read from the
// field; subsequent edits flow in through the "input" listener.
const makeLinkedTextbox = (canvas, selector, opts) => {
  const box = new fabric.Textbox("", {
    ...textboxDefaults,
    ...opts,
  });
  // NOTE(review): bind(box, canvas) sets `this` to the textbox and passes the
  // canvas as the first argument; updateDataURL only uses the argument.
  box.on("input", updateDataURL.bind(box, canvas));
  var input = document.querySelector(selector);
  canvas.add(box);
  setTextboxValue(canvas, box, input.value);
  input.addEventListener("input", setTextboxFromEvent.bind(input, canvas, box));
  return box;
};

// Create a canvas textbox with a fixed value (e.g. the site name).
const makeStaticTextbox = (canvas, value, opts) => {
  const box = new fabric.Textbox(value, {
    ...textboxDefaults,
    ...opts,
  });
  box.on("input", updateDataURL.bind(box, canvas));
  canvas.add(box);
  return box;
};

// Hide the original input, insert an 800x418 canvas beside it, and wrap the
// canvas in a fabric.Canvas that records which input receives the data URI.
const prepareCanvas = (input, canvasElement) => {
  const inputContainer = input.parentElement;
  input.setAttribute("type", "hidden");
  canvasElement.setAttribute(
    "class",
    "social-media-preview-image rounded-lg border-2 border-gray-300"
  );
  canvasElement.setAttribute("width", 800);
  canvasElement.setAttribute("height", 418);
  inputContainer.appendChild(canvasElement);
  const canvas = new fabric.Canvas(canvasElement);
  canvas.dataURLInput = input;
  return canvas;
};
// Build the social-media preview composer on post edit pages: replace the
// hidden input with a live canvas (background image, linked title/excerpt,
// static site name, accent bar) and keep the input synced to the canvas PNG.
ready(() => {
  const input = document.querySelector(
    "[name='post[social_media_preview_image]']"
  );

  // This bundle is loaded on every admin page, but the preview field only
  // exists on the post form — bail out instead of throwing on a null input.
  if (!input) {
    return;
  }

  const canvasElement = document.createElement("canvas");
  const canvas = prepareCanvas(input, canvasElement);

  fabric.Image.fromURL(
    "/images/social-media-preview-background.png",
    function (oImg) {
      oImg.selectable = false;
      canvas.add(oImg);

      // Post title: bold, near the top.
      const title = makeLinkedTextbox(canvas, "[name='post[title]']", {
        left: 80,
        top: 80,
        width: 640,
        fontWeight: "bold",
        fontSize: 36,
      });

      // Excerpt flows just below the title's bottom-left corner.
      makeLinkedTextbox(canvas, "[name='post[excerpt]']", {
        left: 80,
        width: 560,
        top: title.aCoords.bl.y + 20,
        fill: "#4B5563",
        fontSize: 18,
      });

      // Site name is read from the og:site_name meta tag rendered by the
      // admin layout.
      var name = document
        .querySelector("[property='og:site_name']")
        .getAttribute("content");
      makeStaticTextbox(canvas, name, {
        left: 80,
        width: 560,
        top: 48,
        fill: "#F87171",
        fontSize: 18,
        fontWeight: "bold",
      });

      // Decorative accent bar along the left edge.
      var rect = new fabric.Rect({
        left: 0,
        top: 0,
        fill: "#F87171",
        width: 14,
        height: 418,
        selectable: false,
      });
      canvas.add(rect);

      updateDataURL(canvas);
    }
  );
});

File diff suppressed because it is too large Load diff

View file

@ -12,6 +12,7 @@
"@fortawesome/fontawesome-free": "^5.14.0",
"alpinejs": "^2.8.1",
"autoprefixer": "^9.8.6",
"fabric": "^4.6.0",
"glob": "^7.1.6",
"npm-force-resolutions": "^0.0.10",
"phoenix": "file:/../../../deps/phoenix",

Binary file not shown.

After

Width:  |  Height:  |  Size: 125 KiB

View file

@ -21,7 +21,7 @@ module.exports = (env, options) => {
devtool: devMode ? "source-map" : undefined,
entry: {
app: glob.sync("./vendor/**/*.js").concat(["./js/app.js"]),
"content-editor": ["./js/content-editor.js"],
admin: ["./js/admin.js"],
},
output: {
filename: "js/[name].js",

View file

@ -31,7 +31,7 @@ defmodule AppWeb.Endpoint do
at: "/",
from: :app,
gzip: false,
only: ~w(css fonts images js favicon.ico robots.txt)
only: ~w(css fonts images js favicon.ico robots.txt public_uploads)
plug Plug.Static,
at: "/kaffy",

View file

@ -64,5 +64,6 @@ defmodule AppWeb.Router do
use Legendary.Core.Routes
use Legendary.Admin.Routes
use Legendary.ObjectStorageWeb.Routes
use Legendary.Content.Routes
end

View file

@ -23,6 +23,4 @@
<!-- <meta property="fb:admins" content="Facebook numberic ID" /> -->
<!-- Preview Images -->
<!-- <meta itemprop="image" content="http://www.example.com/image.jpg"> -->
<!-- <meta name="twitter:image:src" content="http://www.example.com/image.jpg"> -->
<!-- <meta property="og:image" content="http://example.com/image.jpg" /> -->
<%= preview_image_tags(@conn, assigns) %>

View file

@ -1,6 +1,8 @@
defmodule AppWeb.LayoutView do
use AppWeb, :view
alias Legendary.ContentWeb.Uploaders.SocialMediaPreview
def title(conn, assigns), do: title(view_module(conn), view_template(conn), assigns)
def title(view, template, %{post: post}), do: "#{post.title} | #{title(view, template, nil)}"
@ -54,4 +56,18 @@ defmodule AppWeb.LayoutView do
def published_tag(_, _, _) do
nil
end
# Social media preview meta tags (itemprop image, twitter:image:src,
# og:image), all pointing at the stored preview image for the given post.
def preview_image_tags(_conn, %{post: post}) do
  url = SocialMediaPreview.url({"original.png", post}, :original)

  [
    tag(:meta, itemprop: "image", content: url),
    tag(:meta, name: "twitter:image:src", content: url),
    tag(:meta, property: "og:image", content: url)
  ]
end

# No post in assigns — render nothing.
def preview_image_tags(_, _) do
  nil
end
end

View file

@ -9,7 +9,7 @@ defmodule App.MixProject do
config_path: "../../config/config.exs",
deps_path: "../../deps",
lockfile: "../../mix.lock",
elixir: "~> 1.7",
elixir: "~> 1.10",
elixirc_paths: elixirc_paths(Mix.env()),
compilers: [:phoenix, :gettext] ++ Mix.compilers(),
start_permanent: Mix.env() == :prod,
@ -42,6 +42,7 @@ defmodule App.MixProject do
{:admin, in_umbrella: true},
{:content, in_umbrella: true},
{:core, in_umbrella: true},
{:object_storage, in_umbrella: true},
{:ecto_sql, "~> 3.7"},
{:excoveralls, "~> 0.10", only: [:dev, :test]},
{:floki, ">= 0.30.0"},

View file

@ -7,6 +7,7 @@ defmodule App.LayoutViewTest do
alias Legendary.Content.Post
@post %Post{
name: "test-slug",
title: "Test Post",
excerpt: "This is a test post.",
modified_gmt: ~N[2021-09-17T00:00:00],
@ -66,4 +67,19 @@ defmodule App.LayoutViewTest do
assert published_tag(nil, nil, nil) == nil
end
end
describe "preview_image_tags/2" do
test "for a post" do
markup =
preview_image_tags(nil, %{post: @post})
|> Enum.map(&safe_to_string/1)
|> Enum.join("")
assert markup =~ "/public_uploads/content/posts/preview_images/test-slug/original.png"
end
test "without a post" do
assert preview_image_tags(nil, nil) == nil
end
end
end

View file

@ -3,6 +3,8 @@ defmodule Legendary.Content.PostAdmin do
Custom admin logic for content posts and pages.
"""
alias Legendary.Content.{Post, Posts.PreviewImages}
import Ecto.Query, only: [from: 2]
def singular_name(_) do
@ -14,11 +16,15 @@ defmodule Legendary.Content.PostAdmin do
end
def create_changeset(schema, attrs) do
Legendary.Content.Post.changeset(schema, attrs)
schema
|> Post.changeset(attrs)
|> PreviewImages.handle_preview_image_upload(attrs)
end
def update_changeset(schema, attrs) do
Legendary.Content.Post.changeset(schema, attrs)
schema
|> Post.changeset(attrs)
|> PreviewImages.handle_preview_image_upload(attrs)
end
def index(_) do
@ -58,6 +64,7 @@ defmodule Legendary.Content.PostAdmin do
comment_status: %{choices: [{"open", :open}, {"closed", :closed}]},
ping_status: %{choices: [{"open", :open}, {"closed", :closed}]},
menu_order: nil,
social_media_preview_image: %{type: :hidden},
]
end
end

View file

@ -0,0 +1,30 @@
defmodule Legendary.Content.Posts.PreviewImages do
  @moduledoc """
  Handles storing social media preview images which are submitted as data uris
  in the social_media_preview_image field.
  """
  alias Ecto.Changeset
  alias Legendary.ContentWeb.Uploaders.SocialMediaPreview
  alias Legendary.CoreWeb.Base64Uploads

  @doc """
  Stores the "social_media_preview_image" data URI from `attrs` (when present)
  through the SocialMediaPreview uploader, keyed by the changeset's `:name`.

  Returns the changeset unchanged on success or when no image was submitted;
  adds a validation error on :social_media_preview_image when the data URI
  cannot be parsed.
  """
  def handle_preview_image_upload(changeset, attrs) do
    # Base64Uploads.data_uri_to_upload/1 yields %Plug.Upload{} or :error.
    upload =
      case attrs do
        %{"social_media_preview_image" => data} when is_binary(data) ->
          Base64Uploads.data_uri_to_upload(data)

        _ ->
          nil
      end

    case upload do
      nil ->
        changeset

      %Plug.Upload{} ->
        # Store under the post's slug. Only the side effect of writing the
        # file matters; the image is not persisted on the changeset itself.
        name = Changeset.get_field(changeset, :name)
        {:ok, _filename} = SocialMediaPreview.store({upload, %{name: name}})
        changeset

      :error ->
        changeset
        |> Changeset.add_error(:social_media_preview_image, "is malformed")
    end
  end
end

View file

@ -0,0 +1,18 @@
defmodule Legendary.ContentWeb.Uploaders.SocialMediaPreview do
  @moduledoc """
  Uploader definition for social media preview images.
  """
  use Waffle.Definition

  # Only the original is stored; no resized/derived versions are generated.
  @versions [:original]

  # Override the persisted filenames: every upload is stored under the
  # version name (e.g. "original"), regardless of the client-supplied name.
  def filename(version, _) do
    Atom.to_string(version)
  end

  # Override the storage directory: one directory per post, keyed by the
  # post's :name (slug).
  def storage_dir(_version, {_file, %{name: name}}) do
    "public_uploads/content/posts/preview_images/#{name}"
  end
end

View file

@ -11,7 +11,7 @@ defmodule Legendary.Content.MixProject do
config_path: "../../config/config.exs",
deps_path: "../../deps",
lockfile: "../../mix.lock",
elixir: "~> 1.7",
elixir: "~> 1.10",
elixirc_paths: elixirc_paths(Mix.env()),
compilers: [:phoenix, :gettext] ++ Mix.compilers(),
start_permanent: Mix.env() == :prod,

View file

@ -0,0 +1,36 @@
defmodule Legendary.Content.Posts.PreviewImagesTest do
  use Legendary.Content.DataCase

  import Legendary.Content.Posts.PreviewImages

  alias Legendary.Content.Post

  # A tiny valid PNG encoded as a data URI.
  @png "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAIAAACQd1PeAAAAAXNSR0IArs4c6QAAAMJlWElmTU0AKgAAAAgABwESAAMAAAABAAEAAAEaAAUAAAABAAAAYgEbAAUAAAABAAAAagEoAAMAAAABAAIAAAExAAIAAAARAAAAcgEyAAIAAAAUAAAAhIdpAAQAAAABAAAAmAAAAAAAAABIAAAAAQAAAEgAAAABUGl4ZWxtYXRvciAzLjkuOAAAMjAyMTowOToyMiAxNTowOTowMQAAA6ABAAMAAAABAAEAAKACAAQAAAABAAAAAaADAAQAAAABAAAAAQAAAAAYjzhKAAAACXBIWXMAAAsTAAALEwEAmpwYAAADpmlUWHRYTUw6Y29tLmFkb2JlLnhtcAAAAAAAPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iWE1QIENvcmUgNi4wLjAiPgogICA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPgogICAgICA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIgogICAgICAgICAgICB4bWxuczp0aWZmPSJodHRwOi8vbnMuYWRvYmUuY29tL3RpZmYvMS4wLyIKICAgICAgICAgICAgeG1sbnM6ZXhpZj0iaHR0cDovL25zLmFkb2JlLmNvbS9leGlmLzEuMC8iCiAgICAgICAgICAgIHhtbG5zOnhtcD0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wLyI+CiAgICAgICAgIDx0aWZmOkNvbXByZXNzaW9uPjA8L3RpZmY6Q29tcHJlc3Npb24+CiAgICAgICAgIDx0aWZmOlJlc29sdXRpb25Vbml0PjI8L3RpZmY6UmVzb2x1dGlvblVuaXQ+CiAgICAgICAgIDx0aWZmOlhSZXNvbHV0aW9uPjcyPC90aWZmOlhSZXNvbHV0aW9uPgogICAgICAgICA8dGlmZjpZUmVzb2x1dGlvbj43MjwvdGlmZjpZUmVzb2x1dGlvbj4KICAgICAgICAgPHRpZmY6T3JpZW50YXRpb24+MTwvdGlmZjpPcmllbnRhdGlvbj4KICAgICAgICAgPGV4aWY6UGl4ZWxYRGltZW5zaW9uPjE8L2V4aWY6UGl4ZWxYRGltZW5zaW9uPgogICAgICAgICA8ZXhpZjpDb2xvclNwYWNlPjE8L2V4aWY6Q29sb3JTcGFjZT4KICAgICAgICAgPGV4aWY6UGl4ZWxZRGltZW5zaW9uPjE8L2V4aWY6UGl4ZWxZRGltZW5zaW9uPgogICAgICAgICA8eG1wOkNyZWF0b3JUb29sPlBpeGVsbWF0b3IgMy45Ljg8L3htcDpDcmVhdG9yVG9vbD4KICAgICAgICAgPHhtcDpNb2RpZnlEYXRlPjIwMjEtMDktMjJUMTU6MDk6MDE8L3htcDpNb2RpZnlEYXRlPgogICAgICA8L3JkZjpEZXNjcmlwdGlvbj4KICAgPC9yZGY6UkRGPgo8L3g6eG1wbWV0YT4K/LzAdAAAAAxJREFUCB1j+P//PwAF/gL+n8otEwAAAABJRU5ErkJggg=="
  # Same shape, but the payload ends in a percent-encoded "%3D", which is not
  # valid base64 and must be rejected by the upload pipeline.
  @bad_png "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABAQMAAAAl21bKAAAAA1BMVEUAAACnej3aAAAAAXRSTlMAQObYZgAAAApJREFUCNdjYAAAAAIAAeIhvDMAAAAASUVORK5CYII%3D"
  # Where the (local, test) storage backend writes the stored image.
  @path "priv/test/static/public_uploads/content/posts/preview_images/preview-image-test/original.png"

  describe "handle_preview_image_upload/2" do
    test "stores an image given as a data uri" do
      # Remove any leftover file so the existence assertion is meaningful.
      File.rm(@path)

      changeset =
        %Post{name: "preview-image-test", status: :draft}
        |> Post.changeset()

      assert handle_preview_image_upload(changeset, %{"social_media_preview_image" => @png}).valid?
      assert File.exists?(@path)
    end

    test "adds a validation error if the image is invalid" do
      File.rm(@path)

      changeset =
        %Post{name: "bad-image-test", status: :draft}
        |> Post.changeset()

      refute handle_preview_image_upload(changeset, %{"social_media_preview_image" => @bad_png}).valid?
      refute File.exists?(@path)
    end
  end
end

View file

@ -0,0 +1,17 @@
defmodule Legendary.ContentWeb.Uploaders.SocialMediaPreviewTest do
  use Legendary.Content.DataCase

  import Legendary.ContentWeb.Uploaders.SocialMediaPreview

  describe "filename/2" do
    # Previously `test ""` — empty test names are hard to read in failure
    # output and collide if another unnamed test is added to this describe.
    test "uses the version name, ignoring the uploaded file's name" do
      assert filename(:original, {%{file_name: "original.png"}, nil}) =~ "original"
    end
  end

  describe "storage_dir/2" do
    test "namespaces the directory by the post's slug" do
      assert storage_dir(nil, {nil, %{name: "test-slug"}}) =~ "public_uploads/content/posts/preview_images/test-slug"
    end
  end
end

View file

@ -0,0 +1,48 @@
defmodule Legendary.CoreWeb.Base64Uploads do
  @moduledoc """
  Utilities for converting data uris and base64 strings to Plug.Upload structs
  so they can be processed in the same way as files submitted by multipart forms.
  """

  @doc """
  Parse a `data:` URI string into a `%Plug.Upload{}`.

  Returns `:error` when the data URI payload cannot be decoded.
  """
  def data_uri_to_upload(str) do
    parse_result =
      str
      |> URI.parse()
      |> URL.Data.parse()

    case parse_result do
      %{data: {:error, _}} ->
        :error

      %{data: data, mediatype: content_type} ->
        binary_to_upload(data, content_type)
    end
  end

  @doc """
  Decode a base64 string and wrap it in a `%Plug.Upload{}` with the given
  content type. Returns `:error` when the string is not valid base64.
  """
  def base64_to_upload(str, content_type) do
    case Base.decode64(str) do
      {:ok, data} -> binary_to_upload(data, content_type)
      _ -> :error
    end
  end

  @doc """
  Write a binary to a temp file and wrap it in a `%Plug.Upload{}`.

  The filename extension is derived from `content_type` (empty when the MIME
  type is unknown). Returns `:error` if the temp file cannot be created or
  written, matching the contract of the other functions in this module.
  """
  def binary_to_upload(binary, content_type) do
    file_extension = file_extension_for_content_type(content_type)

    # File.write/2 opens, writes, and closes in one call — the previous
    # open/binwrite/close sequence could leak an open fd if the write failed,
    # and leaked the raw {:error, reason} tuple to callers expecting :error.
    with {:ok, path} <- Plug.Upload.random_file("upload"),
         :ok <- File.write(path, binary) do
      %Plug.Upload{
        path: path,
        content_type: content_type,
        filename: "#{Path.basename(path)}#{file_extension}"
      }
    else
      _ -> :error
    end
  end

  # Map a MIME type to a file extension (with leading dot), "" when unknown.
  defp file_extension_for_content_type(content_type) do
    case MIME.extensions(content_type) do
      [] -> ""
      [ext | _] -> ".#{ext}"
    end
  end
end

View file

@ -73,7 +73,7 @@ defmodule Legendary.CoreWeb.Helpers do
"""
end
defp do_styled_input_tag(type, input_helper, f, field, nil, opts, classes, error_classes) when type in [:date_select, :time_select, :datetime_select] do
defp do_styled_input_tag(type, _input_helper, f, field, nil, opts, classes, error_classes) when type in [:date_select, :time_select, :datetime_select] do
default_child_opts = [
month: [
class: "appearance-none border-b-2 border-dashed",

View file

@ -11,7 +11,7 @@ defmodule Legendary.Core.MixProject do
config_path: "../../config/config.exs",
deps_path: "../../deps",
lockfile: "../../mix.lock",
elixir: "~> 1.7",
elixir: "~> 1.10",
elixirc_paths: elixirc_paths(Mix.env()),
compilers: [:phoenix, :gettext] ++ Mix.compilers(),
start_permanent: Mix.env() == :prod,
@ -142,7 +142,10 @@ defmodule Legendary.Core.MixProject do
{:dialyxir, "~> 1.0", only: [:dev], runtime: false},
{:ex_cldr, "~> 2.23.0"},
{:ex_doc, "~> 0.24", only: :dev, runtime: false},
{:ex_url, "~> 1.3.1"},
{:excoveralls, "~> 0.10", only: [:dev, :test]},
{:ex_aws, "~> 2.1.2"},
{:ex_aws_s3, "~> 2.0"},
{:fun_with_flags, "~> 1.6.0"},
{:fun_with_flags_ui, "~> 0.7.2"},
{:phoenix, "~> 1.6.0"},
@ -157,12 +160,14 @@ defmodule Legendary.Core.MixProject do
{:phoenix_live_dashboard, "~> 0.5.0"},
{:phoenix_pubsub, "~> 2.0"},
{:pow, "~> 1.0.25"},
{:sweet_xml, "~> 0.6"},
{:telemetry_metrics, "~> 0.4"},
{:telemetry_poller, "~> 1.0"},
{:gettext, "~> 0.11"},
{:jason, "~> 1.0"},
{:libcluster, "~> 3.3"},
{:plug_cowboy, "~> 2.0"},
{:waffle, "~> 1.1"},
]
end

View file

@ -0,0 +1,38 @@
defmodule Legendary.CoreWeb.Base64UploadsTest do
  use Legendary.Core.DataCase

  import Legendary.CoreWeb.Base64Uploads

  # A tiny valid PNG, base64-encoded.
  @base64 "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAIAAACQd1PeAAAAAXNSR0IArs4c6QAAAMJlWElmTU0AKgAAAAgABwESAAMAAAABAAEAAAEaAAUAAAABAAAAYgEbAAUAAAABAAAAagEoAAMAAAABAAIAAAExAAIAAAARAAAAcgEyAAIAAAAUAAAAhIdpAAQAAAABAAAAmAAAAAAAAABIAAAAAQAAAEgAAAABUGl4ZWxtYXRvciAzLjkuOAAAMjAyMTowOToyMiAxNTowOTowMQAAA6ABAAMAAAABAAEAAKACAAQAAAABAAAAAaADAAQAAAABAAAAAQAAAAAYjzhKAAAACXBIWXMAAAsTAAALEwEAmpwYAAADpmlUWHRYTUw6Y29tLmFkb2JlLnhtcAAAAAAAPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iWE1QIENvcmUgNi4wLjAiPgogICA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPgogICAgICA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIgogICAgICAgICAgICB4bWxuczp0aWZmPSJodHRwOi8vbnMuYWRvYmUuY29tL3RpZmYvMS4wLyIKICAgICAgICAgICAgeG1sbnM6ZXhpZj0iaHR0cDovL25zLmFkb2JlLmNvbS9leGlmLzEuMC8iCiAgICAgICAgICAgIHhtbG5zOnhtcD0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wLyI+CiAgICAgICAgIDx0aWZmOkNvbXByZXNzaW9uPjA8L3RpZmY6Q29tcHJlc3Npb24+CiAgICAgICAgIDx0aWZmOlJlc29sdXRpb25Vbml0PjI8L3RpZmY6UmVzb2x1dGlvblVuaXQ+CiAgICAgICAgIDx0aWZmOlhSZXNvbHV0aW9uPjcyPC90aWZmOlhSZXNvbHV0aW9uPgogICAgICAgICA8dGlmZjpZUmVzb2x1dGlvbj43MjwvdGlmZjpZUmVzb2x1dGlvbj4KICAgICAgICAgPHRpZmY6T3JpZW50YXRpb24+MTwvdGlmZjpPcmllbnRhdGlvbj4KICAgICAgICAgPGV4aWY6UGl4ZWxYRGltZW5zaW9uPjE8L2V4aWY6UGl4ZWxYRGltZW5zaW9uPgogICAgICAgICA8ZXhpZjpDb2xvclNwYWNlPjE8L2V4aWY6Q29sb3JTcGFjZT4KICAgICAgICAgPGV4aWY6UGl4ZWxZRGltZW5zaW9uPjE8L2V4aWY6UGl4ZWxZRGltZW5zaW9uPgogICAgICAgICA8eG1wOkNyZWF0b3JUb29sPlBpeGVsbWF0b3IgMy45Ljg8L3htcDpDcmVhdG9yVG9vbD4KICAgICAgICAgPHhtcDpNb2RpZnlEYXRlPjIwMjEtMDktMjJUMTU6MDk6MDE8L3htcDpNb2RpZnlEYXRlPgogICAgICA8L3JkZjpEZXNjcmlwdGlvbj4KICAgPC9yZGY6UkRGPgo8L3g6eG1wbWV0YT4K/LzAdAAAAAxJREFUCB1j+P//PwAF/gL+n8otEwAAAABJRU5ErkJggg=="
  # Ends in a percent-encoded "%3D" — not valid base64, must be rejected.
  @bad_base64 "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABAQMAAAAl21bKAAAAA1BMVEUAAACnej3aAAAAAXRSTlMAQObYZgAAAApJREFUCNdjYAAAAAIAAeIhvDMAAAAASUVORK5CYII%3D"

  describe "data_uri_to_upload/1" do
    # Wrap the fixtures into data: URIs at compile time.
    @png "data:image/png;base64,#{@base64}"
    @bad_png "data:image/png;base64,#{@bad_base64}"

    test "with a valid data URI" do
      assert %Plug.Upload{} = data_uri_to_upload(@png)
    end

    test "with an invalid data URI" do
      assert :error = data_uri_to_upload(@bad_png)
    end
  end

  describe "base64_to_upload/2" do
    test "makes an upload from a base64 string" do
      assert %Plug.Upload{} = base64_to_upload(@base64, "image/png")
    end

    test "returns an error for invalid base64" do
      assert :error = base64_to_upload(@bad_base64, "image/png")
    end
  end

  describe "binary_to_upload/2" do
    test "with a binary" do
      binary = @base64 |> Base.decode64!()
      assert %Plug.Upload{} = binary_to_upload(binary, "image/png")
    end
  end
end

View file

@ -0,0 +1,5 @@
[
import_deps: [:ecto],
inputs: ["*.{ex,exs}", "priv/*/seeds.exs", "{config,lib,test}/**/*.{ex,exs}"],
subdirectories: ["priv/*/migrations"]
]

33
apps/object_storage/.gitignore vendored Normal file
View file

@ -0,0 +1,33 @@
# The directory Mix will write compiled artifacts to.
/_build/
# If you run "mix test --cover", coverage assets end up here.
/cover/
# The directory Mix downloads your dependencies sources to.
/deps/
# Where 3rd-party dependencies like ExDoc output generated docs.
/doc/
# Ignore .fetch files in case you like to edit your project deps locally.
/.fetch
# If the VM crashes, it generates a dump, let's ignore it too.
erl_crash.dump
# Also ignore archive artifacts (built via "mix archive.build").
*.ez
# Ignore package tarball (built via "mix hex.build").
object_storage-*.tar
# Ignore assets that are produced by build tools.
/priv/static/assets/
# Ignore digested assets cache.
/priv/static/cache_manifest.json
# In case you use Node.js/npm, you want to ignore these.
npm-debug.log
/assets/node_modules/

View file

@ -0,0 +1,3 @@
# Legendary.ObjectStorage
A Postgres-backed, S3-compatible object storage engine for Legendary apps. Provides bucket-style object storage (including chunked/multipart uploads) behind a Phoenix endpoint, so file uploads work without an external S3 service.

View file

@ -0,0 +1,3 @@
{
"baseUrl": "http://localhost:4002"
}

View file

@ -0,0 +1,5 @@
{
"name": "Using fixtures to represent data",
"email": "hello@cypress.io",
"body": "Fixtures are a great way to mock data for responses to routes"
}

View file

@ -0,0 +1,5 @@
// Cypress scaffold example spec — a trivial smoke test proving the runner
// is wired up. Replace with real end-to-end specs.
describe('My First Test', () => {
  it('Does not do much!', () => {
    expect(true).to.equal(true)
  })
})

View file

@ -0,0 +1,22 @@
/// <reference types="cypress" />
// ***********************************************************
// This example plugins/index.js can be used to load plugins
//
// You can change the location of this file or turn off loading
// the plugins file with the 'pluginsFile' configuration option.
//
// You can read more here:
// https://on.cypress.io/plugins-guide
// ***********************************************************
// This function is called when a project is opened or re-opened (e.g. due to
// the project's config changing)
/**
* @type {Cypress.PluginConfig}
*/
// eslint-disable-next-line no-unused-vars
module.exports = (on, config) => {
// `on` is used to hook into various events Cypress emits
// `config` is the resolved Cypress config
}

View file

@ -0,0 +1,36 @@
// ***********************************************
// This example commands.js shows you how to
// create various custom commands and overwrite
// existing commands.
//
// For more comprehensive examples of custom
// commands please read more here:
// https://on.cypress.io/custom-commands
// ***********************************************
//
//
// -- This is a parent command --
// Cypress.Commands.add('login', (email, password) => { ... })
//
//
// -- This is a child command --
// Cypress.Commands.add('drag', { prevSubject: 'element'}, (subject, options) => { ... })
//
//
// -- This is a dual command --
// Cypress.Commands.add('dismiss', { prevSubject: 'optional'}, (subject, options) => { ... })
//
//
// -- This will overwrite an existing command --
// Cypress.Commands.overwrite('visit', (originalFn, url, options) => { ... })
// Reset and seed the server-side database for `app` using the named seed
// set, via the end-to-end test support endpoint. Aliased as @setupDB.
Cypress.Commands.add("setupDB", (app, seedSet) => {
  cy.request('POST', '/end-to-end/db/setup', {
    app,
    seed_set: seedSet,
  }).as('setupDB')
})

// Tear down seeded data / release the sandboxed DB connection. Aliased as
// @teardownDB.
Cypress.Commands.add("teardownDB", () => {
  cy.request('POST', '/end-to-end/db/teardown').as('teardownDB')
})

View file

@ -0,0 +1,29 @@
// ***********************************************************
// This example support/index.js is processed and
// loaded automatically before your test files.
//
// This is a great place to put global configuration and
// behavior that modifies Cypress.
//
// You can change the location of this file or turn off
// automatically serving support files with the
// 'supportFile' configuration option.
//
// You can read more here:
// https://on.cypress.io/configuration
// ***********************************************************
// Import commands.js using ES2015 syntax:
import './commands'
// Alternatively you can use CommonJS syntax:
// require('./commands')
before(() => {
  // Make sure we don't have a rogue DB connection checked out
  cy.teardownDB()
})

// Tear down after every test so DB state never leaks between specs.
afterEach(() => {
  cy.teardownDB()
})

View file

View file

@ -0,0 +1,11 @@
defmodule Legendary.ObjectStorage do
  @moduledoc """
  Legendary.ObjectStorage keeps the contexts that define your domain
  and business logic.

  Contexts are also responsible for managing your data, regardless
  if it comes from the database, an external API or others.
  """

  # Read at runtime so each deployment can configure its own bucket name.
  # Returns nil when :bucket_name is not set in the :object_storage env.
  def bucket_name, do: Application.get_env(:object_storage, :bucket_name)
end

View file

@ -0,0 +1,35 @@
defmodule Legendary.ObjectStorage.Application do
  # See https://hexdocs.pm/elixir/Application.html
  # for more information on OTP Applications
  @moduledoc false

  use Application

  alias Legendary.ObjectStorageWeb.Endpoint

  @impl true
  def start(_type, _args) do
    # Children start in order: telemetry first, then the repo (the endpoint
    # needs the database), then the endpoint and pubsub.
    children = [
      # Start the Telemetry supervisor
      Legendary.ObjectStorageWeb.Telemetry,
      # Start the Ecto repository
      Legendary.ObjectStorage.Repo,
      # Start the Endpoint (http/https)
      Legendary.ObjectStorageWeb.Endpoint,
      # Start the PubSub system
      {Phoenix.PubSub, name: Legendary.ObjectStorage.PubSub}
      # Start a worker by calling Legendary.ObjectStorage.Worker.start_link(arg)
      # {ObjectStorage.Worker, arg}
    ]

    # :one_for_one — a crashed child is restarted alone.
    Supervisor.start_link(children, strategy: :one_for_one, name: Legendary.ObjectStorage.Supervisor)
  end

  # Tell Phoenix to update the endpoint configuration
  # whenever the application is updated.
  @impl true
  def config_change(changed, _new, removed) do
    Endpoint.config_change(changed, removed)
    :ok
  end
end

View file

@ -0,0 +1,35 @@
defmodule Legendary.ObjectStorage.Object do
  @moduledoc """
  One object/file in the object storage app.
  """
  use Ecto.Schema
  import Ecto.Changeset

  # Supported canned ACLs (an S3-style subset): private or public-read.
  @acl_values [:private, :public_read]

  schema "storage_objects" do
    field :acl, Ecto.Enum, values: @acl_values
    field :body, :binary
    field :path, :string

    timestamps()
  end

  @doc false
  def changeset(object, attrs \\ %{}) do
    object
    |> cast(attrs, [:path, :body, :acl])
    |> validate_required([:path, :acl])
    # Chunked uploads are initiated without a body (the parts arrive later),
    # so :body is only required for direct uploads.
    |> validate_body_or_upload(attrs)
    # NOTE(review): Ecto.Enum already rejects out-of-range values at cast
    # time ("is invalid" on :acl); this validation adds a friendlier message
    # listing the valid values — confirm it is reachable for string input.
    |> validate_inclusion(:acl, @acl_values, message: "is not supported. Valid values are #{@acl_values |> Enum.map(&Atom.to_string/1) |> Enum.join(",")}.")
  end

  # Skip the :body requirement when attrs mark this as the start of a
  # chunked upload (uploads: "1"); otherwise a body is mandatory.
  defp validate_body_or_upload(changeset, attrs) do
    case attrs do
      %{uploads: "1"} ->
        changeset

      _ ->
        validate_required(changeset, :body)
    end
  end
end

View file

@ -0,0 +1,28 @@
defmodule Legendary.ObjectStorage.ObjectChunk do
  @moduledoc """
  One chunk of a chunked upload.
  """
  use Ecto.Schema
  import Ecto.Changeset

  schema "storage_object_chunks" do
    field :body, :binary
    field :part_number, :integer
    field :path, :string

    timestamps()
  end

  @doc false
  def changeset(object_chunk, attrs) do
    object_chunk
    |> cast(attrs, [:path, :body, :part_number])
    |> validate_required([:path, :body, :part_number])
  end

  @doc """
  Deterministic ETag for a chunk, derived from its path, part number, and
  insertion timestamp — not from the body contents. Clients echo these back
  when finalizing a chunked upload.
  """
  def etag(chunk) do
    key = "#{chunk.path}:#{chunk.part_number}:#{chunk.inserted_at}"

    Base.encode16(:crypto.hash(:md5 , key))
  end
end

View file

@ -0,0 +1,145 @@
defmodule Legendary.ObjectStorage.Objects do
  @moduledoc """
  The Objects context: lookup, upsert, and deletion of stored objects, plus
  assembly of chunked (multipart) uploads.
  """

  import Ecto.Query, warn: false
  alias Legendary.ObjectStorage.{Object, ObjectChunk}

  alias Legendary.ObjectStorage.Repo

  @doc """
  Gets a single object by its path.

  Returns `{:ok, object}`, or `{:error, :not_found}` when no object exists
  at that path.

  ## Examples

      iex> get_object("some/path.png")
      {:ok, %Object{}}

  """
  @spec get_object(binary) :: {:ok, Object.t()} | {:error, :not_found}
  def get_object(path) do
    from(
      obj in Object,
      where: obj.path == ^path
    )
    |> Repo.one()
    |> case do
      nil -> {:error, :not_found}
      %Object{} = object -> {:ok, object}
    end
  end

  # Fetch the object at `path`, or a fresh unsaved %Object{} when none
  # exists — callers then upsert through update_object/2.
  @spec get_or_initialize_object(binary) :: Object.t()
  def get_or_initialize_object(path) do
    case get_object(path) do
      {:ok, object} ->
        object

      {:error, :not_found} ->
        %Object{}
    end
  end

  @doc """
  Creates or updates an object (insert-or-update on the changeset).

  ## Examples

      iex> update_object(object, %{field: new_value})
      {:ok, %Object{}}

      iex> update_object(object, %{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  @spec update_object(Object.t(), Map.t()) :: {:ok, Object.t()} | {:error, Ecto.Changeset.t()}
  def update_object(%Object{} = object, attrs) do
    object
    |> Object.changeset(attrs)
    |> Repo.insert_or_update()
  end

  # Store one part of a multipart upload. Re-uploading the same part number
  # for a path replaces the stored body (upsert on [:path, :part_number]).
  def put_chunk(%{path: path}, %{part_number: part_number, body: body}) do
    %ObjectChunk{}
    |> ObjectChunk.changeset(%{
      path: path,
      part_number: part_number,
      body: body
    })
    |> Repo.insert(conflict_target: [:path, :part_number], on_conflict: {:replace, [:body]})
  end

  # Assemble a completed multipart upload in a single transaction:
  #   1. :check_chunks       — parts must form a contiguous 1..n range
  #                            (min == 1 and max == count implies no gaps);
  #   2. :check_etags        — the client's ETags must exactly match the set
  #                            derived from the stored chunks;
  #   3. :update_object_body — concatenate chunk bodies in part order into
  #                            the object's :body via a string_agg subquery;
  #   4. :remove_chunks      — delete the now-merged chunk rows.
  # Any failing step aborts the whole finalization.
  def finalize_chunked_upload(%Object{path: path}, request_etags) do
    chunk_query =
      from(chunk in ObjectChunk, where: chunk.path == ^path)

    # Aggregate min/max/count of part numbers in one query; the keyword-list
    # select is converted to a map for easy pattern matching below.
    part_number_range =
      chunk_query
      |> select([c], [
        min_chunk: fragment("min(part_number)"),
        max_chunk: fragment("max(part_number)"),
        chunk_count: fragment("count(part_number)")
      ])
      |> Repo.one()
      |> Enum.into(%{})

    Ecto.Multi.new()
    |> Ecto.Multi.run(:check_chunks, fn _repo, _ ->
      case part_number_range do
        %{min_chunk: 1, max_chunk: max_chunk, chunk_count: max_chunk} ->
          {:ok, part_number_range}

        _ ->
          {:error, "Missing chunks for chunked upload. Aborting."}
      end
    end)
    |> Ecto.Multi.run(:check_etags, fn _repo, _ ->
      db_etags =
        chunk_query
        |> Repo.all()
        |> Enum.map(&ObjectChunk.etag/1)
        |> MapSet.new()

      # Set comparison: order is irrelevant and duplicates collapse.
      if db_etags == MapSet.new(request_etags) do
        {:ok, request_etags}
      else
        {:error, "ETags in request do not match parts in database."}
      end
    end)
    |> Ecto.Multi.update_all(:update_object_body, fn %{} ->
      # Join against a one-row subquery that concatenates all chunk bodies
      # for this path in ascending part order, then copy it onto the object.
      from(
        object in Object,
        where: object.path == ^path,
        join: new_body in fragment("""
        SELECT string_agg(body, '') as body
        FROM (
          SELECT body
          FROM storage_object_chunks
          WHERE path = ?
          ORDER BY part_number ASC
        ) as body_pieces
        """, ^path),
        update: [set: [body: new_body.body]]
      )
    end, [])
    |> Ecto.Multi.delete_all(:remove_chunks, chunk_query)
    |> Repo.transaction()
  end

  @doc """
  Deletes a Object.

  ## Examples

      iex> delete_object(object)
      {:ok, %Object{}}

      iex> delete_object(object)
      {:error, ...}

  """
  def delete_object(%Object{} = object) do
    Repo.delete(object)
  end
end

View file

@ -0,0 +1,5 @@
defmodule Legendary.ObjectStorage.Repo do
  @moduledoc """
  Ecto repository for the object storage app, backed by PostgreSQL.
  """
  use Ecto.Repo,
    otp_app: :object_storage,
    adapter: Ecto.Adapters.Postgres
end

View file

@ -0,0 +1,105 @@
defmodule Legendary.ObjectStorageWeb do
  @moduledoc """
  The entrypoint for defining your web interface, such
  as controllers, views, channels and so on.

  This can be used in your application as:

      use Legendary.ObjectStorageWeb, :controller
      use Legendary.ObjectStorageWeb, :view

  The definitions below will be executed for every view,
  controller, etc, so keep them short and clean, focused
  on imports, uses and aliases.

  Do NOT define functions inside the quoted expressions
  below. Instead, define any helper function in modules
  and import those modules here.
  """

  def controller do
    quote do
      use Phoenix.Controller, namespace: Legendary.ObjectStorageWeb

      import Plug.Conn
      import Legendary.ObjectStorageWeb.Gettext
      import Legendary.ObjectStorageWeb.Helpers
      alias Legendary.ObjectStorageWeb.Router.Helpers, as: Routes

      # Every controller parses its own raw request body (S3-style uploads
      # are not form-encoded); :parse_body is presumably provided by the
      # Helpers import above — confirm.
      plug :parse_body
    end
  end

  def view do
    quote do
      use Phoenix.View,
        root: "lib/object_storage_web/templates",
        namespace: Legendary.ObjectStorageWeb

      # Import convenience functions from controllers
      import Phoenix.Controller,
        only: [get_flash: 1, get_flash: 2, view_module: 1, view_template: 1]

      # Include shared imports and aliases for views
      unquote(view_helpers())
    end
  end

  def live_view do
    quote do
      use Phoenix.LiveView,
        layout: {Legendary.ObjectStorageWeb.LayoutView, "live.html"}

      unquote(view_helpers())
    end
  end

  def live_component do
    quote do
      use Phoenix.LiveComponent

      unquote(view_helpers())
    end
  end

  def router do
    quote do
      use Phoenix.Router

      import Plug.Conn
      import Phoenix.Controller
      import Phoenix.LiveView.Router
    end
  end

  def channel do
    quote do
      use Phoenix.Channel

      import Legendary.ObjectStorageWeb.Gettext
    end
  end

  # Shared imports/aliases included by view, live_view, and live_component.
  defp view_helpers do
    quote do
      # Use all HTML functionality (forms, tags, etc)
      use Phoenix.HTML

      # Import LiveView and .heex helpers (live_render, live_patch, <.form>, etc)
      import Phoenix.LiveView.Helpers

      # Import basic rendering functionality (render, render_layout, etc)
      import Phoenix.View

      import Legendary.ObjectStorageWeb.ErrorHelpers
      import Legendary.ObjectStorageWeb.Gettext
      alias Legendary.ObjectStorageWeb.Router.Helpers, as: Routes
    end
  end

  @doc """
  When used, dispatch to the appropriate controller/view/etc.
  """
  defmacro __using__(which) when is_atom(which) do
    apply(__MODULE__, which, [])
  end
end

View file

@ -0,0 +1,81 @@
defmodule Legendary.ObjectStorageWeb.ChunkedUploadController do
  @moduledoc """
  S3-style chunked (multipart) uploads: a request with `?uploads=1` initiates
  an upload; a request with `?uploadId=...` finalizes it by stitching the
  stored parts into the object's body.
  """
  use Legendary.ObjectStorageWeb, :controller

  alias Ecto.Changeset
  alias Legendary.ObjectStorage.Objects

  plug :put_view, Legendary.ObjectStorageWeb.UploadView
  # All actions except :show require a valid AWS-style request signature.
  plug Legendary.ObjectStorageWeb.CheckSignatures when action not in [:show]

  # Dispatch on the query params: "uploads" => "1" starts an upload; a
  # binary "uploadId" completes one. NOTE(review): there is no fallback
  # clause, so any other param shape raises FunctionClauseError — confirm
  # the router guarantees one of these two shapes.
  def chunked_upload(conn, %{"path" => path_parts, "uploads" => "1"}), do: start(conn, path_parts)
  def chunked_upload(conn, %{"path" => path_parts, "uploadId" => id}) when is_binary(id), do: finalize(conn, path_parts, id)

  # Create (or reuse) the object row for this path — with the requested ACL
  # and no body yet — then answer with the InitiateMultipartUpload XML.
  defp start(conn, path_parts) do
    path = Enum.join(path_parts, "/")

    attrs = %{
      path: path,
      acl: get_first_request_header(conn, "x-amz-acl", "private"),
      uploads: "1"
    }

    object = Objects.get_or_initialize_object(path)

    case Objects.update_object(object, attrs) do
      {:ok, updated_object} ->
        render(conn, "initiate_multipart_upload.xml", %{object: updated_object})

      {:error, %Changeset{} = changeset} ->
        conn
        |> put_status(:bad_request)
        |> render("error.xml", changeset: changeset)
    end
  end

  # Verify the client's part ETags against the stored chunks and merge the
  # parts into the object body. The uploadId itself is unused — the path
  # alone identifies the upload.
  defp finalize(conn, path_parts, _id) do
    path = Enum.join(path_parts, "/")

    with {:ok, object} <- Objects.get_object(path),
         {:ok, etags} <- extract_etags(conn),
         {:ok, _} <- Objects.finalize_chunked_upload(object, etags) do
      send_resp(conn, :ok, "")
    else
      {:error, :not_found} ->
        conn
        |> put_status(:not_found)
        |> render("not_found.xml")

      # Plain error, e.g. from etag extraction.
      {:error, message} ->
        conn
        |> put_status(:bad_request)
        |> render("error.xml", message: message, code: "InvalidPart", path: path)

      # {:error, step, message, changes} shape from Repo.transaction/Ecto.Multi.
      {:error, _, message, _} ->
        conn
        |> put_status(:bad_request)
        |> render("error.xml", message: message, code: "InvalidPart", path: path)
    end
  end

  # Pull every <Part><ETag> text value out of the CompleteMultipartUpload
  # XML request body. SweetXml takes its xpath as a charlist.
  defp extract_etags(%{assigns: %{body: body}}) do
    xpath =
      %SweetXpath{
        path: '//Part/ETag/text()',
        is_value: true,
        cast_to: false,
        is_list: true,
        is_keyword: false
      }

    # SweetXml exits (rather than raising) on unparseable XML, so trap the
    # exit and report it as a missing-etags error.
    try do
      {:ok,
       body
       |> SweetXml.parse(quiet: true)
       |> SweetXml.xpath(xpath)
       |> Enum.map(&to_string/1)
      }
    catch
      :exit, _ ->
        {:error, "Missing etags for chunked upload."}
    end
  end

  # No parsed body assigned on the conn — nothing to extract.
  defp extract_etags(_), do: {:error, "Missing etags for chunked upload."}
end

View file

@ -0,0 +1,16 @@
defmodule Legendary.ObjectStorageWeb.FallbackController do
  @moduledoc """
  Translates controller action results into valid `Plug.Conn` responses.

  See `Phoenix.Controller.action_fallback/1` for more details.
  """
  use Legendary.ObjectStorageWeb, :controller

  # Render a 404 for actions returning {:error, :not_found}.
  # Fix: the error view is Legendary.ObjectStorageWeb.ErrorView; the previous
  # bare ObjectStorageWeb.ErrorView referenced a nonexistent module and would
  # crash the first time the fallback fired.
  def call(conn, {:error, :not_found}) do
    conn
    |> put_status(:not_found)
    |> put_view(Legendary.ObjectStorageWeb.ErrorView)
    |> render(:"404")
  end
end

View file

@ -0,0 +1,106 @@
defmodule Legendary.ObjectStorageWeb.UploadController do
  @moduledoc """
  Serves, stores and deletes objects in the storage bucket, speaking a subset
  of the S3 REST API (GET/PUT/DELETE plus chunk PUTs for multipart uploads).
  """
  use Legendary.ObjectStorageWeb, :controller

  alias Ecto.Changeset
  alias Legendary.ObjectStorage.Objects
  alias Legendary.ObjectStorage.{Object, ObjectChunk}
  alias Legendary.ObjectStorageWeb.CheckSignatures

  # Fix: was `ObjectStorageWeb.FallbackController`, a nonexistent module —
  # the fallback controller lives under the Legendary namespace.
  action_fallback Legendary.ObjectStorageWeb.FallbackController

  plug CheckSignatures when action not in [:show]

  def show(conn, %{"path" => path_parts}) do
    case Objects.get_object(Enum.join(path_parts, "/")) do
      {:ok, %{acl: :public_read} = object} ->
        send_object(conn, object)

      {:ok, %{acl: :private} = object} ->
        # Private objects require a valid signature. The plug skips :show, so
        # run the check manually; respond 404 (not 403) so unauthenticated
        # callers cannot probe for the existence of private objects.
        conn_checked = CheckSignatures.call(conn)

        if conn_checked.halted do
          not_found(conn)
        else
          send_object(conn, object)
        end

      {:error, :not_found} ->
        not_found(conn)
    end
  end

  def put_object(
    conn,
    %{"path" => path_parts, "uploadId" => _id, "partNumber" => part_number}
  ) when is_binary(part_number), do: put_chunk(conn, path_parts, part_number)

  def put_object(conn, %{"path" => path_parts}), do: do_put_object(conn, path_parts)

  def delete_object(conn, %{"path" => path_parts}) do
    with {:ok, object} <- Objects.get_object(Enum.join(path_parts, "/")),
         {:ok, %Object{}} <- Objects.delete_object(object) do
      send_resp(conn, :no_content, "")
    else
      {:error, :not_found} ->
        not_found(conn)
    end
  end

  # Send the object body with a content type derived from its path.
  defp send_object(conn, object) do
    conn
    |> put_resp_content_type(MIME.from_path(object.path), "binary")
    |> send_resp(200, object.body)
  end

  defp not_found(conn) do
    conn
    |> put_status(:not_found)
    |> render("not_found.xml")
  end

  # Store one part of a multipart upload; the returned ETag is echoed back by
  # the client when the upload is finalized.
  defp put_chunk(conn, path_parts, part_number) do
    path = Enum.join(path_parts, "/")

    attrs = %{
      body: conn.assigns.body,
      part_number: part_number
    }

    with {:ok, object} <- Objects.get_object(path),
         {:ok, chunk} <- Objects.put_chunk(object, attrs) do
      conn
      |> put_resp_header("etag", ObjectChunk.etag(chunk))
      |> send_resp(:ok, "")
    else
      {:error, :not_found} ->
        not_found(conn)

      {:error, %Changeset{} = changeset} ->
        conn
        |> put_status(:bad_request)
        |> render("error.xml", changeset: changeset)
    end
  end

  # Single-part upload: create or replace the object in one request.
  defp do_put_object(conn, path_parts) do
    path = Enum.join(path_parts, "/")

    attrs = %{
      path: path,
      body: conn.assigns.body,
      acl: get_first_request_header(conn, "x-amz-acl", "private")
    }

    object = Objects.get_or_initialize_object(path)

    case Objects.update_object(object, attrs) do
      {:ok, _} ->
        conn
        |> put_resp_content_type("application/text")
        |> send_resp(:ok, "")

      {:error, %Changeset{} = changeset} ->
        conn
        |> put_status(:bad_request)
        |> render("error.xml", changeset: changeset)
    end
  end
end

View file

@ -0,0 +1,40 @@
defmodule Legendary.ObjectStorageWeb.Endpoint do
  # HTTP entry point for the object storage app: the plug pipeline every
  # request passes through before reaching the router.
  use Phoenix.Endpoint, otp_app: :object_storage

  # The session will be stored in the cookie and signed,
  # this means its contents can be read but not tampered with.
  # Set :encryption_salt if you would also like to encrypt it.
  @session_options [
    store: :cookie,
    key: "_object_storage_web_key",
    signing_salt: "bsLgw5QW"
  ]

  socket "/live", Phoenix.LiveView.Socket, websocket: [connect_info: [session: @session_options]]

  # Code reloading can be explicitly enabled under the
  # :code_reloader configuration of your endpoint.
  if code_reloading? do
    socket "/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket
    plug Phoenix.LiveReloader
    plug Phoenix.CodeReloader
    plug Phoenix.Ecto.CheckRepoStatus, otp_app: :object_storage
  end

  plug Phoenix.LiveDashboard.RequestLogger,
    param_key: "request_logger",
    cookie_key: "request_logger"

  plug Plug.RequestId
  plug Plug.Telemetry, event_prefix: [:phoenix, :endpoint]

  # NOTE(review): controllers read the raw body via a custom :parse_body plug;
  # Plug.Parsers consumes the body for matching content types — confirm the
  # parser list cannot swallow upload bodies before the controllers see them.
  plug Plug.Parsers,
    parsers: [:urlencoded, :multipart, :json],
    pass: ["*/*"],
    json_decoder: Phoenix.json_library()

  plug Plug.MethodOverride
  plug Plug.Head
  plug Plug.Session, @session_options
  plug Legendary.ObjectStorageWeb.Router
end

View file

@ -0,0 +1,24 @@
defmodule Legendary.ObjectStorageWeb.Gettext do
  @moduledoc """
  A module providing Internationalization with a gettext-based API.

  By using [Gettext](https://hexdocs.pm/gettext),
  your module gains a set of macros for translations, for example:

      import Legendary.ObjectStorageWeb.Gettext

      # Simple translation
      gettext("Here is the string to translate")

      # Plural translation
      ngettext("Here is the string to translate",
               "Here are the strings to translate",
               3)

      # Domain-based translation
      dgettext("errors", "Here is the error message to translate")

  See the [Gettext Docs](https://hexdocs.pm/gettext) for detailed usage.
  """
  # This module is the gettext backend for the :object_storage app.
  use Gettext, otp_app: :object_storage
end

View file

@ -0,0 +1,34 @@
defmodule Legendary.ObjectStorageWeb.Helpers do
  @moduledoc """
  Utility functions which are used throughout ObjectStorageWeb.
  """
  alias Plug.Conn

  @doc """
  Returns the first value of request header `key`, or `default` when the
  header is absent.
  """
  def get_first_request_header(conn, key, default \\ nil) do
    case Conn.get_req_header(conn, key) do
      [] -> default
      [hd | _] -> hd
    end
  end

  @doc """
  Reads the entire raw request body and stores it in `conn.assigns.body`.

  `Plug.Conn.read_body/2` can return large bodies in several `{:more, ...}`
  chunks; we accumulate until `{:ok, ...}`. The previous single-read match
  raised a MatchError on `{:more, ...}`, truncating/failing large uploads.
  """
  def parse_body(conn, _opts) do
    {body, conn} = read_full_body(conn, [])
    Conn.assign(conn, :body, body)
  end

  # Accumulate body chunks (prepend + reverse to stay O(n)).
  defp read_full_body(conn, acc) do
    case Conn.read_body(conn) do
      {:ok, chunk, conn} ->
        {IO.iodata_to_binary(Enum.reverse([chunk | acc])), conn}

      {:more, chunk, conn} ->
        read_full_body(conn, [chunk | acc])
    end
  end

  @doc """
  Parses an AMZ-style timestamp (e.g. `"20211019T192825Z"`) into an Erlang
  calendar datetime tuple `{{year, month, day}, {hour, minute, second}}`.
  """
  def amz_date_parse(date_string) do
    format = ~r/^([0-9]{4})([0-9]{2})([0-9]{2})T([0-9]{2})([0-9]{2})([0-9]{2})Z/
    [_ | parts] = Regex.run(format, date_string)

    # Captures are all-digit strings, so String.to_integer/1 is safe here.
    [year, month, day, hour, minute, second] = Enum.map(parts, &String.to_integer/1)

    {{year, month, day}, {hour, minute, second}}
  end
end

View file

@ -0,0 +1,70 @@
defmodule Legendary.ObjectStorageWeb.CheckSignatures do
  @moduledoc """
  A plug for checking authorization signatures either in the headers or query params.
  """
  @behaviour Plug

  import Plug.Conn
  import Legendary.ObjectStorageWeb.Helpers, only: [get_first_request_header: 2, amz_date_parse: 1]

  alias Legendary.ObjectStorageWeb.CheckSignatures.SignatureGenerator

  @impl true
  def init(opts) do
    opts
  end

  # Reject the request (403, halted) unless it is fresh and carries a
  # signature that matches the one we recompute server-side.
  @impl true
  def call(conn, _opts \\ []) do
    with true <- fresh_request(conn),
         {:ok, correct_signature} <- signature_generator().correct_signature_for_conn(conn),
         {:ok, actual_signature} <- actual_signature_for_conn(conn),
         true <- Plug.Crypto.secure_compare(correct_signature, actual_signature) do
      conn
    else
      _ ->
        conn
        |> send_resp(:forbidden, "Forbidden")
        |> halt()
    end
  end

  # Presigned URL: signature arrives as a query parameter.
  def actual_signature_for_conn(%{query_params: %{"X-Amz-Signature" => actual_signature}}) do
    {:ok, actual_signature}
  end

  # Header-signed request: parse the Signature out of the authorization header.
  def actual_signature_for_conn(conn) do
    %{"Signature" => actual_signature} =
      conn
      |> get_first_request_header("authorization")
      |> signature_generator().parse_authorization_header()

    {:ok, actual_signature}
  end

  # Resolved at runtime so tests can substitute a mock generator.
  defp signature_generator() do
    Application.get_env(:object_storage, :signature_generator, SignatureGenerator)
  end

  # AWS SigV4 caps presigned-URL expiry at one week.
  @one_week 60 * 60 * 24 * 7

  defp fresh_request(%{
         query_params: %{
           "X-Amz-Expires" => expires_in,
           "X-Amz-Date" => request_timestamp
         }
       }) do
    # Fix: query params arrive as strings, so the previous
    # `when is_integer(expires_in)` guard never matched and the freshness
    # check was silently skipped for presigned URLs. Coerce before comparing.
    expires_in = parse_expires(expires_in)

    request_epoch =
      request_timestamp
      |> amz_date_parse()
      |> :calendar.datetime_to_gregorian_seconds()

    now_epoch =
      :calendar.universal_time()
      |> :calendar.datetime_to_gregorian_seconds()

    request_age = now_epoch - request_epoch

    request_age < expires_in && expires_in < @one_week
  end

  # Header-signed requests carry no expiry; accept and rely on the signature.
  defp fresh_request(_), do: true

  defp parse_expires(n) when is_integer(n), do: n

  defp parse_expires(s) when is_binary(s) do
    case Integer.parse(s) do
      {n, ""} -> n
      # Unparseable expiry: force the freshness check to fail.
      _ -> -1
    end
  end

  defp parse_expires(_), do: -1
end

View file

@ -0,0 +1,103 @@
defmodule Legendary.ObjectStorageWeb.CheckSignatures.SignatureGenerator do
  @moduledoc """
  Can generate a signature based on an incoming request so that it can be verified
  against the signature header or parameter submitted.
  """
  import Legendary.ObjectStorageWeb.Helpers, only: [get_first_request_header: 2, amz_date_parse: 1]

  alias ExAws.{
    Auth,
    Auth.Credentials,
    Auth.Signatures,
    Request.Url
  }

  alias ExAws.Auth.Utils, as: AuthUtils
  alias ExAws.S3.Utils, as: S3Utils
  alias Plug.Conn

  # Behaviour callbacks so tests can swap in a mock generator via app config.
  @callback correct_signature_for_conn(Conn.t()) :: {:ok, String.t()}
  @callback parse_authorization_header(String.t()) :: Map.t()

  # Matches the X-Amz-Signature pair (with its adjacent "&") so it can be
  # stripped from the query string — the signature is never part of what is signed.
  @signature_in_query_pattern ~r/(&X-Amz-Signature=[0-9a-fA-F]+)|(X-Amz-Signature=[0-9a-fA-F]+&)/

  # Recompute the expected SigV4 signature for this request from the same
  # ExAws :s3 credentials config the client should have signed with.
  def correct_signature_for_conn(conn) do
    config = ExAws.Config.new(:s3)
    url = url_to_sign(conn, config)
    sanitized_query_string = Regex.replace(@signature_in_query_pattern, conn.query_string, "")

    {:ok, signature(
      conn.method |> String.downcase() |> String.to_atom(),
      url,
      sanitized_query_string,
      filtered_headers(conn),
      body_for_request(conn),
      conn |> request_datetime() |> amz_date_parse(),
      config
    )}
  end

  # Split "AWS4-HMAC-SHA256 Credential=...,SignedHeaders=...,Signature=..."
  # into a map of its comma-separated key=value parts.
  def parse_authorization_header(header) do
    ["AWS4-HMAC-SHA256", params] = String.split(header, " ")

    params
    |> String.split(",")
    |> Enum.map(& String.split(&1, "="))
    |> Enum.map(fn [k, v] -> {k, v} end)
    |> Enum.into(%{})
  end

  # Rebuild the absolute URL that was signed: scheme + host + port + /bucket/path.
  defp url_to_sign(%{params: %{"path" => path_parts}}, config) do
    object =
      path_parts
      |> Enum.join("/")
      |> S3Utils.ensure_slash()

    bucket = Application.get_env(:object_storage, :bucket_name)
    port = S3Utils.sanitized_port_component(config)

    "#{config[:scheme]}#{config[:host]}#{port}/#{bucket}#{object}"
  end

  # Presigned URLs carry the date as a query param; header-signed requests
  # carry it in the x-amz-date header.
  defp request_datetime(%{params: %{"X-Amz-Date" => datetime}}), do: datetime
  defp request_datetime(conn), do: get_first_request_header(conn, "x-amz-date")

  # Keep only the request headers the client declared as signed.
  defp filtered_headers(conn) do
    signed_header_keys =
      case conn.params do
        %{"X-Amz-SignedHeaders" => signed_header_string} ->
          signed_header_string

        _ ->
          conn
          |> get_first_request_header("authorization")
          |> parse_authorization_header()
          |> Map.get("SignedHeaders")
      end
      |> String.split(";")

    Enum.filter(conn.req_headers, fn {k, _v} -> k in signed_header_keys end)
  end

  # Presigned URL, so do not include body (unknown when presigning) to sig calc
  defp body_for_request(%{params: %{"X-Amz-Signature" => _}}), do: nil
  # Otherwise, include body
  defp body_for_request(%{assigns: %{body: body}}), do: body

  # Canonical request -> string-to-sign -> SigV4 signature, per AWS rules.
  defp signature(http_method, url, query, headers, body, datetime, config) do
    path = url |> Url.get_path(:s3) |> Url.uri_encode()
    request = Auth.build_canonical_request(http_method, path, query, headers, body)
    string_to_sign = string_to_sign(request, :s3, datetime, config)
    Signatures.generate_signature_v4("s3", config, datetime, string_to_sign)
  end

  defp string_to_sign(request, service, datetime, config) do
    request = AuthUtils.hash_sha256(request)

    """
    AWS4-HMAC-SHA256
    #{AuthUtils.amz_date(datetime)}
    #{Credentials.generate_credential_scope_v4(service, config, datetime)}
    #{request}
    """
    |> String.trim_trailing()
  end
end

View file

@ -0,0 +1,39 @@
defmodule Legendary.ObjectStorageWeb.Router do
  # Routes for the object storage app; the actual bucket routes are injected
  # by `use Legendary.ObjectStorageWeb.Routes` below.
  use Legendary.ObjectStorageWeb, :router

  pipeline :browser do
    plug :accepts, ["html"]
    plug :fetch_session
    plug :fetch_live_flash
    plug :put_root_layout, {Legendary.ObjectStorageWeb.LayoutView, :root}
    plug :protect_from_forgery
    plug :put_secure_browser_headers
  end

  pipeline :api do
    plug :accepts, ["json"]
  end

  use Legendary.ObjectStorageWeb.Routes

  # Other scopes may use custom stacks.
  # scope "/api", Legendary.ObjectStorageWeb do
  #   pipe_through :api
  # end

  # Enables LiveDashboard only for development
  #
  # If you want to use the LiveDashboard in production, you should put
  # it behind authentication and allow only admins to access it.
  # If your application does not have an admins-only section yet,
  # you can use Plug.BasicAuth to set up some basic authentication
  # as long as you are also using SSL (which you should anyway).
  # Note: Mix.env() is evaluated at compile time, so the dashboard routes are
  # compiled out of :prod builds entirely.
  if Mix.env() in [:dev, :test] do
    import Phoenix.LiveDashboard.Router

    scope "/" do
      pipe_through :browser
      live_dashboard "/dashboard", metrics: Legendary.ObjectStorageWeb.Telemetry
    end
  end
end

View file

@ -0,0 +1,21 @@
defmodule Legendary.ObjectStorageWeb.Routes do
  @moduledoc """
  Routes for the object storage engine.

  Injected into a router via `use Legendary.ObjectStorageWeb.Routes`.
  """
  import Legendary.ObjectStorage, only: [bucket_name: 0]

  defmacro __using__(_opts \\ []) do
    # Note: bucket_name() is interpolated when the using router is compiled,
    # so changing the bucket config requires a recompile to take effect.
    quote do
      scope "/", Legendary.ObjectStorageWeb do
        pipe_through :api

        get "/#{bucket_name()}/*path", UploadController, :show
        put "/#{bucket_name()}/*path", UploadController, :put_object
        delete "/#{bucket_name()}/*path", UploadController, :delete_object
        post "/#{bucket_name()}/*path", ChunkedUploadController, :chunked_upload
      end
    end
  end
end

View file

@ -0,0 +1,74 @@
defmodule Legendary.ObjectStorageWeb.Telemetry do
  @moduledoc """
  Metric definitions for the object storage app.
  """
  use Supervisor
  import Telemetry.Metrics

  def start_link(arg) do
    Supervisor.start_link(__MODULE__, arg, name: __MODULE__)
  end

  @impl true
  def init(_arg) do
    children = [
      # Telemetry poller will execute the given period measurements
      # every 10_000ms. Learn more here: https://hexdocs.pm/telemetry_metrics
      {:telemetry_poller, measurements: periodic_measurements(), period: 10_000}
      # Add reporters as children of your supervision tree.
      # {Telemetry.Metrics.ConsoleReporter, metrics: metrics()}
    ]

    Supervisor.init(children, strategy: :one_for_one)
  end

  # Metric definitions consumed by reporters and the LiveDashboard
  # (see the router's live_dashboard `metrics:` option).
  def metrics do
    [
      # Phoenix Metrics
      summary("phoenix.endpoint.stop.duration",
        unit: {:native, :millisecond}
      ),
      summary("phoenix.router_dispatch.stop.duration",
        tags: [:route],
        unit: {:native, :millisecond}
      ),

      # Database Metrics
      summary("object_storage_web.repo.query.total_time",
        unit: {:native, :millisecond},
        description: "The sum of the other measurements"
      ),
      summary("object_storage_web.repo.query.decode_time",
        unit: {:native, :millisecond},
        description: "The time spent decoding the data received from the database"
      ),
      summary("object_storage_web.repo.query.query_time",
        unit: {:native, :millisecond},
        description: "The time spent executing the query"
      ),
      summary("object_storage_web.repo.query.queue_time",
        unit: {:native, :millisecond},
        description: "The time spent waiting for a database connection"
      ),
      summary("object_storage_web.repo.query.idle_time",
        unit: {:native, :millisecond},
        description:
          "The time the connection spent waiting before being checked out for the query"
      ),

      # VM Metrics
      summary("vm.memory.total", unit: {:byte, :kilobyte}),
      summary("vm.total_run_queue_lengths.total"),
      summary("vm.total_run_queue_lengths.cpu"),
      summary("vm.total_run_queue_lengths.io")
    ]
  end

  defp periodic_measurements do
    [
      # A module, function and arguments to be invoked periodically.
      # This function must call :telemetry.execute/3 and a metric must be added above.
      # {ObjectStorageWeb, :count_users, []}
    ]
  end
end

View file

@ -0,0 +1,5 @@
<main class="container">
<p class="alert alert-info" role="alert"><%= get_flash(@conn, :info) %></p>
<p class="alert alert-danger" role="alert"><%= get_flash(@conn, :error) %></p>
<%= @inner_content %>
</main>

View file

@ -0,0 +1,11 @@
<main class="container">
<p class="alert alert-info" role="alert"
phx-click="lv:clear-flash"
phx-value-key="info"><%= live_flash(@flash, :info) %></p>
<p class="alert alert-danger" role="alert"
phx-click="lv:clear-flash"
phx-value-key="error"><%= live_flash(@flash, :error) %></p>
<%= @inner_content %>
</main>

View file

@ -0,0 +1,30 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8"/>
<meta http-equiv="X-UA-Compatible" content="IE=edge"/>
<meta name="viewport" content="width=device-width, initial-scale=1.0"/>
<%= csrf_meta_tag() %>
<%= live_title_tag assigns[:page_title] || "ObjectStorageWeb", suffix: " · Phoenix Framework" %>
<link phx-track-static rel="stylesheet" href={Routes.static_path(@conn, "/assets/app.css")}/>
<script defer phx-track-static type="text/javascript" src={Routes.static_path(@conn, "/assets/app.js")}></script>
</head>
<body>
<header>
<section class="container">
<nav>
<ul>
<li><a href="https://hexdocs.pm/phoenix/overview.html">Get Started</a></li>
<%= if function_exported?(Routes, :live_dashboard_path, 2) do %>
<li><%= link "LiveDashboard", to: Routes.live_dashboard_path(@conn, :home) %></li>
<% end %>
</ul>
</nav>
<a href="https://phoenixframework.org/" class="phx-logo">
<img src={Routes.static_path(@conn, "/images/phoenix.png")} alt="Phoenix Framework Logo"/>
</a>
</section>
</header>
<%= @inner_content %>
</body>
</html>

View file

@ -0,0 +1,41 @@
<section class="phx-hero">
<h1><%= gettext "Welcome to %{name}!", name: "Phoenix" %></h1>
<p>Peace of mind from prototype to production</p>
</section>
<section class="row">
<article class="column">
<h2>Resources</h2>
<ul>
<li>
<a href="https://hexdocs.pm/phoenix/overview.html">Guides &amp; Docs</a>
</li>
<li>
<a href="https://github.com/phoenixframework/phoenix">Source</a>
</li>
<li>
<a href="https://github.com/phoenixframework/phoenix/blob/v1.6/CHANGELOG.md">v1.6 Changelog</a>
</li>
</ul>
</article>
<article class="column">
<h2>Help</h2>
<ul>
<li>
<a href="https://elixirforum.com/c/phoenix-forum">Forum</a>
</li>
<li>
<a href="https://web.libera.chat/#elixir">#elixir on Libera Chat (IRC)</a>
</li>
<li>
<a href="https://twitter.com/elixirphoenix">Twitter @elixirphoenix</a>
</li>
<li>
<a href="https://elixir-slackin.herokuapp.com/">Elixir on Slack</a>
</li>
<li>
<a href="https://discord.gg/elixir">Elixir on Discord</a>
</li>
</ul>
</article>
</section>

View file

@ -0,0 +1,19 @@
defmodule ObjectStorageWeb.ChangesetView do
  # NOTE(review): every other module in this app lives under the
  # Legendary.ObjectStorageWeb namespace — confirm this module name
  # (and the moduledoc reference below) is intentional.
  use Legendary.ObjectStorageWeb, :view

  @doc """
  Traverses and translates changeset errors.

  See `Ecto.Changeset.traverse_errors/2` and
  `ObjectStorageWeb.ErrorHelpers.translate_error/1` for more details.
  """
  def translate_errors(changeset) do
    Ecto.Changeset.traverse_errors(changeset, &translate_error/1)
  end

  def render("error.json", %{changeset: changeset}) do
    # When encoded, the changeset returns its errors
    # as a JSON object. So we just pass it forward.
    %{errors: translate_errors(changeset)}
  end
end

View file

@ -0,0 +1,47 @@
defmodule Legendary.ObjectStorageWeb.ErrorHelpers do
  @moduledoc """
  Conveniences for translating and building error messages.
  """
  use Phoenix.HTML

  @doc """
  Generates tag for inlined form input errors.
  """
  def error_tag(form, field) do
    Enum.map(Keyword.get_values(form.errors, field), fn error ->
      content_tag(:span, translate_error(error),
        class: "invalid-feedback",
        phx_feedback_for: input_name(form, field)
      )
    end)
  end

  @doc """
  Translates an error message using gettext.
  """
  def translate_error({msg, opts}) do
    # When using gettext, we typically pass the strings we want
    # to translate as a static argument:
    #
    #     # Translate "is invalid" in the "errors" domain
    #     dgettext("errors", "is invalid")
    #
    #     # Translate the number of files with plural rules
    #     dngettext("errors", "1 file", "%{count} files", count)
    #
    # Because the error messages we show in our forms and APIs
    # are defined inside Ecto, we need to translate them dynamically.
    # This requires us to call the Gettext module passing our gettext
    # backend as first argument.
    #
    # Note we use the "errors" domain, which means translations
    # should be written to the errors.po file. The :count option is
    # set by Ecto and indicates we should also apply plural rules.
    #
    # Fix: the backend is Legendary.ObjectStorageWeb.Gettext; the previous
    # ObjectStorageWeb.Gettext referenced a nonexistent module and raised
    # UndefinedFunctionError the first time an error was translated.
    if count = opts[:count] do
      Gettext.dngettext(Legendary.ObjectStorageWeb.Gettext, "errors", msg, msg, count, opts)
    else
      Gettext.dgettext(Legendary.ObjectStorageWeb.Gettext, "errors", msg, opts)
    end
  end
end

View file

@ -0,0 +1,16 @@
defmodule Legendary.ObjectStorageWeb.ErrorView do
  use Legendary.ObjectStorageWeb, :view

  # To customize a particular status code for a certain format, define a
  # matching render/2 clause, e.g.:
  #
  #     def render("500.html", _assigns), do: "Internal Server Error"
  #
  # By default Phoenix derives the response from the template name, so
  # "404.html" becomes "Not Found".
  def template_not_found(template, _assigns),
    do: Phoenix.Controller.status_message_from_template(template)
end

View file

@ -0,0 +1,7 @@
defmodule Legendary.ObjectStorageWeb.LayoutView do
  # View backing the root/app/live layout templates.
  use Legendary.ObjectStorageWeb, :view

  # Phoenix LiveDashboard is available only in development by default,
  # so we instruct Elixir to not warn if the dashboard route is missing.
  @compile {:no_warn_undefined, {Routes, :live_dashboard_path, 2}}
end

View file

@ -0,0 +1,3 @@
defmodule Legendary.ObjectStorageWeb.PageView do
  # View for the static page templates (index.html.eex).
  use Legendary.ObjectStorageWeb, :view
end

View file

@ -0,0 +1,62 @@
defmodule Legendary.ObjectStorageWeb.UploadView do
  # Renders the S3-compatible XML responses for upload endpoints.
  # NOTE(review): the ~E sigil is deprecated in newer phoenix_html/LiveView
  # releases — confirm it is still available with the pinned deps.
  use Legendary.ObjectStorageWeb, :view

  alias Ecto.Changeset
  alias Legendary.ObjectStorage

  # S3 "InitiateMultipartUploadResult" payload; the object id doubles as the
  # UploadId the client echoes back when finalizing.
  def render("initiate_multipart_upload.xml", %{object: object}) do
    ~E"""
    <?xml version="1.0" encoding="UTF-8"?>
    <InitiateMultipartUploadResult>
      <Bucket><%= ObjectStorage.bucket_name() %></Bucket>
      <Key><%= object.path %></Key>
      <UploadId><%= object.id %></UploadId>
    </InitiateMultipartUploadResult>
    """
    |> safe_to_string()
  end

  # S3-style error document. Accepts either a plain message (with :code and
  # :path) or an Ecto changeset whose errors are flattened into one string.
  def render("error.xml", assigns) do
    errors =
      case assigns do
        %{message: message} -> message

        %{changeset: changeset} ->
          changeset.errors
          |> Enum.map(fn {key, {message, _}} ->
            "#{key}: #{message}"
          end)
          |> Enum.join(", ")
      end

    code = Map.get(assigns, :code, "InvalidArgument")

    path =
      case assigns do
        %{changeset: changeset} ->
          Changeset.get_field(changeset, :path)

        %{path: path} ->
          path
      end

    ~E"""
    <?xml version="1.0" encoding="UTF-8"?>
    <Error>
      <Code><%= code %></Code>
      <Message><%= errors %></Message>
      <Resource><%= path %></Resource>
      <RequestId>DEADBEEF</RequestId>
    </Error>
    """
    |> safe_to_string()
  end

  def render("not_found.xml", _assigns) do
    ~E"""
    <?xml version="1.0" encoding="UTF-8"?>
    <Error>
      <Code>NoSuchKey</Code>
    </Error>
    """
    |> safe_to_string()
  end
end

View file

@ -0,0 +1,80 @@
defmodule Legendary.ObjectStorage.MixProject do
  @moduledoc false
  use Mix.Project

  @version "4.2.0"

  def project do
    [
      app: :object_storage,
      version: @version,
      build_path: "../../_build",
      config_path: "../../config/config.exs",
      deps_path: "../../deps",
      lockfile: "../../mix.lock",
      elixir: "~> 1.10",
      elixirc_paths: elixirc_paths(Mix.env()),
      start_permanent: Mix.env() == :prod,
      aliases: aliases(),
      deps: deps(),
      test_coverage: [tool: ExCoveralls],
      # Fix: removed the trailing comma after this entry, which is a syntax
      # error in Elixir keyword lists.
      preferred_cli_env: [coveralls: :test, "coveralls.detail": :test, "coveralls.post": :test, "coveralls.html": :test]
    ]
  end

  # Configuration for the OTP application.
  #
  # Type `mix help compile.app` for more information.
  def application do
    [
      mod: {Legendary.ObjectStorage.Application, []},
      extra_applications: [:logger, :runtime_tools]
    ]
  end

  # Specifies which paths to compile per environment.
  defp elixirc_paths(:test), do: ["lib", "test/support"]
  defp elixirc_paths(_), do: ["lib"]

  # Specifies your project dependencies.
  #
  # Type `mix help deps` for examples and options.
  defp deps do
    [
      {:core, in_umbrella: true},
      {:credo, "~> 1.4", only: [:dev, :test], runtime: false},
      {:ecto_sql, "~> 3.6"},
      {:ex_aws, "~> 2.0"},
      {:ex_aws_s3, "~> 2.0"},
      {:floki, ">= 0.30.0", only: :test},
      {:gettext, "~> 0.18"},
      {:hackney, "~> 1.9"},
      {:jason, "~> 1.2"},
      {:mox, "~> 1.0", only: :test},
      {:phoenix, "~> 1.6.0"},
      {:phoenix_ecto, "~> 4.4"},
      {:phoenix_html, "~> 3.0"},
      {:phoenix_live_dashboard, "~> 0.5"},
      {:phoenix_live_reload, "~> 1.2", only: :dev},
      {:phoenix_live_view, "~> 0.16.0"},
      {:phoenix_pubsub, "~> 2.0"},
      {:plug_cowboy, "~> 2.5"},
      {:postgrex, ">= 0.0.0"},
      {:sweet_xml, "~> 0.7.1"},
      {:telemetry_metrics, "~> 0.6"},
      {:telemetry_poller, "~> 1.0"}
    ]
  end

  # Aliases are shortcuts or tasks specific to the current project.
  #
  # See the documentation for `Mix` for more info on aliases.
  defp aliases do
    [
      "ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
      "ecto.reset": ["ecto.drop", "ecto.setup"],
      # Intentionally a no-op: this app has no front-end assets of its own.
      "npm.install": [],
      setup: ["deps.get", "ecto.setup"],
      test: ["ecto.create --quiet", "ecto.migrate --quiet", "test"]
    ]
  end
end

View file

@ -0,0 +1,4 @@
[
import_deps: [:ecto_sql],
inputs: ["*.exs"]
]

View file

@ -0,0 +1,15 @@
defmodule ObjectStorage.Repo.Migrations.CreateStorageObjects do
  # Creates the table holding stored objects: path (bucket key), raw body
  # bytes, and the ACL ("private" / "public_read") as a string.
  use Ecto.Migration

  def change do
    create table(:storage_objects) do
      add :path, :string
      add :body, :binary
      add :acl, :string

      timestamps()
    end

    # Paths act as S3 keys, so they must be unique.
    create unique_index(:storage_objects, :path)
  end
end

View file

@ -0,0 +1,15 @@
defmodule ObjectStorage.Repo.Migrations.CreateStorageObjectChunk do
  # Creates the table holding in-flight multipart upload chunks, keyed by the
  # target object path plus the part number supplied by the client.
  use Ecto.Migration

  def change do
    create table(:storage_object_chunks) do
      add :path, :string
      add :body, :binary
      add :part_number, :integer

      timestamps()
    end

    # Each part number may appear only once per object path.
    create unique_index(:storage_object_chunks, [:path, :part_number])
  end
end

View file

@ -0,0 +1,11 @@
# Script for populating the database. You can run it as:
#
# mix run priv/repo/seeds.exs
#
# Inside the script, you can read and write to any of your
# repositories directly:
#
# Legendary.ObjectStorage.Repo.insert!(%ObjectStorage.SomeSchema{})
#
# We recommend using the bang functions (`insert!`, `update!`
# and so on) as they will fail if something goes wrong.

View file

View file

@ -0,0 +1,33 @@
defmodule Legendary.ObjectStorage.ObjectChunkTest do
  use Legendary.ObjectStorage.DataCase

  import Legendary.ObjectStorage.ObjectChunk
  alias Legendary.ObjectStorage.ObjectChunk

  describe "changeset/2" do
    test "requires path, part_number, and body" do
      chunk = %ObjectChunk{path: "test.txt", part_number: 1, body: "hello!"}

      assert changeset(chunk, %{}).valid?
      refute changeset(chunk, %{path: nil}).valid?
      refute changeset(chunk, %{body: nil}).valid?
      refute changeset(chunk, %{part_number: nil}).valid?
    end
  end

  describe "etag/1" do
    # The etag is derived from path, part_number and inserted_at, so it must
    # be a pure function of those three fields.
    test "is the same if the path, part, and inserted_at are same" do
      chunk = %{path: "hello-world", part_number: 1, inserted_at: DateTime.utc_now()}
      assert etag(chunk) == etag(chunk)
    end

    test "is different if the path, part, or inserted_at are different" do
      chunk = %{path: "hello-world", part_number: 1, inserted_at: DateTime.utc_now()}

      refute etag(chunk) == etag(%{chunk | path: "bye-for-now"})
      refute etag(chunk) == etag(%{chunk | part_number: 2})
      refute etag(chunk) == etag(%{chunk | inserted_at: DateTime.utc_now() |> DateTime.add(10)})
    end
  end
end

View file

@ -0,0 +1,17 @@
defmodule Legendary.ObjectStorage.ObjectTest do
  use Legendary.ObjectStorage.DataCase

  import Legendary.ObjectStorage.Object
  alias Legendary.ObjectStorage.Object

  describe "changeset/2" do
    # A multipart upload (uploads: "1") has no body yet, so the changeset
    # must accept the missing body.
    test "does not require body for multipart uploads" do
      cs = changeset(%Object{}, %{acl: "public_read", path: "test", uploads: "1"})
      assert cs.valid?
    end

    test "requires a body if single part upload" do
      cs = changeset(%Object{}, %{acl: "public_read", path: "test"})
      refute cs.valid?
    end
  end
end

View file

@ -0,0 +1,97 @@
defmodule Legendary.ObjectStorage.ObjectsTest do
  use Legendary.ObjectStorage.DataCase

  alias Legendary.ObjectStorage.{Object, ObjectChunk, Objects}

  test "get_object/1 returns the object with given id" do
    object =
      %Object{path: "hello.txt"}
      |> Repo.insert!()

    assert Objects.get_object(object.path) == {:ok, object}
  end

  describe "get_or_initialize_object/1" do
    test "finds objects by path" do
      object =
        %Object{path: "hello.txt"}
        |> Repo.insert!()

      assert %{path: "hello.txt"} = Objects.get_or_initialize_object(object.path)
    end

    test "returns a blank Object if no object with the path exists" do
      assert %{body: nil, acl: nil} = Objects.get_or_initialize_object("bad-path")
    end
  end

  test "update_object/2 with valid data updates the object" do
    update_attrs = %{
      path: "test.txt",
      body: "Hello, world!",
      acl: "private"
    }

    # The string "private" is cast to the :private enum atom by the changeset.
    assert {:ok, %Object{} = object} = Objects.update_object(%Object{}, update_attrs)
    assert object.path == "test.txt"
    assert object.body == "Hello, world!"
    assert object.acl == :private
  end

  test "update_object/2 with invalid data returns error changeset" do
    object =
      %Object{path: "test.txt", body: "Hello, world!"}
      |> Repo.insert!()

    # A failed update must leave the stored object untouched.
    assert {:error, %Ecto.Changeset{}} = Objects.update_object(object, %{body: ""})
    assert {:ok, object} == Objects.get_object(object.path)
    assert object.body == "Hello, world!"
  end

  describe "put_chunk/2" do
    test "adds a chunk to an object" do
      result = Objects.put_chunk(%{path: "hello-world.txt"}, %{part_number: 1, body: "Hello,"})
      assert {:ok, %ObjectChunk{part_number: 1, body: "Hello,", path: "hello-world.txt"}} = result
    end
  end

  describe "finalized_chunked_upload/2" do
    # Finalizing runs an Ecto.Multi: write the concatenated body, then delete
    # the chunk rows.
    test "with contiguous chunks" do
      object = Repo.insert!(%Object{path: "hello-world.txt", acl: :public_read})
      chunk1 = Repo.insert!(%ObjectChunk{path: "hello-world.txt", part_number: 1, body: "Hello, "})
      chunk2 = Repo.insert!(%ObjectChunk{path: "hello-world.txt", part_number: 2, body: "world!"})
      etags = [ObjectChunk.etag(chunk1), ObjectChunk.etag(chunk2)]

      assert {
        :ok,
        %{
          update_object_body: {updated_objects_count, nil},
          remove_chunks: {removed_chunks_count, nil}
        }
      } = Objects.finalize_chunked_upload(object, etags)

      assert updated_objects_count == 1
      assert removed_chunks_count == 2

      assert {:ok, %Object{body: body}} = Objects.get_object("hello-world.txt")
      assert body == "Hello, world!"
    end

    test "with gap in chunks" do
      object = Repo.insert!(%Object{path: "hello-world.txt", acl: :public_read})
      _chunk1 = Repo.insert!(%ObjectChunk{path: "hello-world.txt", part_number: 1, body: "Hell"})
      _chunk3 = Repo.insert!(%ObjectChunk{path: "hello-world.txt", part_number: 3, body: " world!"})

      assert {
        :error,
        :check_chunks,
        "Missing chunks for chunked upload. Aborting.",
        _
      } = Objects.finalize_chunked_upload(object, [])
    end
  end

  test "delete_object/1 deletes the object" do
    object =
      %Object{path: "test.txt"}
      |> Repo.insert!()

    assert {:ok, %Object{path: "test.txt"}} = Objects.delete_object(object)
    assert {:error, :not_found} = Objects.get_object(object.path)
  end
end

View file

@ -0,0 +1,11 @@
defmodule Legendary.ObjectStorage.Test do
  use Legendary.ObjectStorage.DataCase

  alias Legendary.ObjectStorage

  describe "bucket_name/0" do
    # "uploads" is the bucket name configured for the test environment.
    test "returns the bucket name" do
      assert ObjectStorage.bucket_name() == "uploads"
    end
  end
end

View file

@ -0,0 +1,75 @@
defmodule Legendary.ObjectStorageWeb.ChunkedUploadControllerTest do
  use Legendary.ObjectStorageWeb.ConnCase

  alias Legendary.ObjectStorage.{Object, ObjectChunk, Repo}

  setup do
    # Stub the signature plug so every request is treated as authorized.
    # NOTE(review): setup returns this call's result directly — assumes it
    # returns a value ExUnit setup accepts (:ok or a context); confirm.
    expect_signature_checks_and_pass()
  end

  # POST to the chunked upload route with configurable acl/content-type/body.
  def post_request(conn, path, opts \\ []) do
    content_type = Keyword.get(opts, :content_type, "text/plain")
    acl = Keyword.get(opts, :acl, "public_read")
    params = Keyword.get(opts, :params, %{})
    body = Keyword.get(opts, :body)

    conn
    |> put_req_header("x-amz-acl", acl)
    |> put_req_header("content-type", content_type)
    |> post(Routes.chunked_upload_path(conn, :chunked_upload, path, params), body)
  end

  describe "start" do
    test "initiates an upload with proper variables", %{conn: conn} do
      conn = post_request(conn, ["new-multipart-upload"], params: %{"uploads" => "1"})

      assert response(conn, 200)
    end

    test "return 400 Bad Request with a wrong ACL", %{conn: conn} do
      conn = post_request(conn, ["new-multipart-upload"], acl: "wrong", params: %{"uploads" => "1"})

      assert response(conn, 400)
    end
  end

  describe "finalize" do
    test "finalizes an upload by pass", %{conn: conn} do
      Repo.insert!(%Object{path: "new-multipart-upload", acl: :public_read})
      chunk = Repo.insert!(%ObjectChunk{path: "new-multipart-upload", part_number: 1})

      conn =
        post_request(
          conn,
          ["new-multipart-upload"],
          params: %{"uploadId" => "1"},
          body: """
          <?xml version="1.0" encoding="UTF-8"?>
          <CompleteMultipartUpload xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
            <Part>
              <ETag>#{ObjectChunk.etag(chunk)}</ETag>
              <PartNumber>1</PartNumber>
            </Part>
            ...
          </CompleteMultipartUpload>
          """
        )

      assert response(conn, 200)
    end

    test "returns a 400 Bad Request if chunks are missing", %{conn: conn} do
      Repo.insert!(%Object{path: "new-multipart-upload", acl: :public_read})
      conn = post_request(conn, ["new-multipart-upload"], params: %{"uploadId" => "1"})

      assert response(conn, 400)
    end

    test "returns a 404 if no such object exists", %{conn: conn} do
      conn = post_request(conn, ["new-multipart-upload"], params: %{"uploadId" => "1"})

      assert response(conn, 404)
    end
  end
end

View file

@ -0,0 +1,106 @@
# Renamed from `ObjectStorageWeb.UploadControllerTest`: every sibling module
# in this app lives under the `Legendary.` namespace, and the bare name was
# trespassing outside it.
defmodule Legendary.ObjectStorageWeb.UploadControllerTest do
  use Legendary.ObjectStorageWeb.ConnCase

  alias Legendary.ObjectStorage.{Object, Repo}

  # Issues an S3-style PUT against the upload route with the given ACL header,
  # body, query params, and content type.
  def put_request(conn, path, acl, body, params \\ %{}, content_type \\ "text/plain") do
    conn
    |> put_req_header("x-amz-acl", acl)
    |> put_req_header("content-type", content_type)
    |> put(
      Routes.upload_path(conn, :put_object, path, params),
      body
    )
  end

  describe "show object" do
    test "returns 404 if the object is private and sig check fails", %{conn: conn} do
      expect_signature_checks_and_fail()
      Repo.insert!(%Object{path: "secret.txt", acl: :private, body: "Ssh!"})

      conn = get(conn, Routes.upload_path(conn, :show, ["secret.txt"]))

      # Unauthorized reads of private objects 404 with an XML error document.
      assert response(conn, 404)
      assert response_content_type(conn, :xml)
    end

    test "returns the object if the object is private but the sig check passes", %{conn: conn} do
      expect_signature_checks_and_pass()
      Repo.insert!(%Object{path: "secret.txt", acl: :private, body: "Ssh!"})

      conn = get(conn, Routes.upload_path(conn, :show, ["secret.txt"]))

      assert text_response(conn, 200) == "Ssh!"
    end
  end

  describe "put object" do
    setup do
      expect_signature_checks_and_pass()
    end

    test "renders object when data is valid", %{conn: conn} do
      conn = put_request(conn, ["test.txt"], "public_read", "Hello, world!")
      assert text_response(conn, 200) == ""

      # Round-trip: the stored body comes back on a subsequent GET.
      conn = get(conn, Routes.upload_path(conn, :show, ["test.txt"]))
      assert "Hello, world!" = text_response(conn, 200)
    end

    test "renders errors when data is invalid", %{conn: conn} do
      conn = put_request(conn, ["test.txt"], "bad_acl", "Hello, world!")
      assert response(conn, 400) =~ "<Code>InvalidArgument</Code>"
    end
  end

  describe "put chunk" do
    setup [:create_object]

    setup do
      expect_signature_checks_and_pass()
    end

    test "can put a chunk if you give a part number", %{conn: conn, object: object} do
      conn =
        put_request(conn, [object.path], "public_read", "Hello, world!", %{
          "partNumber" => 1,
          "uploadId" => 1
        })

      assert response(conn, 200)
    end

    test "returns a 404 if the path is wrong", %{conn: conn} do
      conn =
        put_request(conn, ["wrong"], "public_read", "Hello, world!", %{
          "partNumber" => 1,
          "uploadId" => 1
        })

      assert response(conn, 404)
    end

    test "returns a 400 Bad Request if the body is missing", %{conn: conn, object: object} do
      conn =
        put_request(conn, [object.path], "public_read", nil, %{
          "partNumber" => 1,
          "uploadId" => 1
        })

      assert response(conn, 400) =~ "<Code>InvalidArgument</Code>"
    end
  end

  describe "delete object" do
    setup [:create_object]

    setup do
      expect_signature_checks_and_pass()
    end

    test "deletes chosen object", %{conn: conn} do
      conn = delete(conn, Routes.upload_path(conn, :delete_object, ["test.txt"]))
      assert response(conn, 204)

      conn = get(conn, Routes.upload_path(conn, :show, ["test.txt"]))
      assert response(conn, 404)
    end

    test "returns 404 if the path does not exist", %{conn: conn} do
      conn = delete(conn, Routes.upload_path(conn, :delete_object, ["bad-path"]))
      assert response(conn, 404)
    end
  end

  # Inserts the object that the "put chunk" and "delete object" tests act on.
  defp create_object(_) do
    %{object: %Object{path: "test.txt"} |> Repo.insert!()}
  end
end

View file

@ -0,0 +1,46 @@
defmodule Legendary.ObjectStorageWeb.CheckSignatures.SignatureGeneratorTest do
  use Legendary.ObjectStorageWeb.ConnCase

  import Legendary.ObjectStorageWeb.CheckSignatures.SignatureGenerator

  alias ExAws.S3

  # Removed leftover `require IEx`: nothing in this module uses IEx, so it was
  # a debugging artifact. Also fixed the missing space in `describe "..." do`.
  describe "correct_signature_for_conn/1" do
    test "handles signature in authorization header" do
      # Fixed host and x-amz-date make the signature computation
      # deterministic, so the expected hex digest can be pinned.
      conn =
        "PUT"
        |> build_conn("/uploads/sig-test.txt", %{"path" => ["sig-test.txt"]})
        |> put_req_header("host", "localhost:4000")
        |> put_req_header("x-amz-date", "20211015T000000Z")
        |> put_req_header("authorization", "AWS4-HMAC-SHA256 SignedHeaders=host;x-amz-date")
        |> assign(:body, "")

      assert {:ok, sig} = correct_signature_for_conn(conn)
      assert sig == "964cf3b50a10e020dee639986b2423118144e0ac4371f45a6ecf75adb043712b"
    end

    test "handles presigned url" do
      # Let ExAws produce a presigned URL, then verify our generator computes
      # the same signature for the equivalent request.
      {:ok, url} = S3.presigned_url(ExAws.Config.new(:s3), :put, "uploads", "hello-world.txt")

      target_sig =
        url
        |> URI.parse()
        |> Map.get(:query)
        |> URI.decode_query()
        |> Map.get("X-Amz-Signature")

      conn =
        "PUT"
        |> build_conn(
          url,
          %{"path" => ["hello-world.txt"]}
        )
        |> assign(:body, nil)
        |> put_req_header("host", "localhost:4000")

      assert {:ok, sig} = correct_signature_for_conn(conn)
      assert sig == target_sig
    end
  end
end

View file

@ -0,0 +1,44 @@
defmodule Legendary.ObjectStorageWeb.CheckSignaturesTest do
use Legendary.ObjectStorageWeb.ConnCase
import Legendary.ObjectStorageWeb.CheckSignatures
import Mox
alias Legendary.ObjectStorageWeb.CheckSignatures.MockSignatureGenerator
# Plug contract: init/1 passes its options through unchanged.
test "init/1 returns opts", do: assert init(nil) == nil
describe "call/2" do
test "with a good signature in header it continues", %{conn: conn} do
# The mocked generator "computes" good-sig and the parsed authorization
# header carries the same value, so the plug lets the conn through.
MockSignatureGenerator
|> expect(:correct_signature_for_conn, fn _conn -> {:ok, "good-sig"} end)
|> expect(:parse_authorization_header, fn _ -> %{"Signature" => "good-sig"} end)
refute call(conn, nil).halted
end
test "with a good signature in query params it continues", %{conn: conn} do
MockSignatureGenerator
|> expect(:correct_signature_for_conn, fn _conn -> {:ok, "good-sig"} end)
# Signature supplied via a presigned-URL query param instead of the header;
# note parse_authorization_header is not expected to be called here.
conn = %{conn | query_params: %{"X-Amz-Signature" => "good-sig"}}
refute call(conn, nil).halted
end
test "with a bad signature it halts", %{conn: conn} do
MockSignatureGenerator
|> expect(:correct_signature_for_conn, fn _conn -> {:ok, "good-sig"} end)
# The supplied signature differs from the computed one, so the plug halts.
conn = %{conn | query_params: %{"X-Amz-Signature" => "bad-sig"}}
assert call(conn, nil).halted
end
test "with an expired request it halts", %{conn: conn} do
# X-Amz-Date far in the past plus a one-hour expiry makes the request
# stale; the plug halts without consulting the signature generator.
conn = %{conn | query_params: %{"X-Amz-Date" => "19000101T000000Z", "X-Amz-Expires" => 3600}}
assert call(conn, nil).halted
end
end
end

View file

@ -0,0 +1,14 @@
defmodule Legendary.ObjectStorageWeb.ErrorViewTest do
  use Legendary.ObjectStorageWeb.ConnCase, async: true

  # Brings render_to_string/3 into scope for asserting on rendered templates.
  import Phoenix.View

  alias Legendary.ObjectStorageWeb.ErrorView

  test "renders 404.html" do
    assert render_to_string(ErrorView, "404.html", []) == "Not Found"
  end

  test "renders 500.html" do
    assert render_to_string(ErrorView, "500.html", []) == "Internal Server Error"
  end
end

View file

@ -0,0 +1,8 @@
defmodule Legendary.ObjectStorageWeb.LayoutViewTest do
use Legendary.ObjectStorageWeb.ConnCase, async: true
# Generated placeholder: there are no layout helpers to assert on yet.
# When testing helpers, you may want to import Phoenix.HTML and
# use functions such as safe_to_string() to convert the helper
# result into an HTML string.
# import Phoenix.HTML
end

View file

@ -0,0 +1,3 @@
defmodule Legendary.ObjectStorageWeb.PageViewTest do
# Generated placeholder: no page-view functions are asserted on yet.
use Legendary.ObjectStorageWeb.ConnCase, async: true
end

View file

View file

@ -0,0 +1,38 @@
defmodule Legendary.ObjectStorageWeb.ChannelCase do
@moduledoc """
This module defines the test case to be used by
channel tests.
Such tests rely on `Phoenix.ChannelTest` and also
import other functionality to make it easier
to build common data structures and query the data layer.
Finally, if the test case interacts with the database,
we enable the SQL sandbox, so changes done to the database
are reverted at the end of every test. If you are using
PostgreSQL, you can even run database tests asynchronously
by setting `use Legendary.ObjectStorageWeb.ChannelCase, async: true`, although
this option is not recommended for other databases.
"""
use ExUnit.CaseTemplate
alias Ecto.Adapters.SQL.Sandbox
using do
quote do
# Import conveniences for testing with channels
import Phoenix.ChannelTest
import Legendary.ObjectStorageWeb.ChannelCase
# The default endpoint for testing
@endpoint Legendary.ObjectStorageWeb.Endpoint
end
end
# Check out a sandboxed DB connection per test. Shared mode is used for
# non-async tests so processes spawned by the test reuse the same connection.
setup tags do
pid = Sandbox.start_owner!(Legendary.ObjectStorage.Repo, shared: not tags[:async])
on_exit(fn -> Sandbox.stop_owner(pid) end)
:ok
end
end

View file

@ -0,0 +1,43 @@
defmodule Legendary.ObjectStorageWeb.ConnCase do
@moduledoc """
This module defines the test case to be used by
tests that require setting up a connection.
Such tests rely on `Phoenix.ConnTest` and also
import other functionality to make it easier
to build common data structures and query the data layer.
Finally, if the test case interacts with the database,
we enable the SQL sandbox, so changes done to the database
are reverted at the end of every test. If you are using
PostgreSQL, you can even run database tests asynchronously
by setting `use Legendary.ObjectStorageWeb.ConnCase, async: true`, although
this option is not recommended for other databases.
"""
use ExUnit.CaseTemplate
alias Ecto.Adapters.SQL.Sandbox
alias Legendary.ObjectStorage.Repo
using do
quote do
# Import conveniences for testing with connections
import Plug.Conn
import Phoenix.ConnTest
import Legendary.ObjectStorageWeb.ConnCase
import Legendary.ObjectStorageWeb.SignatureTestingUtilities
alias Legendary.ObjectStorageWeb.Router.Helpers, as: Routes
# The default endpoint for testing
@endpoint Legendary.ObjectStorageWeb.Endpoint
end
end
# Check out a sandboxed DB connection per test (shared for non-async tests)
# and hand each test a fresh Plug conn in its context.
setup tags do
pid = Sandbox.start_owner!(Repo, shared: not tags[:async])
on_exit(fn -> Sandbox.stop_owner(pid) end)
{:ok, conn: Phoenix.ConnTest.build_conn()}
end
end

View file

@ -0,0 +1,53 @@
defmodule Legendary.ObjectStorage.DataCase do
@moduledoc """
This module defines the setup for tests requiring
access to the application's data layer.
You may define functions here to be used as helpers in
your tests.
Finally, if the test case interacts with the database,
we enable the SQL sandbox, so changes done to the database
are reverted at the end of every test. If you are using
PostgreSQL, you can even run database tests asynchronously
by setting `use Legendary.ObjectStorage.DataCase, async: true`, although
this option is not recommended for other databases.
"""
use ExUnit.CaseTemplate
alias Ecto.Adapters.SQL.Sandbox
using do
quote do
alias Legendary.ObjectStorage.Repo
import Ecto
import Ecto.Changeset
import Ecto.Query
import Legendary.ObjectStorage.DataCase
end
end
# Check out a sandboxed DB connection per test; shared mode is used for
# non-async tests so spawned processes reuse the same connection.
setup tags do
pid = Sandbox.start_owner!(Legendary.ObjectStorage.Repo, shared: not tags[:async])
on_exit(fn -> Sandbox.stop_owner(pid) end)
:ok
end
@doc """
A helper that transforms changeset errors into a map of messages.

    assert {:error, changeset} = Accounts.create_user(%{password: "short"})
    assert "password is too short" in errors_on(changeset).password
    assert %{password: ["password is too short"]} = errors_on(changeset)
"""
def errors_on(changeset) do
# Interpolate %{key} placeholders in each message from the error's opts
# (e.g. %{count} in "should be at least %{count} characters").
Ecto.Changeset.traverse_errors(changeset, fn {message, opts} ->
Regex.replace(~r"%{(\w+)}", message, fn _, key ->
opts |> Keyword.get(String.to_existing_atom(key), key) |> to_string()
end)
end)
end
end

View file

@ -0,0 +1,29 @@
defmodule Legendary.ObjectStorageWeb.SignatureTestingUtilities do
  @moduledoc """
  Utilities that make it easier to test controller actions which require auth
  signatures.
  """
  import Mox

  alias Legendary.ObjectStorageWeb.CheckSignatures.MockSignatureGenerator

  @doc """
  Stubs the signature generator so every signature check in the calling test
  passes (header signature matches the "computed" one).
  """
  def expect_signature_checks_and_pass, do: expect_signature_checks("good-sig")

  @doc """
  Stubs the signature generator so every signature check in the calling test
  fails (header signature differs from the "computed" one).
  """
  def expect_signature_checks_and_fail, do: expect_signature_checks("bad-sig")

  # Shared Mox wiring, extracted from the two public functions that previously
  # duplicated it verbatim: the generator always "computes" good-sig, while the
  # parsed authorization header carries `header_sig`, so "good-sig" matches and
  # anything else mismatches.
  defp expect_signature_checks(header_sig) do
    verify_on_exit!()

    MockSignatureGenerator
    |> expect(:correct_signature_for_conn, fn _conn -> {:ok, "good-sig"} end)
    |> expect(:parse_authorization_header, fn _ -> %{"Signature" => header_sig} end)

    :ok
  end
end

View file

@ -0,0 +1,7 @@
ExUnit.start()
# Manual sandbox mode: each test case explicitly checks out its own DB
# connection in its setup block (see ConnCase/DataCase/ChannelCase).
Ecto.Adapters.SQL.Sandbox.mode(Legendary.ObjectStorage.Repo, :manual)
# Mox mock for the signature-generator behaviour; config/test.exs points
# :object_storage, :signature_generator at this module.
Mox.defmock(
Legendary.ObjectStorageWeb.CheckSignatures.MockSignatureGenerator,
for: Legendary.ObjectStorageWeb.CheckSignatures.SignatureGenerator
)

View file

@ -1,4 +1,4 @@
use Mix.Config
import Config
config :kaffy,
otp_app: :admin,

View file

@ -1,4 +1,4 @@
use Mix.Config
import Config
[
{:admin, Legendary.Admin, false},
@ -6,6 +6,7 @@ use Mix.Config
{:core, Legendary.AuthWeb, false},
{:content, Legendary.Content, false},
{:core, Legendary.CoreWeb, false},
{:object_storage, Legendary.ObjectStorageWeb, false}
]
|> Enum.map(fn {otp_app, module, start_server} ->
endpoint = Module.concat(module, "Endpoint")
@ -25,6 +26,7 @@ end)
{:app, App.Repo},
{:content, Legendary.Content.Repo},
{:core, Legendary.Core.Repo},
{:object_storage, Legendary.ObjectStorage.Repo}
]
|> Enum.map(fn
{otp_app, repo} ->
@ -32,8 +34,7 @@ end)
ecto_repos: [repo],
generators: [context_app: otp_app]
config otp_app, repo,
pool: Legendary.Core.SharedDBConnectionPool
config otp_app, repo, pool: Legendary.Core.SharedDBConnectionPool
end)
config :core, :pow,
@ -64,23 +65,23 @@ config :content,
repo: Legendary.Content.Repo,
queues: [default: 10],
crontab: [
{"0 * * * *", Legendary.Content.Sitemaps},
{"0 * * * *", Legendary.Content.Sitemaps}
]
config :app,
Oban,
repo: App.Repo,
queues: [default: 10],
crontab: [
]
crontab: []
config :mnesia, dir: to_charlist(Path.expand("./priv/mnesia@#{Kernel.node}"))
config :mnesia, dir: to_charlist(Path.expand("./priv/mnesia@#{Kernel.node()}"))
# Feature flags
config :fun_with_flags, :cache,
enabled: true,
ttl: 300 # seconds
# seconds
ttl: 300
config :fun_with_flags, :persistence,
adapter: FunWithFlags.Store.Persistent.Ecto,
@ -92,7 +93,7 @@ config :fun_with_flags, :cache_bust_notifications,
client: App.PubSub
# Notifications can also be disabled, which will also remove the Redis/Redix dependency
config :fun_with_flags, :cache_bust_notifications, [enabled: false]
config :fun_with_flags, :cache_bust_notifications, enabled: false
import_config "email_styles.exs"
import_config "admin.exs"

View file

@ -1,4 +1,4 @@
use Mix.Config
import Config
# For development, we disable any cache and enable
# debugging and code reloading.
@ -12,6 +12,7 @@ use Mix.Config
{:core, Legendary.AuthWeb},
{:content, Legendary.Content},
{:core, Legendary.CoreWeb},
{:object_storage, Legendary.ObjectStorageWeb}
]
|> Enum.map(fn {otp_app, module} ->
config otp_app, Module.concat(module, "Endpoint"),
@ -46,7 +47,8 @@ end)
{:admin, Legendary.Admin.Repo},
{:app, App.Repo},
{:content, Legendary.Content.Repo},
{:core, Legendary.Core.Repo}
{:core, Legendary.Core.Repo},
{:object_storage, Legendary.ObjectStorage.Repo}
]
|> Enum.map(fn {otp_app, repo} ->
config otp_app, repo,
@ -63,10 +65,17 @@ config :core, Legendary.CoreMailer, adapter: Bamboo.LocalAdapter
config :libcluster,
topologies: [
erlang_hosts: [
strategy: Elixir.Cluster.Strategy.Gossip,
strategy: Elixir.Cluster.Strategy.Gossip
]
]
# Use this configuration to use Waffle with our internal object storage engine
# that simulates S3
config :waffle,
storage: Waffle.Storage.S3,
bucket: "uploads",
asset_host: "http://localhost:4000"
# ## SSL Support
#
# In order to use HTTPS in development, a self-signed
@ -90,3 +99,15 @@ config :libcluster,
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.
config :object_storage,
bucket_name: "uploads"
config :ex_aws,
access_key_id: "dev-test-access-key-id",
secret_access_key: "dev-test-secret-access-key"
config :ex_aws, :s3,
scheme: "http://",
host: "localhost",
port: 4000

View file

@ -1,4 +1,4 @@
use Mix.Config
import Config
# Start with test config
import_config "test.exs"

View file

@ -1,4 +1,4 @@
use Mix.Config
import Config
config :core, :email, %{
styles: %{

View file

@ -1,4 +1,4 @@
use Mix.Config
import Config
# For production, don't forget to configure the url host
# to something meaningful, Phoenix uses this information
@ -18,6 +18,7 @@ signing_salt = System.get_env("LIVE_VIEW_SIGNING_SALT")
{:app, AppWeb, true},
{:content, ContentWeb, false},
{:core, Legendary.CoreWeb, false},
{:object_storage, Legendary.ObjectStorageWeb, false}
]
|> Enum.map(fn {otp_app, module, start_server} ->
endpoint = Module.concat(module, "Endpoint")
@ -28,7 +29,9 @@ signing_salt = System.get_env("LIVE_VIEW_SIGNING_SALT")
[]
end
config otp_app, endpoint, [
config otp_app,
endpoint,
[
url: [host: "example.com", port: 80],
http: [
port: String.to_integer(System.get_env("PORT") || "4000"),
@ -57,7 +60,8 @@ database_url = System.get_env("DATABASE_URL")
{:admin, Legendary.Admin.Repo},
{:app, App.Repo},
{:content, Legendary.Content.Repo},
{:core, Legendary.Core.Repo}
{:core, Legendary.Core.Repo},
{:object_storage, Legendary.ObjectStorage.Repo}
]
|> Enum.map(fn {otp_app, repo} ->
config otp_app, repo,
@ -91,7 +95,29 @@ config :libcluster,
kubernetes_node_basename: System.get_env("NAME", "legendary"),
kubernetes_selector: "app=#{System.get_env("NAME", "legendary")}",
kubernetes_namespace: System.get_env("NAMESPACE", "legendary"),
polling_interval: 10_000]]]
polling_interval: 10_000
]
]
]
# Use this configuration to use Waffle with our internal object storage engine
# that simulates S3.
# Fixed: the original was missing the comma after `bucket: "uploads"`, which
# is a syntax error and would make the prod config fail to compile.
config :waffle,
  storage: Waffle.Storage.S3,
  bucket: "uploads",
  asset_host: "https://#{System.get_env("HOSTNAME")}"
config :object_storage,
bucket_name: "uploads"
config :ex_aws,
access_key_id: {:system, "OBJECT_STORAGE_ACCESS_KEY_ID"},
secret_access_key: {:system, "OBJECT_STORAGE_SECRET_ACCESS_KEY"}
config :ex_aws, :s3,
scheme: "https://",
host: {:system, "HOSTNAME"}
# ## Using releases (Elixir v1.9+)
#

View file

@ -1,4 +1,4 @@
use Mix.Config
import Config
# We don't run a server during test. If one is required,
# you can enable the server option below.
@ -8,6 +8,7 @@ use Mix.Config
{:core, Legendary.AuthWeb},
{:content, Legendary.Content},
{:core, Legendary.CoreWeb},
{:object_storage, Legendary.ObjectStorageWeb}
]
|> Enum.map(fn {otp_app, module} ->
config otp_app, Module.concat(module, "Endpoint"),
@ -25,7 +26,8 @@ end)
{:admin, Legendary.Admin.Repo},
{:app, App.Repo},
{:content, Legendary.Content.Repo},
{:core, Legendary.Core.Repo}
{:core, Legendary.Core.Repo},
{:object_storage, Legendary.ObjectStorage.Repo}
]
|> Enum.map(fn {otp_app, repo} ->
config otp_app, repo,
@ -43,3 +45,22 @@ config :content, Oban, crontab: false, queues: false, plugins: false
config :logger, level: :warn
config :libcluster, topologies: []
config :waffle,
storage: Waffle.Storage.Local,
storage_dir_prefix: "priv/test/static/",
asset_host: "http://localhost:4000"
config :object_storage,
bucket_name: "uploads"
config :ex_aws,
access_key_id: "test-access-key-id",
secret_access_key: "test-secret-access-key"
config :ex_aws, :s3,
scheme: "http://",
host: "localhost",
port: 4000
config :object_storage, :signature_generator, Legendary.ObjectStorageWeb.CheckSignatures.MockSignatureGenerator

View file

@ -63,6 +63,16 @@ spec:
secretKeyRef:
name: legendary
key: live-view-signing-salt
- name: OBJECT_STORAGE_ACCESS_KEY_ID
valueFrom:
secretKeyRef:
name: legendary
key: object-storage-access-key-id
- name: OBJECT_STORAGE_SECRET_ACCESS_KEY
valueFrom:
secretKeyRef:
name: legendary
key: object-storage-secret-access-key
- name: SMTP_HOST
valueFrom:
secretKeyRef:

View file

@ -3,6 +3,7 @@
"bamboo_smtp": {:hex, :bamboo_smtp, "3.0.0", "b7f0c371af96a1cb7131908918b02abb228f9db234910bf10cf4fb177c083259", [:mix], [{:bamboo, "~> 1.2", [hex: :bamboo, repo: "hexpm", optional: false]}, {:gen_smtp, "~> 0.15.0", [hex: :gen_smtp, repo: "hexpm", optional: false]}], "hexpm", "77cb1fa3076b24109e54df622161fe1e5619376b4ecf86d8b99b46f327acc49f"},
"bcrypt_elixir": {:hex, :bcrypt_elixir, "1.1.1", "6b5560e47a02196ce5f0ab3f1d8265db79a23868c137e973b27afef928ed8006", [:make, :mix], [{:elixir_make, "~> 0.4", [hex: :elixir_make, repo: "hexpm", optional: false]}], "hexpm", "10f658be786bd2daaadcd45cc5b598da01d5bbc313da4d0e3efb2d6a511d896d"},
"bunt": {:hex, :bunt, "0.2.0", "951c6e801e8b1d2cbe58ebbd3e616a869061ddadcc4863d0a2182541acae9a38", [:mix], [], "hexpm", "7af5c7e09fe1d40f76c8e4f9dd2be7cebd83909f31fee7cd0e9eadc567da8353"},
"castore": {:hex, :castore, "0.1.11", "c0665858e0e1c3e8c27178e73dffea699a5b28eb72239a3b2642d208e8594914", [:mix], [], "hexpm", "91b009ba61973b532b84f7c09ce441cba7aa15cb8b006cf06c6f4bba18220081"},
"certifi": {:hex, :certifi, "2.6.1", "dbab8e5e155a0763eea978c913ca280a6b544bfa115633fa20249c3d396d9493", [:rebar3], [], "hexpm", "524c97b4991b3849dd5c17a631223896272c6b0af446778ba4675a1dff53bb7e"},
"cldr_utils": {:hex, :cldr_utils, "2.16.0", "5abd1835151e264f6f9a285ab8c7419954a45eec5ca5a356dea592faa23e80b9", [:mix], [{:castore, "~> 0.1", [hex: :castore, repo: "hexpm", optional: true]}, {:certifi, "~> 2.5", [hex: :certifi, repo: "hexpm", optional: true]}, {:decimal, "~> 1.9 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}], "hexpm", "3ef5dc0fdfe566a5a4b8bda726cf760ebada69c0600affc4cb02b5e8ae7f7b47"},
"combine": {:hex, :combine, "0.10.0", "eff8224eeb56498a2af13011d142c5e7997a80c8f5b97c499f84c841032e429f", [:mix], [], "hexpm", "1b1dbc1790073076580d0d1d64e42eae2366583e7aecd455d1215b0d16f2451b"},
@ -22,9 +23,13 @@
"ecto_sql": {:hex, :ecto_sql, "3.7.0", "2fcaad4ab0c8d76a5afbef078162806adbe709c04160aca58400d5cbbe8eeac6", [:mix], [{:db_connection, "~> 2.2", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.7.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.4.0 or ~> 0.5.0", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.15.0 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.1", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "a26135dfa1d99bf87a928c464cfa25bba6535a4fe761eefa56077a4febc60f70"},
"elixir_make": {:hex, :elixir_make, "0.6.0", "38349f3e29aff4864352084fc736fa7fa0f2995a819a737554f7ebd28b85aaab", [:mix], [], "hexpm", "d522695b93b7f0b4c0fcb2dfe73a6b905b1c301226a5a55cb42e5b14d509e050"},
"erlex": {:hex, :erlex, "0.2.6", "c7987d15e899c7a2f34f5420d2a2ea0d659682c06ac607572df55a43753aa12e", [:mix], [], "hexpm", "2ed2e25711feb44d52b17d2780eabf998452f6efda104877a3881c2f8c0c0c75"},
"esbuild": {:hex, :esbuild, "0.3.1", "bf6a3783f8677aa93e8e6ee04b79eeceadb29e07255941fab7e50f1e3527f4a8", [:mix], [{:castore, ">= 0.0.0", [hex: :castore, repo: "hexpm", optional: false]}], "hexpm", "342ccd0eb2c64211326580189389d52cdf0f16f5ca22bc0267a66357e269a14a"},
"ex_aws": {:hex, :ex_aws, "2.1.9", "dc4865ecc20a05190a34a0ac5213e3e5e2b0a75a0c2835e923ae7bfeac5e3c31", [:mix], [{:configparser_ex, "~> 4.0", [hex: :configparser_ex, repo: "hexpm", optional: true]}, {:hackney, "~> 1.9", [hex: :hackney, repo: "hexpm", optional: true]}, {:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: true]}, {:jsx, "~> 3.0", [hex: :jsx, repo: "hexpm", optional: true]}, {:sweet_xml, "~> 0.6", [hex: :sweet_xml, repo: "hexpm", optional: true]}], "hexpm", "3e6c776703c9076001fbe1f7c049535f042cb2afa0d2cbd3b47cbc4e92ac0d10"},
"ex_aws_s3": {:hex, :ex_aws_s3, "2.3.0", "5dfe50116bad048240bae7cd9418bfe23296542ff72a01b9138113a1cd31451c", [:mix], [{:ex_aws, "~> 2.0", [hex: :ex_aws, repo: "hexpm", optional: false]}, {:sweet_xml, ">= 0.0.0", [hex: :sweet_xml, repo: "hexpm", optional: true]}], "hexpm", "0b13b11478825d62d2f6e57ae763695331be06f2216468f31bb304316758b096"},
"ex_cldr": {:hex, :ex_cldr, "2.23.2", "76c51b722cefdcd1a13eb5e7c7f4da5b9acfd64ff054424a977ff6e2d6a78981", [:mix], [{:castore, "~> 0.1", [hex: :castore, repo: "hexpm", optional: true]}, {:certifi, "~> 2.5", [hex: :certifi, repo: "hexpm", optional: true]}, {:cldr_utils, "~> 2.15", [hex: :cldr_utils, repo: "hexpm", optional: false]}, {:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:gettext, "~> 0.13", [hex: :gettext, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:nimble_parsec, "~> 0.5 or ~> 1.0", [hex: :nimble_parsec, repo: "hexpm", optional: true]}, {:plug, "~> 1.9", [hex: :plug, repo: "hexpm", optional: true]}], "hexpm", "d9ce03c8d3fdc7ab751bdb2be742b6972f94adc856d51dfe5bb06a51ac96b8f4"},
"ex_doc": {:hex, :ex_doc, "0.25.2", "4f1cae793c4d132e06674b282f1d9ea3bf409bcca027ddb2fe177c4eed6a253f", [:mix], [{:earmark_parser, "~> 1.4.0", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_elixir, "~> 0.14", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1", [hex: :makeup_erlang, repo: "hexpm", optional: false]}], "hexpm", "5b0c172e87ac27f14dfd152d52a145238ec71a95efbf29849550278c58a393d6"},
"ex_prompt": {:hex, :ex_prompt, "0.2.0", "4030424e9a7710e1939d81eea4a82af2e0a1826065adb28d59bc01e919af4a60", [:mix], [], "hexpm", "220ac023d87d529457b87c9db4b40ce542bff93ae2de16c582808c6822dfe3e8"},
"ex_url": {:hex, :ex_url, "1.3.1", "c39b2227c77342ca76f0a4d4d27858726abfebad463023264d3ba4d9549bbf4c", [:mix], [{:ex_cldr, "~> 2.18", [hex: :ex_cldr, repo: "hexpm", optional: true]}, {:ex_phone_number, "~> 0.1", [hex: :ex_phone_number, repo: "hexpm", optional: true]}, {:gettext, "~> 0.13", [hex: :gettext, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.0", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "b4e0d385e2c172643964dc6766c7b23fea85561af1c374759438b07faa9a801d"},
"excoveralls": {:hex, :excoveralls, "0.14.2", "f9f5fd0004d7bbeaa28ea9606251bb643c313c3d60710bad1f5809c845b748f0", [:mix], [{:hackney, "~> 1.16", [hex: :hackney, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "ca6fd358621cb4d29311b29d4732c4d47dac70e622850979bc54ed9a3e50f3e1"},
"file_system": {:hex, :file_system, "0.2.10", "fb082005a9cd1711c05b5248710f8826b02d7d1784e7c3451f9c1231d4fc162d", [:mix], [], "hexpm", "41195edbfb562a593726eda3b3e8b103a309b733ad25f3d642ba49696bf715dc"},
"floki": {:hex, :floki, "0.31.0", "f05ee8a8e6a3ced4e62beeb2c79a63bc8e12ab98fbaaf6e6a3d9b76b1278e23f", [:mix], [{:html_entities, "~> 0.5.0", [hex: :html_entities, repo: "hexpm", optional: false]}], "hexpm", "b05afa372f5c345a5bf240ac25ea1f0f3d5fcfd7490ac0beeb4a203f9444891e"},
@ -53,11 +58,12 @@
"mimerl": {:hex, :mimerl, "1.2.0", "67e2d3f571088d5cfd3e550c383094b47159f3eee8ffa08e64106cdf5e981be3", [:rebar3], [], "hexpm", "f278585650aa581986264638ebf698f8bb19df297f66ad91b18910dfc6e19323"},
"mochiweb": {:hex, :mochiweb, "2.22.0", "f104d6747c01a330c38613561977e565b788b9170055c5241ac9dd6e4617cba5", [:rebar3], [], "hexpm", "cbbd1fd315d283c576d1c8a13e0738f6dafb63dc840611249608697502a07655"},
"mock": {:hex, :mock, "0.3.6", "e810a91fabc7adf63ab5fdbec5d9d3b492413b8cda5131a2a8aa34b4185eb9b4", [:mix], [{:meck, "~> 0.8.13", [hex: :meck, repo: "hexpm", optional: false]}], "hexpm", "bcf1d0a6826fb5aee01bae3d74474669a3fa8b2df274d094af54a25266a1ebd2"},
"mox": {:hex, :mox, "1.0.0", "4b3c7005173f47ff30641ba044eb0fe67287743eec9bd9545e37f3002b0a9f8b", [:mix], [], "hexpm", "201b0a20b7abdaaab083e9cf97884950f8a30a1350a1da403b3145e213c6f4df"},
"neotomex": {:hex, :neotomex, "0.1.7", "64f76513653aa87ea7abdde0fd600e56955d838020a13d88f2bf334c88ac3e7a", [:mix], [], "hexpm", "4b87b8f614d1cd89dc8ba80ba0e559bedb3ebf6f6d74cd774fcfdd215e861445"},
"nimble_parsec": {:hex, :nimble_parsec, "1.1.0", "3a6fca1550363552e54c216debb6a9e95bd8d32348938e13de5eda962c0d7f89", [:mix], [], "hexpm", "08eb32d66b706e913ff748f11694b17981c0b04a33ef470e33e11b3d3ac8f54b"},
"oban": {:hex, :oban, "2.9.1", "e92a96d4ddc3731816e7c6463b8f50f9bfaadc560686a23f10a5aac0fbeb7572", [:mix], [{:ecto_sql, ">= 3.4.3", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.14", [hex: :postgrex, repo: "hexpm", optional: false]}, {:telemetry, "~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "f734b1fe0c2320a624eb03cc9d0036bd41ee6248f332805c68182e7de0a43514"},
"parse_trans": {:hex, :parse_trans, "3.3.1", "16328ab840cc09919bd10dab29e431da3af9e9e7e7e6f0089dd5a2d2820011d8", [:rebar3], [], "hexpm", "07cd9577885f56362d414e8c4c4e6bdf10d43a8767abb92d24cbe8b24c54888b"},
"phoenix": {:hex, :phoenix, "1.6.0", "7b85023f7ddef9a5c70909a51cc37c8b868b474d853f90f4280efd26b0e7cce5", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix_pubsub, "~> 2.0", [hex: :phoenix_pubsub, repo: "hexpm", optional: false]}, {:phoenix_view, "~> 1.0", [hex: :phoenix_view, repo: "hexpm", optional: false]}, {:plug, "~> 1.10", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 2.2", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:plug_crypto, "~> 1.2", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "52ffdd31f2daeb399b2e1eb57d468f99a1ad6eee5d8ea19d2353492f06c9fc96"},
"phoenix": {:hex, :phoenix, "1.6.2", "6cbd5c8ed7a797f25a919a37fafbc2fb1634c9cdb12a4448d7a5d0b26926f005", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix_pubsub, "~> 2.0", [hex: :phoenix_pubsub, repo: "hexpm", optional: false]}, {:phoenix_view, "~> 1.0", [hex: :phoenix_view, repo: "hexpm", optional: false]}, {:plug, "~> 1.10", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 2.2", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:plug_crypto, "~> 1.2", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "7bbee475acae0c3abc229b7f189e210ea788e63bd168e585f60c299a4b2f9133"},
"phoenix_ecto": {:hex, :phoenix_ecto, "4.4.0", "0672ed4e4808b3fbed494dded89958e22fb882de47a97634c0b13e7b0b5f7720", [:mix], [{:ecto, "~> 3.3", [hex: :ecto, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 2.14.2 or ~> 3.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:plug, "~> 1.9", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "09864e558ed31ee00bd48fcc1d4fc58ae9678c9e81649075431e69dbabb43cc1"},
"phoenix_html": {:hex, :phoenix_html, "3.0.4", "232d41884fe6a9c42d09f48397c175cd6f0d443aaa34c7424da47604201df2e1", [:mix], [{:plug, "~> 1.5", [hex: :plug, repo: "hexpm", optional: true]}], "hexpm", "ce17fd3cf815b2ed874114073e743507704b1f5288bb03c304a77458485efc8b"},
"phoenix_html_sanitizer": {:hex, :phoenix_html_sanitizer, "1.1.0", "ea9e1162217621208ba6b2951a24abe2c06b39347f65c22c31312f9f5ac0fa75", [:mix], [{:html_sanitize_ex, "~> 1.1", [hex: :html_sanitize_ex, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 2.0", [hex: :phoenix_html, repo: "hexpm", optional: false]}], "hexpm", "089f28f0592d58f7cf1f032b89c13e873dc73c77a2ccf3386aee976c6ff077c9"},
@ -78,12 +84,15 @@
"slugger": {:hex, :slugger, "0.3.0", "efc667ab99eee19a48913ccf3d038b1fb9f165fa4fbf093be898b8099e61b6ed", [:mix], [], "hexpm", "20d0ded0e712605d1eae6c5b4889581c3460d92623a930ddda91e0e609b5afba"},
"ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.6", "cf344f5692c82d2cd7554f5ec8fd961548d4fd09e7d22f5b62482e5aeaebd4b0", [:make, :mix, :rebar3], [], "hexpm", "bdb0d2471f453c88ff3908e7686f86f9be327d065cc1ec16fa4540197ea04680"},
"swarm": {:hex, :swarm, "3.4.0", "64f8b30055d74640d2186c66354b33b999438692a91be275bb89cdc7e401f448", [:mix], [{:gen_state_machine, "~> 2.0", [hex: :gen_state_machine, repo: "hexpm", optional: false]}, {:libring, "~> 1.0", [hex: :libring, repo: "hexpm", optional: false]}], "hexpm", "94884f84783fc1ba027aba8fe8a7dae4aad78c98e9f9c76667ec3471585c08c6"},
"sweet_xml": {:hex, :sweet_xml, "0.7.1", "a2cac8e2101237e617dfa9d427d44b8aff38ba6294f313ffb4667524d6b71b98", [:mix], [], "hexpm", "8bc7b7b584a6a87113071d0d2fd39fe2251cf2224ecaeed7093bdac1b9c1555f"},
"swoosh": {:hex, :swoosh, "1.5.0", "2be4cfc1be10f2203d1854c85b18d8c7be0321445a782efd53ef0b2b88f03ce4", [:mix], [{:cowboy, "~> 1.1 or ~> 2.4", [hex: :cowboy, repo: "hexpm", optional: true]}, {:finch, "~> 0.6", [hex: :finch, repo: "hexpm", optional: true]}, {:gen_smtp, "~> 0.13 or ~> 1.0", [hex: :gen_smtp, repo: "hexpm", optional: true]}, {:hackney, "~> 1.9", [hex: :hackney, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:mail, "~> 0.2", [hex: :mail, repo: "hexpm", optional: true]}, {:mime, "~> 1.1 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_cowboy, ">= 1.0.0", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.2 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "b53891359e3ddca263ece784051243de84c9244c421a0dee1bff1d52fc5ca420"},
"telemetry": {:hex, :telemetry, "1.0.0", "0f453a102cdf13d506b7c0ab158324c337c41f1cc7548f0bc0e130bbf0ae9452", [:rebar3], [], "hexpm", "73bc09fa59b4a0284efb4624335583c528e07ec9ae76aca96ea0673850aec57a"},
"telemetry_metrics": {:hex, :telemetry_metrics, "0.6.1", "315d9163a1d4660aedc3fee73f33f1d355dcc76c5c3ab3d59e76e3edf80eef1f", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "7be9e0871c41732c233be71e4be11b96e56177bf15dde64a8ac9ce72ac9834c6"},
"telemetry_poller": {:hex, :telemetry_poller, "1.0.0", "db91bb424e07f2bb6e73926fcafbfcbcb295f0193e0a00e825e589a0a47e8453", [:rebar3], [{:telemetry, "~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "b3a24eafd66c3f42da30fc3ca7dda1e9d546c12250a2d60d7b81d264fbec4f6e"},
"timex": {:hex, :timex, "3.7.6", "502d2347ec550e77fdf419bc12d15bdccd31266bb7d925b30bf478268098282f", [:mix], [{:combine, "~> 0.10", [hex: :combine, repo: "hexpm", optional: false]}, {:gettext, "~> 0.10", [hex: :gettext, repo: "hexpm", optional: false]}, {:tzdata, "~> 1.0", [hex: :tzdata, repo: "hexpm", optional: false]}], "hexpm", "a296327f79cb1ec795b896698c56e662ed7210cc9eb31f0ab365eb3a62e2c589"},
"tzdata": {:hex, :tzdata, "1.1.0", "72f5babaa9390d0f131465c8702fa76da0919e37ba32baa90d93c583301a8359", [:mix], [{:hackney, "~> 1.17", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm", "18f453739b48d3dc5bcf0e8906d2dc112bb40baafe2c707596d89f3c8dd14034"},
"unicode_util_compat": {:hex, :unicode_util_compat, "0.7.0", "bc84380c9ab48177092f43ac89e4dfa2c6d62b40b8bd132b1059ecc7232f9a78", [:rebar3], [], "hexpm", "25eee6d67df61960cf6a794239566599b09e17e668d3700247bc498638152521"},
"waffle": {:hex, :waffle, "1.1.5", "11b8b41c9dc46a21c8e1e619e1e9048d18d166b57b33d1fada8e11fcd4e678b3", [:mix], [{:ex_aws, "~> 2.1", [hex: :ex_aws, repo: "hexpm", optional: true]}, {:ex_aws_s3, "~> 2.1", [hex: :ex_aws_s3, repo: "hexpm", optional: true]}, {:hackney, "~> 1.9", [hex: :hackney, repo: "hexpm", optional: false]}, {:sweet_xml, "~> 0.6", [hex: :sweet_xml, repo: "hexpm", optional: true]}], "hexpm", "68e6f92b457b13c71e33cc23f7abb60446a01515dc6618b7d493d8cd466b1f39"},
"xml_builder": {:hex, :xml_builder, "2.2.0", "cc5f1eeefcfcde6e90a9b77fb6c490a20bc1b856a7010ce6396f6da9719cbbab", [:mix], [], "hexpm", "9d66d52fb917565d358166a4314078d39ef04d552904de96f8e73f68f64a62c9"},
"yamerl": {:hex, :yamerl, "0.8.1", "07da13ffa1d8e13948943789665c62ccd679dfa7b324a4a2ed3149df17f453a4", [:rebar3], [], "hexpm", "96cb30f9d64344fed0ef8a92e9f16f207de6c04dfff4f366752ca79f5bceb23f"},
"yaml_elixir": {:hex, :yaml_elixir, "2.8.0", "c7ff0034daf57279c2ce902788ce6fdb2445532eb4317e8df4b044209fae6832", [:mix], [{:yamerl, "~> 0.8", [hex: :yamerl, repo: "hexpm", optional: false]}], "hexpm", "4b674bd881e373d1ac6a790c64b2ecb69d1fd612c2af3b22de1619c15473830b"},