preliminary removal of textile and big migration

This commit is contained in:
Luna D 2021-09-29 22:05:16 +02:00
parent e9fc4d28fb
commit 1739373bf7
No known key found for this signature in database
GPG key ID: 81AF416F2CC36FC8
40 changed files with 196 additions and 1673 deletions

View file

@@ -1,54 +0,0 @@
defmodule Mix.Tasks.ConvertToMarkdown do
  @moduledoc """
  One-off data migration: renders the Textile source of every listed
  schema field into its `<field>_md` companion column.
  """

  use Mix.Task

  import Ecto.Query

  alias Philomena.Repo
  alias Philomena.Batch
  alias PhilomenaWeb.TextileMarkdownRenderer

  # {schema module, Textile source fields}; each field is written to the
  # column named "<field>_md".
  @modules [
    {Philomena.Badges.Badge, [:description]},
    {Philomena.Channels.Channel, [:description]},
    {Philomena.Comments.Comment, [:body]},
    {Philomena.Commissions.Commission, [:contact, :information, :will_create, :will_not_create]},
    {Philomena.Commissions.Item, [:description, :add_ons]},
    {Philomena.Conversations.Message, [:body]},
    {Philomena.DnpEntries.DnpEntry, [:conditions, :reason, :instructions]},
    {Philomena.Filters.Filter, [:description]},
    {Philomena.Galleries.Gallery, [:description]},
    {Philomena.Images.Image, [:description]},
    {Philomena.ModNotes.ModNote, [:body]},
    {Philomena.Posts.Post, [:body]},
    # NOTE(review): was [:report], but the Report schema defines
    # :reason/:reason_md and no :report field, so Map.fetch!/2 would
    # raise KeyError on the first row. Corrected to :reason.
    {Philomena.Reports.Report, [:reason]},
    {Philomena.Tags.Tag, [:description]},
    {Philomena.Users.User, [:description, :scratchpad]}
  ]

  @shortdoc "Rewrites all database rows from Textile to Markdown."

  @requirements ["app.start"]

  @impl Mix.Task
  def run(args) do
    # Guard against accidental production runs; pass
    # --i-know-what-im-doing to override.
    if Mix.env() == :prod and "--i-know-what-im-doing" not in args do
      raise "do not run this task in production unless you know what you're doing"
    end

    # Iteration is purely for side effects, so Enum.each rather than
    # Enum.map (which built and discarded result lists).
    Enum.each(@modules, fn {mod, fields} ->
      Batch.record_batches(mod, fn batch ->
        Enum.each(batch, &convert_record(mod, fields, &1))
      end)

      IO.puts("")
    end)
  end

  # Renders each Textile field of `item` and persists every result in a
  # single UPDATE against the row's `<field>_md` columns.
  defp convert_record(mod, fields, item) do
    updates =
      Enum.map(fields, fn field ->
        rendered = TextileMarkdownRenderer.render_one(%{body: Map.fetch!(item, field)})
        {:"#{field}_md", rendered}
      end)

    mod
    |> where(id: ^item.id)
    |> Repo.update_all(set: updates)

    # \r keeps the progress indicator on one console line per module.
    IO.write("\r#{mod}\t#{item.id}\t")
  end
end

View file

@ -3,9 +3,6 @@ defmodule Philomena.Badges.Badge do
import Ecto.Changeset import Ecto.Changeset
schema "badges" do schema "badges" do
# fixme: unneeded field
field :description_md, :string, default: ""
field :title, :string field :title, :string
field :description, :string, default: "" field :description, :string, default: ""
field :image, :string field :image, :string

View file

@ -11,10 +11,6 @@ defmodule Philomena.Channels.Channel do
# fixme: rails STI # fixme: rails STI
field :type, :string field :type, :string
# fixme: this is unused
field :description, :string
field :description_md, :string
field :short_name, :string field :short_name, :string
field :title, :string, default: "" field :title, :string, default: ""
field :tags, :string field :tags, :string

View file

@ -1,7 +1,6 @@
defmodule Philomena.Comments.Comment do defmodule Philomena.Comments.Comment do
use Ecto.Schema use Ecto.Schema
import Ecto.Changeset import Ecto.Changeset
import Philomena.MarkdownWriter
alias Philomena.Images.Image alias Philomena.Images.Image
alias Philomena.Users.User alias Philomena.Users.User
@ -12,7 +11,6 @@ defmodule Philomena.Comments.Comment do
belongs_to :deleted_by, User belongs_to :deleted_by, User
field :body, :string field :body, :string
field :body_md, :string
field :ip, EctoNetwork.INET field :ip, EctoNetwork.INET
field :fingerprint, :string field :fingerprint, :string
field :user_agent, :string, default: "" field :user_agent, :string, default: ""
@ -36,7 +34,6 @@ defmodule Philomena.Comments.Comment do
|> validate_length(:body, min: 1, max: 300_000, count: :bytes) |> validate_length(:body, min: 1, max: 300_000, count: :bytes)
|> change(attribution) |> change(attribution)
|> put_name_at_post_time(attribution[:user]) |> put_name_at_post_time(attribution[:user])
|> put_markdown(attrs, :body, :body_md)
end end
def changeset(comment, attrs, edited_at \\ nil) do def changeset(comment, attrs, edited_at \\ nil) do
@ -46,7 +43,6 @@ defmodule Philomena.Comments.Comment do
|> validate_required([:body]) |> validate_required([:body])
|> validate_length(:body, min: 1, max: 300_000, count: :bytes) |> validate_length(:body, min: 1, max: 300_000, count: :bytes)
|> validate_length(:edit_reason, max: 70, count: :bytes) |> validate_length(:edit_reason, max: 70, count: :bytes)
|> put_markdown(attrs, :body, :body_md)
end end
def hide_changeset(comment, attrs, user) do def hide_changeset(comment, attrs, user) do
@ -67,7 +63,6 @@ defmodule Philomena.Comments.Comment do
change(comment) change(comment)
|> put_change(:destroyed_content, true) |> put_change(:destroyed_content, true)
|> put_change(:body, "") |> put_change(:body, "")
|> put_change(:body_md, "")
end end
defp put_name_at_post_time(changeset, nil), do: changeset defp put_name_at_post_time(changeset, nil), do: changeset

View file

@ -1,7 +1,6 @@
defmodule Philomena.Commissions.Commission do defmodule Philomena.Commissions.Commission do
use Ecto.Schema use Ecto.Schema
import Ecto.Changeset import Ecto.Changeset
import Philomena.MarkdownWriter
alias Philomena.Commissions.Item alias Philomena.Commissions.Item
alias Philomena.Images.Image alias Philomena.Images.Image
@ -18,10 +17,6 @@ defmodule Philomena.Commissions.Commission do
field :contact, :string field :contact, :string
field :will_create, :string field :will_create, :string
field :will_not_create, :string field :will_not_create, :string
field :information_md, :string
field :contact_md, :string
field :will_create_md, :string
field :will_not_create_md, :string
field :commission_items_count, :integer, default: 0 field :commission_items_count, :integer, default: 0
timestamps(inserted_at: :created_at, type: :utc_datetime) timestamps(inserted_at: :created_at, type: :utc_datetime)
@ -46,10 +41,6 @@ defmodule Philomena.Commissions.Commission do
|> validate_length(:will_create, max: 1000, count: :bytes) |> validate_length(:will_create, max: 1000, count: :bytes)
|> validate_length(:will_not_create, max: 1000, count: :bytes) |> validate_length(:will_not_create, max: 1000, count: :bytes)
|> validate_subset(:categories, Keyword.values(categories())) |> validate_subset(:categories, Keyword.values(categories()))
|> put_markdown(attrs, :information, :information_md)
|> put_markdown(attrs, :contact, :contact_md)
|> put_markdown(attrs, :will_create, :will_create_md)
|> put_markdown(attrs, :will_not_create, :will_not_create_md)
end end
defp drop_blank_categories(changeset) do defp drop_blank_categories(changeset) do

View file

@ -1,7 +1,6 @@
defmodule Philomena.Commissions.Item do defmodule Philomena.Commissions.Item do
use Ecto.Schema use Ecto.Schema
import Ecto.Changeset import Ecto.Changeset
import Philomena.MarkdownWriter
alias Philomena.Commissions.Commission alias Philomena.Commissions.Commission
alias Philomena.Images.Image alias Philomena.Images.Image
@ -12,10 +11,8 @@ defmodule Philomena.Commissions.Item do
field :item_type, :string field :item_type, :string
field :description, :string field :description, :string
field :description_md, :string
field :base_price, :decimal field :base_price, :decimal
field :add_ons, :string field :add_ons, :string
field :add_ons_md, :string
timestamps(inserted_at: :created_at, type: :utc_datetime) timestamps(inserted_at: :created_at, type: :utc_datetime)
end end
@ -30,7 +27,5 @@ defmodule Philomena.Commissions.Item do
|> validate_number(:base_price, greater_than_or_equal_to: 0, less_than_or_equal_to: 99_999) |> validate_number(:base_price, greater_than_or_equal_to: 0, less_than_or_equal_to: 99_999)
|> validate_inclusion(:item_type, Commission.types()) |> validate_inclusion(:item_type, Commission.types())
|> foreign_key_constraint(:example_image_id, name: :fk_rails_56d368749a) |> foreign_key_constraint(:example_image_id, name: :fk_rails_56d368749a)
|> put_markdown(attrs, :description, :description_md)
|> put_markdown(attrs, :add_ons, :add_ons_md)
end end
end end

View file

@ -1,7 +1,6 @@
defmodule Philomena.Conversations.Message do defmodule Philomena.Conversations.Message do
use Ecto.Schema use Ecto.Schema
import Ecto.Changeset import Ecto.Changeset
import Philomena.MarkdownWriter
alias Philomena.Conversations.Conversation alias Philomena.Conversations.Conversation
alias Philomena.Users.User alias Philomena.Users.User
@ -11,7 +10,6 @@ defmodule Philomena.Conversations.Message do
belongs_to :from, User belongs_to :from, User
field :body, :string field :body, :string
field :body_md, :string
timestamps(inserted_at: :created_at, type: :utc_datetime) timestamps(inserted_at: :created_at, type: :utc_datetime)
end end
@ -30,6 +28,5 @@ defmodule Philomena.Conversations.Message do
|> validate_required([:body]) |> validate_required([:body])
|> put_assoc(:from, user) |> put_assoc(:from, user)
|> validate_length(:body, max: 300_000, count: :bytes) |> validate_length(:body, max: 300_000, count: :bytes)
|> put_markdown(attrs, :body, :body_md)
end end
end end

View file

@ -1,7 +1,6 @@
defmodule Philomena.DnpEntries.DnpEntry do defmodule Philomena.DnpEntries.DnpEntry do
use Ecto.Schema use Ecto.Schema
import Ecto.Changeset import Ecto.Changeset
import Philomena.MarkdownWriter
alias Philomena.Tags.Tag alias Philomena.Tags.Tag
alias Philomena.Users.User alias Philomena.Users.User
@ -18,9 +17,6 @@ defmodule Philomena.DnpEntries.DnpEntry do
field :hide_reason, :boolean, default: false field :hide_reason, :boolean, default: false
field :instructions, :string, default: "" field :instructions, :string, default: ""
field :feedback, :string, default: "" field :feedback, :string, default: ""
field :conditions_md, :string, default: ""
field :reason_md, :string, default: ""
field :instructions_md, :string, default: ""
timestamps(inserted_at: :created_at, type: :utc_datetime) timestamps(inserted_at: :created_at, type: :utc_datetime)
end end
@ -39,9 +35,6 @@ defmodule Philomena.DnpEntries.DnpEntry do
|> validate_required([:reason, :dnp_type]) |> validate_required([:reason, :dnp_type])
|> validate_inclusion(:dnp_type, types()) |> validate_inclusion(:dnp_type, types())
|> validate_conditions() |> validate_conditions()
|> put_markdown(attrs, :conditions, :conditions_md)
|> put_markdown(attrs, :reason, :reason_md)
|> put_markdown(attrs, :instructions, :instructions_md)
|> foreign_key_constraint(:tag_id, name: "fk_rails_473a736b4a") |> foreign_key_constraint(:tag_id, name: "fk_rails_473a736b4a")
end end

View file

@ -10,9 +10,6 @@ defmodule Philomena.Filters.Filter do
schema "filters" do schema "filters" do
belongs_to :user, User belongs_to :user, User
# fixme: unneeded field
field :description_md, :string, default: ""
field :name, :string field :name, :string
field :description, :string, default: "" field :description, :string, default: ""
field :system, :boolean field :system, :boolean

View file

@ -14,9 +14,6 @@ defmodule Philomena.Galleries.Gallery do
has_many :subscriptions, Subscription has_many :subscriptions, Subscription
has_many :subscribers, through: [:subscriptions, :user] has_many :subscribers, through: [:subscriptions, :user]
# fixme: unneeded field
field :description_md, :string, default: ""
field :title, :string field :title, :string
field :spoiler_warning, :string, default: "" field :spoiler_warning, :string, default: ""
field :description, :string, default: "" field :description, :string, default: ""

View file

@ -3,7 +3,6 @@ defmodule Philomena.Images.Image do
import Ecto.Changeset import Ecto.Changeset
import Ecto.Query import Ecto.Query
import Philomena.MarkdownWriter
alias Philomena.ImageIntensities.ImageIntensity alias Philomena.ImageIntensities.ImageIntensity
alias Philomena.ImageVotes.ImageVote alias Philomena.ImageVotes.ImageVote
@ -65,7 +64,6 @@ defmodule Philomena.Images.Image do
field :votes_count, :integer, default: 0 field :votes_count, :integer, default: 0
field :source_url, :string field :source_url, :string
field :description, :string, default: "" field :description, :string, default: ""
field :description_md, :string, default: ""
field :image_sha512_hash, :string field :image_sha512_hash, :string
field :image_orig_sha512_hash, :string field :image_orig_sha512_hash, :string
field :deletion_reason, :string field :deletion_reason, :string
@ -82,7 +80,6 @@ defmodule Philomena.Images.Image do
field :destroyed_content, :boolean field :destroyed_content, :boolean
field :hidden_image_key, :string field :hidden_image_key, :string
field :scratchpad, :string field :scratchpad, :string
field :scratchpad_md, :string
field :hides_count, :integer, default: 0 field :hides_count, :integer, default: 0
# todo: can probably remove these now # todo: can probably remove these now
@ -123,7 +120,6 @@ defmodule Philomena.Images.Image do
|> change(first_seen_at: now) |> change(first_seen_at: now)
|> change(attribution) |> change(attribution)
|> validate_length(:description, max: 50_000, count: :bytes) |> validate_length(:description, max: 50_000, count: :bytes)
|> put_markdown(attrs, :description, :description_md)
|> validate_format(:source_url, ~r/\Ahttps?:\/\//) |> validate_format(:source_url, ~r/\Ahttps?:\/\//)
end end
@ -220,7 +216,6 @@ defmodule Philomena.Images.Image do
image image
|> cast(attrs, [:description]) |> cast(attrs, [:description])
|> validate_length(:description, max: 50_000, count: :bytes) |> validate_length(:description, max: 50_000, count: :bytes)
|> put_markdown(attrs, :description, :description_md)
end end
def hide_changeset(image, attrs, user) do def hide_changeset(image, attrs, user) do
@ -275,7 +270,6 @@ defmodule Philomena.Images.Image do
def scratchpad_changeset(image, attrs) do def scratchpad_changeset(image, attrs) do
cast(image, attrs, [:scratchpad]) cast(image, attrs, [:scratchpad])
|> put_markdown(attrs, :scratchpad, :scratchpad_md)
end end
def remove_source_history_changeset(image) do def remove_source_history_changeset(image) do

View file

@ -7,7 +7,7 @@ defmodule Philomena.Markdown do
def to_html_unsafe(text, replacements), def to_html_unsafe(text, replacements),
do: Philomena.Native.markdown_to_html_unsafe(text, replacements) do: Philomena.Native.markdown_to_html_unsafe(text, replacements)
def escape_markdown(text) do def escape(text) do
@markdown_chars @markdown_chars
|> Regex.replace(text, fn m -> |> Regex.replace(text, fn m ->
"\\#{m}" "\\#{m}"

View file

@@ -1,12 +0,0 @@
defmodule Philomena.MarkdownWriter do
  @moduledoc """
  Changeset helper that mirrors a Textile source field into its Markdown
  companion column.
  """

  import Ecto.Changeset

  alias PhilomenaWeb.TextileMarkdownRenderer

  @doc """
  Renders the Textile value found under `field` in `attrs` (atom or
  string key; missing/nil falls back to `""`) and stores the result in
  the `field_md` column of the given changeset.
  """
  def put_markdown(obj, attrs, field, field_md) do
    source = attrs[field] || attrs[to_string(field)] || ""
    rendered = TextileMarkdownRenderer.render_one(%{body: source})
    put_change(obj, field_md, rendered)
  end
end

View file

@ -1,7 +1,6 @@
defmodule Philomena.ModNotes.ModNote do defmodule Philomena.ModNotes.ModNote do
use Ecto.Schema use Ecto.Schema
import Ecto.Changeset import Ecto.Changeset
import Philomena.MarkdownWriter
alias Philomena.Users.User alias Philomena.Users.User
@ -13,7 +12,6 @@ defmodule Philomena.ModNotes.ModNote do
field :notable_type, :string field :notable_type, :string
field :body, :string field :body, :string
field :body_md, :string
field :notable, :any, virtual: true field :notable, :any, virtual: true
@ -26,6 +24,5 @@ defmodule Philomena.ModNotes.ModNote do
|> cast(attrs, [:notable_id, :notable_type, :body]) |> cast(attrs, [:notable_id, :notable_type, :body])
|> validate_required([:notable_id, :notable_type, :body]) |> validate_required([:notable_id, :notable_type, :body])
|> validate_inclusion(:notable_type, ["User", "Report", "DnpEntry"]) |> validate_inclusion(:notable_type, ["User", "Report", "DnpEntry"])
|> put_markdown(attrs, :body, :body_md)
end end
end end

View file

@ -1,7 +1,6 @@
defmodule Philomena.Posts.Post do defmodule Philomena.Posts.Post do
use Ecto.Schema use Ecto.Schema
import Ecto.Changeset import Ecto.Changeset
import Philomena.MarkdownWriter
alias Philomena.Users.User alias Philomena.Users.User
alias Philomena.Topics.Topic alias Philomena.Topics.Topic
@ -12,7 +11,6 @@ defmodule Philomena.Posts.Post do
belongs_to :deleted_by, User belongs_to :deleted_by, User
field :body, :string field :body, :string
field :body_md, :string
field :edit_reason, :string field :edit_reason, :string
field :ip, EctoNetwork.INET field :ip, EctoNetwork.INET
field :fingerprint, :string field :fingerprint, :string
@ -37,7 +35,6 @@ defmodule Philomena.Posts.Post do
|> validate_required([:body]) |> validate_required([:body])
|> validate_length(:body, min: 1, max: 300_000, count: :bytes) |> validate_length(:body, min: 1, max: 300_000, count: :bytes)
|> validate_length(:edit_reason, max: 70, count: :bytes) |> validate_length(:edit_reason, max: 70, count: :bytes)
|> put_markdown(attrs, :body, :body_md)
end end
@doc false @doc false
@ -48,7 +45,6 @@ defmodule Philomena.Posts.Post do
|> validate_length(:body, min: 1, max: 300_000, count: :bytes) |> validate_length(:body, min: 1, max: 300_000, count: :bytes)
|> change(attribution) |> change(attribution)
|> put_name_at_post_time(attribution[:user]) |> put_name_at_post_time(attribution[:user])
|> put_markdown(attrs, :body, :body_md)
end end
@doc false @doc false
@ -61,7 +57,6 @@ defmodule Philomena.Posts.Post do
|> change(attribution) |> change(attribution)
|> change(topic_position: 0) |> change(topic_position: 0)
|> put_name_at_post_time(attribution[:user]) |> put_name_at_post_time(attribution[:user])
|> put_markdown(attrs, :body, :body_md)
end end
def hide_changeset(post, attrs, user) do def hide_changeset(post, attrs, user) do
@ -82,7 +77,6 @@ defmodule Philomena.Posts.Post do
change(post) change(post)
|> put_change(:destroyed_content, true) |> put_change(:destroyed_content, true)
|> put_change(:body, "") |> put_change(:body, "")
|> put_change(:body_md, "")
end end
defp put_name_at_post_time(changeset, nil), do: changeset defp put_name_at_post_time(changeset, nil), do: changeset

View file

@ -1,7 +1,6 @@
defmodule Philomena.Reports.Report do defmodule Philomena.Reports.Report do
use Ecto.Schema use Ecto.Schema
import Ecto.Changeset import Ecto.Changeset
import Philomena.MarkdownWriter
alias Philomena.Users.User alias Philomena.Users.User
@ -14,7 +13,6 @@ defmodule Philomena.Reports.Report do
field :user_agent, :string, default: "" field :user_agent, :string, default: ""
field :referrer, :string, default: "" field :referrer, :string, default: ""
field :reason, :string field :reason, :string
field :reason_md, :string
field :state, :string, default: "open" field :state, :string, default: "open"
field :open, :boolean, default: true field :open, :boolean, default: true
@ -80,11 +78,9 @@ defmodule Philomena.Reports.Report do
defp merge_category(changeset) do defp merge_category(changeset) do
reason = get_field(changeset, :reason) reason = get_field(changeset, :reason)
category = get_field(changeset, :category) category = get_field(changeset, :category)
new_reason = joiner(category, reason)
changeset changeset
|> change(reason: new_reason) |> change(reason: joiner(category, reason))
|> put_markdown(%{reason: new_reason}, :reason, :reason_md)
end end
defp joiner(category, ""), do: category defp joiner(category, ""), do: category

View file

@ -2,7 +2,6 @@ defmodule Philomena.Tags.Tag do
use Ecto.Schema use Ecto.Schema
import Ecto.Changeset import Ecto.Changeset
import Ecto.Query import Ecto.Query
import Philomena.MarkdownWriter
alias Philomena.Channels.Channel alias Philomena.Channels.Channel
alias Philomena.DnpEntries.DnpEntry alias Philomena.DnpEntries.DnpEntry
@ -78,7 +77,6 @@ defmodule Philomena.Tags.Tag do
field :category, :string field :category, :string
field :images_count, :integer, default: 0 field :images_count, :integer, default: 0
field :description, :string field :description, :string
field :description_md, :string
field :short_description, :string field :short_description, :string
field :namespace, :string field :namespace, :string
field :name_in_namespace, :string field :name_in_namespace, :string
@ -101,7 +99,6 @@ defmodule Philomena.Tags.Tag do
|> cast(attrs, [:category, :description, :short_description, :mod_notes]) |> cast(attrs, [:category, :description, :short_description, :mod_notes])
|> put_change(:implied_tag_list, Enum.map_join(tag.implied_tags, ",", & &1.name)) |> put_change(:implied_tag_list, Enum.map_join(tag.implied_tags, ",", & &1.name))
|> validate_required([]) |> validate_required([])
|> put_markdown(attrs, :description, :description_md)
end end
def changeset(tag, attrs, implied_tags) do def changeset(tag, attrs, implied_tags) do
@ -109,7 +106,6 @@ defmodule Philomena.Tags.Tag do
|> cast(attrs, [:category, :description, :short_description, :mod_notes]) |> cast(attrs, [:category, :description, :short_description, :mod_notes])
|> put_assoc(:implied_tags, implied_tags) |> put_assoc(:implied_tags, implied_tags)
|> validate_required([]) |> validate_required([])
|> put_markdown(attrs, :description, :description_md)
end end
def image_changeset(tag, attrs) do def image_changeset(tag, attrs) do

View file

@@ -1,252 +0,0 @@
defmodule Philomena.Textile.Lexer do
# Tokenizer for the site's legacy Textile markup, written as a
# NimbleParsec grammar.  `lex/1` converts an input binary into a flat
# list of tagged tokens (e.g. `{:b_delim, "**"}`, `{:char, ?a}`) that
# Philomena.Textile.Parser assembles into HTML.
import NimbleParsec
# Codepoints that may delimit markup and therefore must not be absorbed
# into `quicktxt` runs: control characters and most ASCII punctuation.
token_list =
Enum.to_list(0x01..0x29) ++
Enum.to_list(0x2B..0x2F) ++
':;<=>?[]\\^`~|'
# Horizontal whitespace, including common Unicode space codepoints.
space_list = '\f \r\t\u00a0\u1680\u180e\u202f\u205f\u3000' ++ Enum.to_list(0x2000..0x200A)
space = utf8_char(space_list)
# A space, a newline, or end of input — terminates bare URLs below.
extended_space =
choice([
space,
string("\n"),
eos()
])
space_token =
space
|> unwrap_and_tag(:space)
# Two newlines separated only by spaces — a paragraph break.
double_newline =
string("\n")
|> repeat(space)
|> string("\n")
|> reduce({List, :to_string, []})
|> unwrap_and_tag(:double_newline)
newline =
string("\n")
|> unwrap_and_tag(:newline)
# Punctuation that ends a bare (unbracketed) URL.
link_ending_characters = utf8_char('@#$%&(),.:;<=?\\`|\'')
bracket_link_ending_characters = utf8_char('" []')
end_of_link =
choice([
concat(link_ending_characters, extended_space),
string("[/"),
extended_space
])
# `[== ... ==]` literal span; the delimiters are dropped from output.
bracketed_literal =
ignore(string("[=="))
|> repeat(lookahead_not(string("==]")) |> utf8_char([]))
|> ignore(string("==]"))
# `== ... ==` unbracketed literal span.
unbracketed_literal =
ignore(string("=="))
|> repeat(lookahead_not(string("==")) |> utf8_char([]))
|> ignore(string("=="))
literal =
choice([
bracketed_literal,
unbracketed_literal
])
|> reduce({List, :to_string, []})
|> unwrap_and_tag(:literal)
# Blockquote delimiters: `[bq="cite"]`, `[bq]`, `[/bq]`.
bq_cite_start =
string("[bq=\"")
|> unwrap_and_tag(:bq_cite_start)
bq_cite_open =
string("\"]")
|> unwrap_and_tag(:bq_cite_open)
bq_open =
string("[bq]")
|> unwrap_and_tag(:bq_open)
bq_close =
string("[/bq]")
|> unwrap_and_tag(:bq_close)
spoiler_open =
string("[spoiler]")
|> unwrap_and_tag(:spoiler_open)
spoiler_close =
string("[/spoiler]")
|> unwrap_and_tag(:spoiler_close)
# URL schemes accepted for images: relative (`/`, `//`) or http(s).
image_url_scheme =
choice([
string("//"),
string("/"),
string("https://"),
string("http://")
])
# Links additionally accept fragment-only (`#...`) targets.
link_url_scheme =
choice([
string("#"),
image_url_scheme
])
# Recursive rule so a bare URL may contain balanced parentheses.
defparsec(
:unbracketed_url_inside,
choice([
string("(") |> parsec(:unbracketed_url_inside) |> string(")"),
lookahead_not(end_of_link) |> utf8_char([])
])
|> repeat()
)
unbracketed_url =
string(":")
|> concat(link_url_scheme)
|> parsec(:unbracketed_url_inside)
unbracketed_image_url =
unbracketed_url
|> reduce({List, :to_string, []})
|> unwrap_and_tag(:unbracketed_image_url)
unbracketed_link_url =
string("\"")
|> concat(unbracketed_url)
|> reduce({List, :to_string, []})
|> unwrap_and_tag(:unbracketed_link_url)
# `!url!` inline image, optionally followed by a `:link` target.
unbracketed_image =
ignore(string("!"))
|> concat(image_url_scheme)
|> repeat(utf8_char(not: ?!))
|> ignore(string("!"))
|> reduce({List, :to_string, []})
|> unwrap_and_tag(:unbracketed_image)
|> concat(optional(unbracketed_image_url))
# `[!url!]` bracketed image, optionally followed by a `:link` target.
bracketed_image =
ignore(string("[!"))
|> concat(image_url_scheme)
|> repeat(lookahead_not(string("!]")) |> utf8_char([]))
|> ignore(string("!]"))
|> reduce({List, :to_string, []})
|> unwrap_and_tag(:bracketed_image)
|> concat(optional(unbracketed_image_url))
link_delim =
string("\"")
|> unwrap_and_tag(:link_delim)
bracketed_link_open =
string("[\"")
|> unwrap_and_tag(:bracketed_link_open)
bracketed_link_url =
string("\":")
|> concat(link_url_scheme)
|> repeat(lookahead_not(bracket_link_ending_characters) |> utf8_char([]))
|> ignore(string("]"))
|> reduce({List, :to_string, []})
|> unwrap_and_tag(:bracketed_link_url)
# Bracketed inline-formatting delimiters, e.g. `[**bold**]`.
bracketed_b_open = string("[**") |> unwrap_and_tag(:bracketed_b_open)
bracketed_i_open = string("[__") |> unwrap_and_tag(:bracketed_i_open)
bracketed_strong_open = string("[*") |> unwrap_and_tag(:bracketed_strong_open)
bracketed_em_open = string("[_") |> unwrap_and_tag(:bracketed_em_open)
bracketed_code_open = string("[@") |> unwrap_and_tag(:bracketed_code_open)
bracketed_ins_open = string("[+") |> unwrap_and_tag(:bracketed_ins_open)
bracketed_sup_open = string("[^") |> unwrap_and_tag(:bracketed_sup_open)
bracketed_del_open = string("[-") |> unwrap_and_tag(:bracketed_del_open)
bracketed_sub_open = string("[~") |> unwrap_and_tag(:bracketed_sub_open)
bracketed_b_close = string("**]") |> unwrap_and_tag(:bracketed_b_close)
bracketed_i_close = string("__]") |> unwrap_and_tag(:bracketed_i_close)
bracketed_strong_close = string("*]") |> unwrap_and_tag(:bracketed_strong_close)
bracketed_em_close = string("_]") |> unwrap_and_tag(:bracketed_em_close)
bracketed_code_close = string("@]") |> unwrap_and_tag(:bracketed_code_close)
bracketed_ins_close = string("+]") |> unwrap_and_tag(:bracketed_ins_close)
bracketed_sup_close = string("^]") |> unwrap_and_tag(:bracketed_sup_close)
bracketed_del_close = string("-]") |> unwrap_and_tag(:bracketed_del_close)
bracketed_sub_close = string("~]") |> unwrap_and_tag(:bracketed_sub_close)
# Bare inline-formatting delimiters, e.g. `**bold**`.
b_delim = string("**") |> unwrap_and_tag(:b_delim)
i_delim = string("__") |> unwrap_and_tag(:i_delim)
strong_delim = string("*") |> unwrap_and_tag(:strong_delim)
em_delim = string("_") |> unwrap_and_tag(:em_delim)
code_delim = string("@") |> unwrap_and_tag(:code_delim)
ins_delim = string("+") |> unwrap_and_tag(:ins_delim)
# `^` is a sup delimiter only when not doubled (`^^`).
sup_delim = lookahead_not(string("^"), string("^")) |> unwrap_and_tag(:sup_delim)
sub_delim = string("~") |> unwrap_and_tag(:sub_delim)
# `-` is a del delimiter only when not part of `--` or `->`.
del_delim =
lookahead_not(string("-"), choice([string("-"), string(">")])) |> unwrap_and_tag(:del_delim)
# A run character: anything that is not whitespace, markup punctuation,
# or a newline.
quicktxt =
utf8_char(Enum.map(space_list ++ token_list ++ '\n', fn c -> {:not, c} end))
|> unwrap_and_tag(:quicktxt)
# Catch-all: any single character.
char =
utf8_char([])
|> unwrap_and_tag(:char)
# Master token rule.  NOTE: choice order is load-bearing — longer and
# more specific tokens must be tried before their shorter prefixes
# (e.g. `**` before `*`, `[**` before `[*`, `char` last).
textile =
choice([
literal,
double_newline,
newline,
space_token,
bq_cite_start,
bq_cite_open,
bq_open,
bq_close,
spoiler_open,
spoiler_close,
unbracketed_image,
bracketed_image,
bracketed_link_open,
bracketed_link_url,
unbracketed_link_url,
link_delim,
bracketed_b_open,
bracketed_i_open,
bracketed_strong_open,
bracketed_em_open,
bracketed_code_open,
bracketed_ins_open,
bracketed_sup_open,
bracketed_del_open,
bracketed_sub_open,
bracketed_b_close,
bracketed_i_close,
bracketed_strong_close,
bracketed_em_close,
bracketed_code_close,
bracketed_ins_close,
bracketed_sup_close,
bracketed_del_close,
bracketed_sub_close,
b_delim,
i_delim,
strong_delim,
em_delim,
code_delim,
ins_delim,
sup_delim,
del_delim,
sub_delim,
quicktxt,
char
])
|> repeat()
|> eos()
# Entry point: tokenize the whole input or fail.
defparsec(:lex, textile)
end

View file

@@ -1,480 +0,0 @@
defmodule Philomena.Textile.Parser do
alias Philomena.Textile.Lexer
alias Phoenix.HTML
# Tokenizes `input` (nil is treated as ""), runs the top-level `textile`
# rule to exhaustion, and returns the partially flattened tree.
# Any lexer or parser failure yields [].
def parse(parser, input) do
  parser = Map.put(parser, :state, %{})
  trimmed = String.trim(input || "")

  case Lexer.lex(trimmed) do
    {:ok, tokens, _rest, _context, _line, _offset} ->
      case repeat(&textile/2, parser, tokens) do
        {:ok, tree, []} -> partial_flatten(tree)
        _ -> []
      end

    _ ->
      []
  end
end
# Helper to turn a parse tree into a string
# Flattens a (possibly nested) parse tree into a single output string by
# concatenating every tagged value in order.
def flatten(tree) do
  tree
  |> List.flatten()
  |> Enum.map(fn {_tag, value} -> value end)
  |> Enum.join()
end
# Helper to escape HTML
# Escapes HTML-sensitive characters and unwraps the safe tuple back to a
# plain binary.
defp escape(text) do
  HTML.safe_to_string(HTML.html_escape(text))
end
# Helper to turn a parse tree into a list
# Flattens a parse tree into a list of {type, string} pairs, merging
# adjacent nodes that share the same tag.
def partial_flatten(tree) do
  tree
  |> List.flatten()
  |> Enum.chunk_by(&elem(&1, 0))
  |> Enum.map(fn [{type, _} | _] = chunk ->
    {type, Enum.map_join(chunk, "", &elem(&1, 1))}
  end)
end
# Flags `new_state` as active in the parser's state map; rules consult
# this to avoid recursing into themselves.
defp put_state(parser, new_state) do
  Map.update!(parser, :state, &Map.put(&1, new_state, true))
end
# Helper corresponding to Kleene star (*) operator
# Match a specificed rule zero or more times
# Kleene star: applies `rule` zero or more times, accumulating subtrees.
# Never fails — when `rule` stops matching, the tokens matched so far
# are returned along with the untouched remainder.
defp repeat(rule, parser, tokens) do
  case rule.(parser, tokens) do
    {:ok, head, rest} ->
      {:ok, tail, final_rest} = repeat(rule, parser, rest)
      {:ok, [head, tail], final_rest}

    _no_match ->
      {:ok, [], tokens}
  end
end
# Helper to match a simple recursive grammar rule of the following form:
#
# open_token callback* close_token
#
# Matches `open_token callback* close_token`.  On a balanced match the
# subtree is wrapped in `open_tag`/`close_tag` markup; when the close
# token never arrives, the opening token is downgraded to escaped text
# so the input is never rejected.
defp simple_recursive(open_token, close_token, open_tag, close_tag, callback, parser, [
{open_token, open} | r_tokens
]) do
case repeat(callback, parser, r_tokens) do
{:ok, tree, [{^close_token, _} | r2_tokens]} ->
{:ok, [{:markup, open_tag}, tree, {:markup, close_tag}], r2_tokens}
{:ok, tree, r2_tokens} ->
{:ok, [{:text, escape(open)}, tree], r2_tokens}
end
end
# Fallback clause: the token stream does not begin with `open_token`.
defp simple_recursive(
_open_token,
_close_token,
_open_tag,
_close_tag,
_callback,
_parser,
_tokens
) do
{:error, "Expected a simple recursive rule"}
end
# Helper to match a simple recursive grammar rule with negative lookahead:
#
# open_token callback* close_token (?!lookahead_not)
#
# Matches `open_token callback* close_token (?!lookahead_not)`, tracking
# `state` in the parser so the same rule cannot recurse into itself.
defp simple_lookahead_not(
open_token,
close_token,
open_tag,
close_tag,
lookahead_not,
callback,
state,
parser,
[{open_token, open} | r_tokens]
) do
case parser.state do
# This rule is already active — refuse to nest (prevents e.g. bold
# inside bold from looping forever).
%{^state => _} ->
{:error, "End of rule"}
_ ->
case r_tokens do
# Opening delimiter immediately followed by whitespace or a
# newline: treat it as literal text instead of markup.
[{forbidden_lookahead, _la} | _] when forbidden_lookahead in [:space, :newline] ->
{:ok, [{:text, escape(open)}], r_tokens}
_ ->
case repeat(callback, put_state(parser, state), r_tokens) do
# Close token immediately followed by the forbidden lookahead
# token: emit both delimiters as escaped text and push the
# lookahead token back onto the stream.
{:ok, tree, [{^close_token, close}, {^lookahead_not, ln} | r2_tokens]} ->
{:ok, [{:text, escape(open)}, tree, {:text, escape(close)}],
[{lookahead_not, ln} | r2_tokens]}
# Balanced match: wrap the subtree in markup.
{:ok, tree, [{^close_token, _} | r2_tokens]} ->
{:ok, [{:markup, open_tag}, tree, {:markup, close_tag}], r2_tokens}
# No close token: the opener degrades to escaped text.
{:ok, tree, r2_tokens} ->
{:ok, [{:text, escape(open)}, tree], r2_tokens}
end
end
end
end
# Fallback clause: the token stream does not begin with `open_token`.
defp simple_lookahead_not(
_open_token,
_close_token,
_open_tag,
_close_tag,
_lookahead_not,
_callback,
_state,
_parser,
_tokens
) do
{:error, "Expected a simple lookahead not rule"}
end
# Helper to efficiently assemble a UTF-8 binary from tokens of the
# given type
# Concatenates the leading run of tokens tagged `token_type` into a
# single UTF-8 binary; returns {binary, remaining_tokens}.
defp assemble_binary(token_type, acc, [{token_type, codepoint} | rest]) do
  assemble_binary(token_type, acc <> <<codepoint::utf8>>, rest)
end

defp assemble_binary(_token_type, acc, remaining), do: {acc, remaining}
#
# inline_textile_element =
# opening_markup inline_textile_element* closing_markup (?!quicktxt) |
# closing_markup (?=quicktxt) |
# link_delim block_textile_element* link_url |
# image url? |
# code_delim inline_textile_element* code_delim |
# inline_textile_element_not_opening_markup;
#
# Tries each simple paired delimiter (bold, italic, …) in turn via
# simple_lookahead_not/9; the first one that matches wins.  When none
# match, falls through to inner_inline_textile_element/2 for the more
# specialized forms (links, images, code, fallthrough text).
defp inline_textile_element(parser, tokens) do
[
# {token tag, recursion-guard state key, open tag, close tag}
{:b_delim, :b, "<b>", "</b>"},
{:i_delim, :i, "<i>", "</i>"},
{:strong_delim, :strong, "<strong>", "</strong>"},
{:em_delim, :em, "<em>", "</em>"},
{:ins_delim, :ins, "<ins>", "</ins>"},
{:sup_delim, :sup, "<sup>", "</sup>"},
{:del_delim, :del, "<del>", "</del>"},
{:sub_delim, :sub, "<sub>", "</sub>"}
]
|> Enum.find_value(fn {delim_token, state, open_tag, close_tag} ->
simple_lookahead_not(
delim_token,
delim_token,
open_tag,
close_tag,
:quicktxt,
&inline_textile_element/2,
state,
parser,
tokens
)
|> case do
{:ok, tree, r_tokens} ->
{:ok, tree, r_tokens}
_ ->
nil
end
end)
|> case do
nil -> inner_inline_textile_element(parser, tokens)
value -> value
end
end
# A formatting delimiter directly followed by word text: emit the
# delimiter as escaped literal text and resume parsing from the word
# character (e.g. a lone `*` mid-sentence).
defp inner_inline_textile_element(parser, [{token, t}, {:quicktxt, q} | r_tokens])
when token in [
:b_delim,
:i_delim,
:strong_delim,
:em_delim,
:ins_delim,
:sup_delim,
:del_delim,
:sub_delim
] do
case inline_textile_element(parser, [{:quicktxt, q} | r_tokens]) do
{:ok, tree, r2_tokens} ->
{:ok, [{:text, escape(t)}, tree], r2_tokens}
_ ->
{:ok, [{:text, escape(t)}], [{:quicktxt, q} | r_tokens]}
end
end
# `"text":url` link.  The `":` prefix is stripped from the captured URL
# token; without a URL the opening quote degrades to text.
defp inner_inline_textile_element(parser, [{:link_delim, open} | r_tokens]) do
case repeat(&block_textile_element/2, parser, r_tokens) do
{:ok, tree, [{:unbracketed_link_url, <<"\":", url::binary>>} | r2_tokens]} ->
href = escape(url)
{:ok,
[{:markup, "<a href=\""}, {:markup, href}, {:markup, "\">"}, tree, {:markup, "</a>"}],
r2_tokens}
{:ok, tree, r2_tokens} ->
{:ok, [{:text, escape(open)}, tree], r2_tokens}
end
end
# `["text":url]` bracketed link — same shape as above but the body is
# restricted to inline elements.
defp inner_inline_textile_element(parser, [{:bracketed_link_open, open} | r_tokens]) do
case repeat(&inline_textile_element/2, parser, r_tokens) do
{:ok, tree, [{:bracketed_link_url, <<"\":", url::binary>>} | r2_tokens]} ->
href = escape(url)
{:ok,
[{:markup, "<a href=\""}, {:markup, href}, {:markup, "\">"}, tree, {:markup, "</a>"}],
r2_tokens}
{:ok, tree, r2_tokens} ->
{:ok, [{:text, escape(open)}, tree], r2_tokens}
end
end
# Image followed by a `:url` target: render as a spoilered <img> wrapped
# in an anchor.  The raw image URL is passed through the caller-supplied
# parser.image_transform function first.
defp inner_inline_textile_element(parser, [
{token, img},
{:unbracketed_image_url, <<":", url::binary>>} | r_tokens
])
when token in [:unbracketed_image, :bracketed_image] do
img = parser.image_transform.(img)
{:ok,
[
{:markup, "<a href=\""},
{:markup, escape(url)},
{:markup, "\"><span class=\"imgspoiler\"><img src=\""},
{:markup, escape(img)},
{:markup, "\"/></span></a>"}
], r_tokens}
end
# Image with no link target: spoilered <img> only.
defp inner_inline_textile_element(parser, [{token, img} | r_tokens])
when token in [:unbracketed_image, :bracketed_image] do
img = parser.image_transform.(img)
{:ok,
[
{:markup, "<span class=\"imgspoiler\"><img src=\""},
{:markup, escape(img)},
{:markup, "\"/></span>"}
], r_tokens}
end
# `@code@` span.  The :code state key blocks nested code spans; an
# unterminated `@` degrades to escaped text.
defp inner_inline_textile_element(parser, [{:code_delim, open} | r_tokens]) do
case parser.state do
%{code: _} ->
{:error, "End of rule"}
_ ->
case repeat(&inline_textile_element/2, put_state(parser, :code), r_tokens) do
{:ok, tree, [{:code_delim, _} | r2_tokens]} ->
{:ok, [{:markup, "<code>"}, tree, {:markup, "</code>"}], r2_tokens}
{:ok, tree, r2_tokens} ->
{:ok, [{:text, escape(open)}, tree], r2_tokens}
end
end
end
# Fallback: delegate to the non-opening-markup rule (defined below,
# outside this view).
defp inner_inline_textile_element(parser, tokens) do
inline_textile_element_not_opening_markup(parser, tokens)
end
#
# bq_cite_text = (?!bq_cite_open);
#
# Note that text is not escaped here because it will be escaped
# when the tree is flattened
# Clause order matters: the cite-open token must be rejected before the
# catch-all clause so that the citation text stops at the open marker.
defp bq_cite_text(_parser, [{:bq_cite_open, _open} | _rest]) do
{:error, "Expected cite tokens"}
end
defp bq_cite_text(_parser, [{:char, lit} | r_tokens]) do
{:ok, [{:text, <<lit::utf8>>}], r_tokens}
end
defp bq_cite_text(_parser, [{:quicktxt, lit} | r_tokens]) do
{:ok, [{:text, <<lit::utf8>>}], r_tokens}
end
defp bq_cite_text(_parser, [{:space, _} | r_tokens]) do
{:ok, [{:text, " "}], r_tokens}
end
# Any other token contributes its raw text to the citation.
defp bq_cite_text(_parser, [{_token, t} | r_tokens]) do
{:ok, [{:text, t}], r_tokens}
end
# End of token stream: no citation text available.
defp bq_cite_text(_parser, _tokens) do
{:error, "Expected cite tokens"}
end
#
# inline_textile_element_not_opening_markup =
# literal | space | char |
# quicktxt opening_markup quicktxt |
# quicktxt |
# opening_block_tag block_textile_element* closing_block_tag;
#
# Literal (== ... ==) content: rendered escaped inside a marker span.
defp inline_textile_element_not_opening_markup(_parser, [{:literal, lit} | r_tokens]) do
{:ok, [{:markup, "<span class=\"literal\">"}, {:markup, escape(lit)}, {:markup, "</span>"}],
r_tokens}
end
defp inline_textile_element_not_opening_markup(_parser, [{:space, _} | r_tokens]) do
{:ok, [{:text, " "}], r_tokens}
end
# Runs of :char tokens are assembled into a single binary before escaping,
# avoiding one tree node per character.
defp inline_textile_element_not_opening_markup(_parser, [{:char, lit} | r_tokens]) do
{binary, r2_tokens} = assemble_binary(:char, <<lit::utf8>>, r_tokens)
{:ok, [{:text, escape(binary)}], r2_tokens}
end
# A delimiter sandwiched between quicktxt on both sides is not markup
# (e.g. the * in "2*3*4"); all three tokens become escaped text.
defp inline_textile_element_not_opening_markup(_parser, [
{:quicktxt, q1},
{token, t},
{:quicktxt, q2} | r_tokens
])
when token in [
:b_delim,
:i_delim,
:strong_delim,
:em_delim,
:ins_delim,
:sup_delim,
:del_delim,
:sub_delim
] do
{:ok, [{:text, escape(<<q1::utf8>>)}, {:text, escape(t)}, {:text, escape(<<q2::utf8>>)}],
r_tokens}
end
defp inline_textile_element_not_opening_markup(_parser, [{:quicktxt, lit} | r_tokens]) do
{:ok, [{:text, escape(<<lit::utf8>>)}], r_tokens}
end
# Cited blockquote: [bq="author"]...[/bq]. The cite text is flattened and
# escaped into the blockquote's author attribute; on a missing close tag the
# already-consumed pieces degrade to escaped text.
defp inline_textile_element_not_opening_markup(parser, [{:bq_cite_start, start} | r_tokens]) do
case repeat(&bq_cite_text/2, parser, r_tokens) do
{:ok, tree, [{:bq_cite_open, open} | r2_tokens]} ->
case repeat(&block_textile_element/2, parser, r2_tokens) do
{:ok, tree2, [{:bq_close, _} | r3_tokens]} ->
cite = escape(flatten(tree))
{:ok,
[
{:markup, "<blockquote author=\""},
{:markup, cite},
{:markup, "\">"},
tree2,
{:markup, "</blockquote>"}
], r3_tokens}
{:ok, tree2, r3_tokens} ->
{:ok,
[
{:text, escape(start)},
{:text, escape(flatten(tree))},
{:text, escape(open)},
tree2
], r3_tokens}
end
_ ->
{:ok, [{:text, escape(start)}], r_tokens}
end
end
# A stray cite-open token outside a citation is plain text.
defp inline_textile_element_not_opening_markup(_parser, [{:bq_cite_open, tok} | r_tokens]) do
{:ok, [{:text, escape(tok)}], r_tokens}
end
# Bracketed block tags ([bq], [spoiler], [b], ...): each pair maps directly
# to an HTML open/close tag via the generic simple_recursive rule, tried in
# list order.
defp inline_textile_element_not_opening_markup(parser, tokens) do
[
{:bq_open, :bq_close, "<blockquote>", "</blockquote>"},
{:spoiler_open, :spoiler_close, "<span class=\"spoiler\">", "</span>"},
{:bracketed_b_open, :bracketed_b_close, "<b>", "</b>"},
{:bracketed_i_open, :bracketed_i_close, "<i>", "</i>"},
{:bracketed_strong_open, :bracketed_strong_close, "<strong>", "</strong>"},
{:bracketed_em_open, :bracketed_em_close, "<em>", "</em>"},
{:bracketed_code_open, :bracketed_code_close, "<code>", "</code>"},
{:bracketed_ins_open, :bracketed_ins_close, "<ins>", "</ins>"},
{:bracketed_sup_open, :bracketed_sup_close, "<sup>", "</sup>"},
{:bracketed_del_open, :bracketed_del_close, "<del>", "</del>"},
{:bracketed_sub_open, :bracketed_sub_close, "<sub>", "</sub>"}
]
|> Enum.find_value(fn {open_token, close_token, open_tag, close_tag} ->
simple_recursive(
open_token,
close_token,
open_tag,
close_tag,
&block_textile_element/2,
parser,
tokens
)
|> case do
{:ok, tree, r_tokens} ->
{:ok, tree, r_tokens}
_ ->
nil
end
end)
|> Kernel.||({:error, "Expected block markup"})
end
#
# block_textile_element =
# double_newline | newline | inline_textile_element;
#
# Block-level elements: newlines become <br/> tags; anything else is
# delegated to the inline rules.
defp block_textile_element(_parser, [{:double_newline, _} | r_tokens]) do
{:ok, [{:markup, "<br/><br/>"}], r_tokens}
end
defp block_textile_element(_parser, [{:newline, _} | r_tokens]) do
{:ok, [{:markup, "<br/>"}], r_tokens}
end
defp block_textile_element(parser, tokens) do
inline_textile_element(parser, tokens)
end
#
# textile =
# (block_textile_element | TOKEN)* eos;
#
# Top-level rule: parse one block element, or — when no rule applies —
# consume a single raw token as escaped text so parsing always progresses.
defp textile(parser, tokens) do
  case block_textile_element(parser, tokens) do
    {:ok, _tree, _r_tokens} = ok ->
      ok

    _ ->
      consume_raw_token(tokens)
  end
end

# Fallback for textile/2: emit one token's text (escaped) and advance, or
# fail once the token stream is exhausted.
defp consume_raw_token([{_token, string} | r_tokens]) do
  {:ok, [{:text, escape(string)}], r_tokens}
end

defp consume_raw_token(_tokens) do
  {:error, "Expected textile"}
end
end

View file

@ -1,546 +0,0 @@
# LUNA PRESENTS THEE
#
# DA ULTIMATE, BESTEST, MOST SECURE AND DEFINITELY NOT BUGGY
# TEXTILE TO MARKDOWN CONVERTER PARSER LIBRARY THING!!!!!
#
# IT'S SO AWESOME I HAVE TO DESCRIBE IT IN ALL CAPS
#
# BY LOOKING AT THIS SOURCE CODE YOU AGREE THAT I MAY NOT BE HELD
# RESPONSIBLE FOR YOU DEVELOPING EYE CANCER
#
# YOU'VE BEEN WARNED
#
# COPYRIGHT (C) (R) (TM) LUNA (C) (R) (TM) 2021-206969696969
defmodule Philomena.Textile.ParserMarkdown do
@moduledoc """
One-shot Textile → Markdown converter used for the migration away from
Textile. Mirrors the grammar of `Philomena.Textile.Parser`, but emits
Markdown delimiters instead of HTML tags, and threads an extra `level`
argument through every rule to track blockquote nesting depth (used to
prefix output lines with `"> "`).
"""
alias Philomena.Textile.Lexer
alias Philomena.Markdown
@doc """
Parses Textile `input` into a list of `{:markup | :text, binary}` Markdown
chunks. Returns `[]` when lexing fails or tokens remain unconsumed.
`parser` is an options map (must provide `:image_transform`); a fresh
`:state` map is installed before parsing.
"""
def parse(parser, input) do
parser = Map.put(parser, :state, %{})
with {:ok, tokens, _1, _2, _3, _4} <- Lexer.lex(String.trim(input || "")),
{:ok, tree, [], _level} <- repeat(&textile/3, parser, tokens, 0) do
partial_flatten(tree)
else
_ ->
[]
end
end
# Helper to turn a parse tree into a string
def flatten(tree) do
tree
|> List.flatten()
|> Enum.map_join("", fn {_k, v} -> v end)
end
# Like flatten/1, but strips one level of "> " blockquote prefixes from each
# chunk; used when an unterminated blockquote degrades back to plain text.
def flatten_unquote(tree) do
tree
|> List.flatten()
|> Enum.map_join("", fn {_k, v} ->
Regex.replace(~r/\n(> )/, v, "\n")
end)
end
# Helper to turn a parse tree into a list
# Adjacent chunks with the same tag are merged into a single tuple.
def partial_flatten(tree) do
tree
|> List.flatten()
|> Enum.chunk_by(fn {k, _v} -> k end)
|> Enum.map(fn list ->
[{type, _v} | _rest] = list
value = Enum.map_join(list, "", fn {_k, v} -> v end)
{type, value}
end)
end
# Flags `new_state` as active in the parser's state map; used to stop a
# rule (e.g. :code) from recursing into itself.
defp put_state(parser, new_state) do
state = Map.put(parser.state, new_state, true)
Map.put(parser, :state, state)
end
# Helper corresponding to Kleene star (*) operator
# Match a specified rule zero or more times
defp repeat(rule, parser, tokens, level) do
case rule.(parser, tokens, level) do
{:ok, tree, r_tokens} ->
{:ok, tree2, r2_tokens, level} = repeat(rule, parser, r_tokens, level)
{:ok, [tree, tree2], r2_tokens, level}
_ ->
{:ok, [], tokens, level}
end
end
# Helper to match a simple recursive grammar rule of the following form:
#
#   open_token callback* close_token
#
defp simple_recursive(
open_token,
close_token,
open_tag,
close_tag,
callback,
parser,
[
{open_token, open} | r_tokens
],
level
) do
case repeat(callback, parser, r_tokens, level) do
{:ok, tree, [{^close_token, _} | r2_tokens], _level} ->
{:ok, [{:markup, open_tag}, tree, {:markup, close_tag}], r2_tokens}
{:ok, tree, r2_tokens, _level} ->
{:ok, [{:text, open}, tree], r2_tokens}
end
end
defp simple_recursive(
_open_token,
_close_token,
_open_tag,
_close_tag,
_callback,
_parser,
_tokens,
_level
) do
{:error, "Expected a simple recursive rule"}
end
# Helper to match a simple recursive grammar rule with negative lookahead:
#
#   open_token callback* close_token (?!lookahead_not)
#
defp simple_lookahead_not(
open_token,
close_token,
open_tag,
close_tag,
lookahead_not,
callback,
state,
parser,
[{open_token, open} | r_tokens],
level
) do
case parser.state do
%{^state => _} ->
{:error, "End of rule"}
_ ->
case r_tokens do
[{forbidden_lookahead, _la} | _] when forbidden_lookahead in [:space, :newline] ->
{:ok, [{:text, open}], r_tokens}
_ ->
case repeat(callback, put_state(parser, state), r_tokens, level) do
{:ok, tree, [{^close_token, close}, {^lookahead_not, ln} | r2_tokens], _level} ->
{:ok, [{:text, open}, tree, {:text, close}], [{lookahead_not, ln} | r2_tokens]}
{:ok, tree, [{^close_token, _} | r2_tokens], _level} ->
{:ok, [{:markup, open_tag}, tree, {:markup, close_tag}], r2_tokens}
{:ok, tree, r2_tokens, _level} ->
{:ok, [{:text, open}, tree], r2_tokens}
end
end
end
end
defp simple_lookahead_not(
_open_token,
_close_token,
_open_tag,
_close_tag,
_lookahead_not,
_callback,
_state,
_parser,
_tokens,
_level
) do
{:error, "Expected a simple lookahead not rule"}
end
# Helper to efficiently assemble a UTF-8 binary from tokens of the
# given type
defp assemble_binary(token_type, accumulator, [{token_type, t} | stream]) do
assemble_binary(token_type, accumulator <> <<t::utf8>>, stream)
end
defp assemble_binary(_token_type, accumulator, tokens), do: {accumulator, tokens}
#
# inline_textile_element =
#   opening_markup inline_textile_element* closing_markup (?!quicktxt) |
#   closing_markup (?=quicktxt) |
#   link_delim block_textile_element* link_url |
#   image url? |
#   code_delim inline_textile_element* code_delim |
#   inline_textile_element_not_opening_markup;
#
# Same delimiter set as the HTML parser, mapped to Markdown delimiters.
defp inline_textile_element(parser, tokens, level) do
[
{:b_delim, :b, "**", "**"},
{:i_delim, :i, "_", "_"},
{:strong_delim, :strong, "**", "**"},
{:em_delim, :em, "*", "*"},
{:ins_delim, :ins, "__", "__"},
{:sup_delim, :sup, "^", "^"},
{:del_delim, :del, "~~", "~~"},
{:sub_delim, :sub, "%", "%"}
]
|> Enum.find_value(fn {delim_token, state, open_tag, close_tag} ->
simple_lookahead_not(
delim_token,
delim_token,
open_tag,
close_tag,
:quicktxt,
&inline_textile_element/3,
state,
parser,
tokens,
level
)
|> case do
{:ok, tree, r_tokens} ->
{:ok, tree, r_tokens}
_ ->
nil
end
end)
|> case do
nil -> inner_inline_textile_element(parser, tokens, level)
value -> value
end
end
# A lone delimiter directly followed by quicktxt is emitted as literal text
# (unescaped — Markdown output is not HTML-escaped in this module).
defp inner_inline_textile_element(parser, [{token, t}, {:quicktxt, q} | r_tokens], level)
when token in [
:b_delim,
:i_delim,
:strong_delim,
:em_delim,
:ins_delim,
:sup_delim,
:del_delim,
:sub_delim
] do
case inline_textile_element(parser, [{:quicktxt, q} | r_tokens], level) do
{:ok, tree, r2_tokens} ->
{:ok, [{:text, t}, tree], r2_tokens}
_ ->
{:ok, [{:text, t}], [{:quicktxt, q} | r_tokens]}
end
end
# Unbracketed link "text":url → [text](url).
# NOTE(review): `url` is inserted verbatim; a ")" inside the URL would break
# the generated Markdown link — accepted for this one-shot migration.
defp inner_inline_textile_element(parser, [{:link_delim, open} | r_tokens], level) do
case repeat(&block_textile_element/3, parser, r_tokens, level) do
{:ok, tree, [{:unbracketed_link_url, <<"\":", url::binary>>} | r2_tokens], _level} ->
href = url
{:ok, [{:markup, "["}, tree, {:markup, "]("}, {:markup, href}, {:markup, ")"}], r2_tokens}
{:ok, tree, r2_tokens, _level} ->
{:ok, [{:text, open}, tree], r2_tokens}
end
end
# Bracketed link ["text":url] → [text](url).
defp inner_inline_textile_element(parser, [{:bracketed_link_open, open} | r_tokens], level) do
case repeat(&inline_textile_element/3, parser, r_tokens, level) do
{:ok, tree, [{:bracketed_link_url, <<"\":", url::binary>>} | r2_tokens], _level} ->
href = url
{:ok, [{:markup, "["}, tree, {:markup, "]("}, {:markup, href}, {:markup, ")"}], r2_tokens}
{:ok, tree, r2_tokens, _level} ->
{:ok, [{:text, open}, tree], r2_tokens}
end
end
# Image with a trailing link URL → linked image: [![full](img)](url).
defp inner_inline_textile_element(
parser,
[
{token, img},
{:unbracketed_image_url, <<":", url::binary>>} | r_tokens
],
_level
)
when token in [:unbracketed_image, :bracketed_image] do
img = parser.image_transform.(img)
{:ok,
[
{:markup, "[![full]("},
{:markup, img},
{:markup, ")]("},
{:markup, url},
{:markup, ")"}
], r_tokens}
end
# Plain image → ![full](img).
defp inner_inline_textile_element(parser, [{token, img} | r_tokens], _level)
when token in [:unbracketed_image, :bracketed_image] do
img = parser.image_transform.(img)
{:ok,
[
{:markup, "![full]("},
{:markup, img},
{:markup, ")"}
], r_tokens}
end
# Inline code span → `...`; the :code state prevents nested code.
defp inner_inline_textile_element(parser, [{:code_delim, open} | r_tokens], level) do
case parser.state do
%{code: _} ->
{:error, "End of rule"}
_ ->
case repeat(&inline_textile_element/3, put_state(parser, :code), r_tokens, level) do
{:ok, tree, [{:code_delim, _} | r2_tokens], _level} ->
{:ok, [{:markup, "`"}, tree, {:markup, "`"}], r2_tokens}
{:ok, tree, r2_tokens, _level} ->
{:ok, [{:text, open}, tree], r2_tokens}
end
end
end
defp inner_inline_textile_element(parser, tokens, level) do
inline_textile_element_not_opening_markup(parser, tokens, level)
end
#
# bq_cite_text = (?!bq_cite_open);
#
# Citation text is collected verbatim; this Markdown variant performs no
# escaping at flatten time.
defp bq_cite_text(_parser, [{:bq_cite_open, _open} | _rest], _level) do
{:error, "Expected cite tokens"}
end
defp bq_cite_text(_parser, [{:char, lit} | r_tokens], _level) do
{:ok, [{:text, <<lit::utf8>>}], r_tokens}
end
defp bq_cite_text(_parser, [{:quicktxt, lit} | r_tokens], _level) do
{:ok, [{:text, <<lit::utf8>>}], r_tokens}
end
defp bq_cite_text(_parser, [{:space, _} | r_tokens], _level) do
{:ok, [{:text, " "}], r_tokens}
end
defp bq_cite_text(_parser, [{_token, t} | r_tokens], _level) do
{:ok, [{:text, t}], r_tokens}
end
defp bq_cite_text(_parser, _tokens, _level) do
{:error, "Expected cite tokens"}
end
#
# inline_textile_element_not_opening_markup =
#   literal | space | char |
#   quicktxt opening_markup quicktxt |
#   quicktxt |
#   opening_block_tag block_textile_element* closing_block_tag;
#
# Literal content is escaped so it survives Markdown rendering untouched.
defp inline_textile_element_not_opening_markup(_parser, [{:literal, lit} | r_tokens], _level) do
{:ok, [{:markup, Markdown.escape_markdown(lit)}], r_tokens}
end
defp inline_textile_element_not_opening_markup(_parser, [{:space, _} | r_tokens], _level) do
{:ok, [{:text, " "}], r_tokens}
end
# Runs of :char tokens are batched into one binary.
defp inline_textile_element_not_opening_markup(_parser, [{:char, lit} | r_tokens], _level) do
{binary, r2_tokens} = assemble_binary(:char, <<lit::utf8>>, r_tokens)
{:ok, [{:text, binary}], r2_tokens}
end
# A delimiter sandwiched between quicktxt on both sides is plain text.
defp inline_textile_element_not_opening_markup(
_parser,
[
{:quicktxt, q1},
{token, t},
{:quicktxt, q2} | r_tokens
],
_level
)
when token in [
:b_delim,
:i_delim,
:strong_delim,
:em_delim,
:ins_delim,
:sup_delim,
:del_delim,
:sub_delim
] do
{:ok, [{:text, <<q1::utf8>>}, {:text, t}, {:text, <<q2::utf8>>}], r_tokens}
end
defp inline_textile_element_not_opening_markup(_parser, [{:quicktxt, lit} | r_tokens], _level) do
{:ok, [{:text, <<lit::utf8>>}], r_tokens}
end
# Cited blockquote: the cite text itself is dropped from Markdown output
# (Markdown has no author attribute); the body is wrapped in "> " prefixes
# one level deeper than the current nesting.
defp inline_textile_element_not_opening_markup(
parser,
[{:bq_cite_start, start} | r_tokens],
level
) do
case repeat(&bq_cite_text/3, parser, r_tokens, level) do
{:ok, tree, [{:bq_cite_open, open} | r2_tokens], _level} ->
case repeat(&block_textile_element/3, parser, r2_tokens, level + 1) do
# `level` here rebinds to the deeper level (original + 1) returned by
# repeat; the closing prefix steps back out one level.
{:ok, tree2, [{:bq_close, _} | r3_tokens], level} ->
{:ok,
[
{:markup, "\n" <> String.duplicate("> ", level)},
tree2,
{:markup, "\n" <> String.duplicate("> ", level - 1)}
], r3_tokens}
{:ok, tree2, r3_tokens, _level} ->
{:ok,
[
{:text, start},
{:text, flatten(tree)},
{:text, open},
tree2
], r3_tokens}
end
_ ->
{:ok, [{:text, start}], r_tokens}
end
end
defp inline_textile_element_not_opening_markup(
_parser,
[{:bq_cite_open, tok} | r_tokens],
_level
) do
{:ok, [{:text, tok}], r_tokens}
end
# Plain blockquote [bq]...[/bq] → "> "-prefixed lines one level deeper; an
# unterminated quote degrades to text with the deeper prefixes stripped.
defp inline_textile_element_not_opening_markup(
parser,
[{:bq_open, start} | r_tokens],
level
) do
case repeat(&block_textile_element/3, parser, r_tokens, level + 1) do
{:ok, tree, [{:bq_close, _} | r2_tokens], level} ->
{:ok,
[
{:markup, "\n" <> String.duplicate("> ", level)},
tree,
{:markup, "\n" <> String.duplicate("> ", level - 1)}
], r2_tokens}
{:ok, tree, r2_tokens, _level} ->
{:ok,
[
{:text, start},
{:text, flatten_unquote(tree)}
], r2_tokens}
end
end
# Remaining bracketed pairs mapped to their Markdown delimiters, tried in
# list order.
defp inline_textile_element_not_opening_markup(parser, tokens, level) do
[
{:spoiler_open, :spoiler_close, "||", "||"},
{:bracketed_b_open, :bracketed_b_close, "**", "**"},
{:bracketed_i_open, :bracketed_i_close, "_", "_"},
{:bracketed_strong_open, :bracketed_strong_close, "**", "**"},
{:bracketed_em_open, :bracketed_em_close, "*", "*"},
{:bracketed_code_open, :bracketed_code_close, "```", "```"},
{:bracketed_ins_open, :bracketed_ins_close, "__", "__"},
{:bracketed_sup_open, :bracketed_sup_close, "^", "^"},
{:bracketed_del_open, :bracketed_del_close, "~~", "~~"},
{:bracketed_sub_open, :bracketed_sub_close, "%", "%"}
]
|> Enum.find_value(fn {open_token, close_token, open_tag, close_tag} ->
simple_recursive(
open_token,
close_token,
open_tag,
close_tag,
&block_textile_element/3,
parser,
tokens,
level
)
|> case do
{:ok, tree, r_tokens} ->
{:ok, tree, r_tokens}
_ ->
nil
end
end)
|> Kernel.||({:error, "Expected block markup"})
end
#
# block_textile_element =
#   double_newline | newline | inline_textile_element;
#
# Inside a blockquote (level > 0), newlines carry the "> " prefix.
defp block_textile_element(_parser, [{:double_newline, _} | r_tokens], level)
when level > 0 do
one = "\n" <> String.duplicate("> ", level)
{:ok, [{:markup, String.duplicate(one, 2)}], r_tokens}
end
defp block_textile_element(_parser, [{:newline, _} | r_tokens], level) when level > 0 do
{:ok, [{:markup, "\n" <> String.duplicate("> ", level)}], r_tokens}
end
# &nbsp;
# At the top level a non-breaking space keeps the blank line from collapsing
# in rendered Markdown.
defp block_textile_element(_parser, [{:double_newline, _} | r_tokens], level)
when level == 0 do
{:ok, [{:markup, "\n\u00a0\n"}], r_tokens}
end
defp block_textile_element(_parser, [{:newline, _} | r_tokens], level) when level == 0 do
{:ok, [{:markup, "\u00a0\n"}], r_tokens}
end
defp block_textile_element(parser, tokens, level) do
inline_textile_element(parser, tokens, level)
end
#
# textile =
#   (block_textile_element | TOKEN)* eos;
#
# When no rule matches, one raw token is consumed as text so the parser
# always makes progress.
defp textile(parser, tokens, level) do
case block_textile_element(parser, tokens, level) do
{:ok, tree, r_tokens} ->
{:ok, tree, r_tokens}
_ ->
case tokens do
[{_, string} | r_tokens] ->
{:ok, [{:text, string}], r_tokens}
_ ->
{:error, "Expected textile"}
end
end
end
end

View file

@ -4,7 +4,6 @@ defmodule Philomena.Users.User do
use Ecto.Schema use Ecto.Schema
import Ecto.Changeset import Ecto.Changeset
import Philomena.MarkdownWriter
alias Philomena.Schema.TagList alias Philomena.Schema.TagList
alias Philomena.Schema.Search alias Philomena.Schema.Search
@ -66,7 +65,6 @@ defmodule Philomena.Users.User do
field :slug, :string field :slug, :string
field :role, :string, default: "user" field :role, :string, default: "user"
field :description, :string field :description, :string
field :description_md, :string
field :avatar, :string field :avatar, :string
# Settings # Settings
@ -117,7 +115,6 @@ defmodule Philomena.Users.User do
field :last_renamed_at, :utc_datetime field :last_renamed_at, :utc_datetime
field :deleted_at, :utc_datetime field :deleted_at, :utc_datetime
field :scratchpad, :string field :scratchpad, :string
field :scratchpad_md, :string
field :secondary_role, :string field :secondary_role, :string
field :hide_default_role, :boolean, default: false field :hide_default_role, :boolean, default: false
field :senior_staff, :boolean, default: false field :senior_staff, :boolean, default: false
@ -366,7 +363,6 @@ defmodule Philomena.Users.User do
|> cast(attrs, [:description, :personal_title]) |> cast(attrs, [:description, :personal_title])
|> validate_length(:description, max: 10_000, count: :bytes) |> validate_length(:description, max: 10_000, count: :bytes)
|> validate_length(:personal_title, max: 24, count: :bytes) |> validate_length(:personal_title, max: 24, count: :bytes)
|> put_markdown(attrs, :description, :description_md)
|> validate_format( |> validate_format(
:personal_title, :personal_title,
~r/\A((?!site|admin|moderator|assistant|developer|\p{C}).)*\z/iu ~r/\A((?!site|admin|moderator|assistant|developer|\p{C}).)*\z/iu
@ -376,7 +372,6 @@ defmodule Philomena.Users.User do
def scratchpad_changeset(user, attrs) do def scratchpad_changeset(user, attrs) do
user user
|> cast(attrs, [:scratchpad]) |> cast(attrs, [:scratchpad])
|> put_markdown(attrs, :scratchpad, :scratchpad_md)
end end
def name_changeset(user, attrs) do def name_changeset(user, attrs) do

View file

@ -44,7 +44,7 @@ defmodule PhilomenaWeb.Admin.DnpEntryController do
bodies = bodies =
dnp_entries dnp_entries
|> Enum.map(&%{body: &1.conditions, body_md: &1.conditions_md}) |> Enum.map(&%{body: &1.conditions})
|> TextRenderer.render_collection(conn) |> TextRenderer.render_collection(conn)
dnp_entries = %{dnp_entries | entries: Enum.zip(bodies, dnp_entries.entries)} dnp_entries = %{dnp_entries | entries: Enum.zip(bodies, dnp_entries.entries)}

View file

@ -73,7 +73,7 @@ defmodule PhilomenaWeb.Admin.ReportController do
reportable: [reportable_id: :reportable_type] reportable: [reportable_id: :reportable_type]
) )
body = TextRenderer.render_one(%{body: report.reason, body_md: report.reason_md}, conn) body = TextRenderer.render_one(%{body: report.reason}, conn)
render(conn, "show.html", title: "Showing Report", report: report, body: body) render(conn, "show.html", title: "Showing Report", report: report, body: body)
end end

View file

@ -43,7 +43,7 @@ defmodule PhilomenaWeb.DnpEntryController do
bodies = bodies =
dnp_entries dnp_entries
|> Enum.map(&%{body_md: &1.conditions_md, body: &1.conditions || "-"}) |> Enum.map(&%{body: &1.conditions || "-"})
|> TextRenderer.render_collection(conn) |> TextRenderer.render_collection(conn)
dnp_entries = %{dnp_entries | entries: Enum.zip(bodies, dnp_entries.entries)} dnp_entries = %{dnp_entries | entries: Enum.zip(bodies, dnp_entries.entries)}
@ -63,9 +63,9 @@ defmodule PhilomenaWeb.DnpEntryController do
[conditions, reason, instructions] = [conditions, reason, instructions] =
TextRenderer.render_collection( TextRenderer.render_collection(
[ [
%{body_md: dnp_entry.conditions_md, body: dnp_entry.conditions || "-"}, %{body: dnp_entry.conditions || "-"},
%{body_md: dnp_entry.reason_md, body: dnp_entry.reason || "-"}, %{body: dnp_entry.reason || "-"},
%{body_md: dnp_entry.instructions_md, body: dnp_entry.instructions || "-"} %{body: dnp_entry.instructions || "-"}
], ],
conn conn
) )

View file

@ -35,7 +35,7 @@ defmodule PhilomenaWeb.Image.DescriptionController do
Images.reindex_image(image) Images.reindex_image(image)
body = body =
TextRenderer.render_one(%{body: image.description, body_md: image.description_md}, conn) TextRenderer.render_one(%{body: image.description}, conn)
conn conn
|> put_view(PhilomenaWeb.ImageView) |> put_view(PhilomenaWeb.ImageView)

View file

@ -68,7 +68,7 @@ defmodule PhilomenaWeb.ImageController do
comments = %{comments | entries: Enum.zip(comments.entries, rendered)} comments = %{comments | entries: Enum.zip(comments.entries, rendered)}
description = description =
%{body: image.description, body_md: image.description_md} %{body: image.description}
|> TextRenderer.render_one(conn) |> TextRenderer.render_one(conn)
interactions = Interactions.user_interactions([image], conn.assigns.current_user) interactions = Interactions.user_interactions([image], conn.assigns.current_user)

View file

@ -36,21 +36,21 @@ defmodule PhilomenaWeb.Profile.CommissionController do
item_descriptions = item_descriptions =
items items
|> Enum.map(&%{body: &1.description, body_md: &1.description_md}) |> Enum.map(&%{body: &1.description})
|> TextRenderer.render_collection(conn) |> TextRenderer.render_collection(conn)
item_add_ons = item_add_ons =
items items
|> Enum.map(&%{body: &1.add_ons, body_md: &1.add_ons_md}) |> Enum.map(&%{body: &1.add_ons})
|> TextRenderer.render_collection(conn) |> TextRenderer.render_collection(conn)
[information, contact, will_create, will_not_create] = [information, contact, will_create, will_not_create] =
TextRenderer.render_collection( TextRenderer.render_collection(
[ [
%{body_md: commission.information_md, body: commission.information || ""}, %{body: commission.information || ""},
%{body_md: commission.contact_md, body: commission.contact || ""}, %{body: commission.contact || ""},
%{body_md: commission.will_create_md, body: commission.will_create || ""}, %{body: commission.will_create || ""},
%{body_md: commission.will_not_create_md, body: commission.will_not_create || ""} %{body: commission.will_not_create || ""}
], ],
conn conn
) )

View file

@ -135,10 +135,10 @@ defmodule PhilomenaWeb.ProfileController do
|> Enum.zip(recent_comments) |> Enum.zip(recent_comments)
about_me = about_me =
TextRenderer.render_one(%{body_md: user.description_md, body: user.description || ""}, conn) TextRenderer.render_one(%{body: user.description || ""}, conn)
scratchpad = scratchpad =
TextRenderer.render_one(%{body_md: user.scratchpad_md, body: user.scratchpad || ""}, conn) TextRenderer.render_one(%{body: user.scratchpad || ""}, conn)
commission_information = commission_info(user.commission, conn) commission_information = commission_info(user.commission, conn)
@ -216,9 +216,9 @@ defmodule PhilomenaWeb.ProfileController do
defp map_fetch(nil, _field_name), do: nil defp map_fetch(nil, _field_name), do: nil
defp map_fetch(map, field_name), do: Map.get(map, field_name) defp map_fetch(map, field_name), do: Map.get(map, field_name)
defp commission_info(%{information: info, information_md: info_md}, conn) defp commission_info(%{information: info}, conn)
when info not in [nil, ""], when info not in [nil, ""],
do: TextRenderer.render_one(%{body: info, body_md: info_md}, conn) do: TextRenderer.render_one(%{body: info}, conn)
defp commission_info(_commission, _conn), do: "" defp commission_info(_commission, _conn), do: ""

View file

@ -62,11 +62,11 @@ defmodule PhilomenaWeb.TagController do
interactions = Interactions.user_interactions(images, user) interactions = Interactions.user_interactions(images, user)
body = body =
TextRenderer.render_one(%{body_md: tag.description_md, body: tag.description || ""}, conn) TextRenderer.render_one(%{body: tag.description || ""}, conn)
dnp_bodies = dnp_bodies =
TextRenderer.render_collection( TextRenderer.render_collection(
Enum.map(tag.dnp_entries, &%{body_md: &1.conditions_md, body: &1.conditions || ""}), Enum.map(tag.dnp_entries, &%{body: &1.conditions || ""}),
conn conn
) )

View file

@ -133,14 +133,14 @@ defmodule PhilomenaWeb.ImageLoader do
defp render_bodies([tag], conn) do defp render_bodies([tag], conn) do
dnp_bodies = dnp_bodies =
TextRenderer.render_collection( TextRenderer.render_collection(
Enum.map(tag.dnp_entries, &%{body_md: &1.conditions_md, body: &1.conditions || ""}), Enum.map(tag.dnp_entries, &%{body: &1.conditions || ""}),
conn conn
) )
dnp_entries = Enum.zip(dnp_bodies, tag.dnp_entries) dnp_entries = Enum.zip(dnp_bodies, tag.dnp_entries)
description = description =
TextRenderer.render_one(%{body_md: tag.description_md, body: tag.description || ""}, conn) TextRenderer.render_one(%{body: tag.description || ""}, conn)
[{tag, description, dnp_entries}] [{tag, description, dnp_entries}]
end end

View file

@ -43,7 +43,7 @@ defmodule PhilomenaWeb.LimitPlug do
is_staff(conn.assigns.current_user) and skip_staff -> is_staff(conn.assigns.current_user) and skip_staff ->
conn conn
conn.assigns.current_user.bypass_rate_limits -> bypasses_rate_limits(conn.assigns.current_user) ->
conn conn
conn.assigns.ajax? -> conn.assigns.ajax? ->
@ -71,6 +71,9 @@ defmodule PhilomenaWeb.LimitPlug do
defp is_staff(%User{role: "assistant"}), do: true defp is_staff(%User{role: "assistant"}), do: true
defp is_staff(_), do: false defp is_staff(_), do: false
defp bypasses_rate_limits(%User{bypass_rate_limits: true}), do: true
defp bypasses_rate_limits(_), do: false
defp current_user_id(%{id: id}), do: id defp current_user_id(%{id: id}), do: id
defp current_user_id(_), do: nil defp current_user_id(_), do: nil

View file

@ -18,7 +18,7 @@ div
div div
- link_path = Routes.image_path(@conn, :show, @comment.image) <> "#comment_#{@comment.id}" - link_path = Routes.image_path(@conn, :show, @comment.image) <> "#comment_#{@comment.id}"
- safe_author = PhilomenaWeb.PostView.textile_safe_author(@comment) - safe_author = PhilomenaWeb.PostView.markdown_safe_author(@comment)
- quote_body = if @comment.hidden_from_users, do: "", else: @comment.body - quote_body = if @comment.hidden_from_users, do: "", else: @comment.body
a.communication__interaction title="Link to comment" href=link_path a.communication__interaction title="Link to comment" href=link_path

View file

@ -18,7 +18,7 @@ div
div div
- link_path = Routes.forum_topic_path(@conn, :show, @post.topic.forum, @post.topic, post_id: @post.id) <> "#post_#{@post.id}" - link_path = Routes.forum_topic_path(@conn, :show, @post.topic.forum, @post.topic, post_id: @post.id) <> "#post_#{@post.id}"
- safe_author = textile_safe_author(@post) - safe_author = markdown_safe_author(@post)
- quote_body = if @post.hidden_from_users, do: "", else: @post.body - quote_body = if @post.hidden_from_users, do: "", else: @post.body
a.communication__interaction title="Link to post" href=link_path a.communication__interaction title="Link to post" href=link_path

View file

@ -1,6 +1,5 @@
defmodule PhilomenaWeb.TextRenderer do defmodule PhilomenaWeb.TextRenderer do
alias PhilomenaWeb.MarkdownRenderer alias PhilomenaWeb.MarkdownRenderer
alias PhilomenaWeb.TextileMarkdownRenderer
def render_one(item, conn) do def render_one(item, conn) do
hd(render_collection([item], conn)) hd(render_collection([item], conn))
@ -8,12 +7,7 @@ defmodule PhilomenaWeb.TextRenderer do
def render_collection(items, conn) do def render_collection(items, conn) do
Enum.map(items, fn item -> Enum.map(items, fn item ->
if Map.has_key?(item, :body_md) && item.body_md != nil && item.body_md != "" do MarkdownRenderer.render(item.body, conn)
MarkdownRenderer.render(item.body_md, conn)
else
markdown = TextileMarkdownRenderer.render_one(item)
MarkdownRenderer.render(markdown, conn)
end
end) end)
end end
end end

View file

@ -1,22 +0,0 @@
defmodule PhilomenaWeb.TextileMarkdownRenderer do
alias Philomena.Textile.ParserMarkdown
def render_one(post) do
hd(render_collection([post]))
end
def render_collection(posts) do
opts = %{image_transform: &Camo.Image.image_url/1}
parsed = Enum.map(posts, &ParserMarkdown.parse(opts, &1.body))
parsed
|> Enum.map(fn tree ->
tree
|> Enum.map(fn
{_k, text} ->
text
end)
|> Enum.join()
end)
end
end

View file

@ -1,134 +0,0 @@
defmodule PhilomenaWeb.TextileRenderer do
  @moduledoc """
  Renders Textile markup to HTML.

  Beyond plain Textile parsing, text segments get typographic entity
  replacements (arrows, dashes, ellipses, ™/©/®) and `>>id` image
  references are expanded into links or embedded thumbnails, with all
  referenced images loaded in a single batch query.
  """

  alias Philomena.Textile.Parser
  alias Philomena.Images.Image
  alias Philomena.Repo
  import Phoenix.HTML
  import Phoenix.HTML.Link
  import Ecto.Query

  # Kill bogus compile time dependency on ImageView
  @image_view Module.concat(["PhilomenaWeb.ImageView"])

  # Parses `text` as Textile and returns an HTML string for `conn`.
  # Pre-scans the parsed text segments for ">>id" references so every
  # referenced image is fetched with one query before rendering.
  def render(text, conn) do
    # Image URLs are rewritten through the Camo proxy.
    opts = %{image_transform: &Camo.Image.image_url/1}
    parsed = Parser.parse(opts, text)

    # Only plain :text tokens can contain image references; other tokens
    # are pre-rendered markup and are passed through untouched below.
    images =
      parsed
      |> Enum.flat_map(fn
        {:text, text} ->
          [text]

        _ ->
          []
      end)
      |> find_images

    parsed
    |> Enum.map(fn
      {:text, text} ->
        text
        |> replacement_entities()
        |> replacement_images(conn, images)

      {_k, markup} ->
        markup
    end)
    |> Enum.join()
  end

  # Replaces ASCII shorthand with typographic HTML entities. Input is
  # already HTML-escaped, hence matching on "&gt;" rather than ">".
  defp replacement_entities(t) do
    t
    |> String.replace("-&gt;", "&rarr;")
    |> String.replace("--", "&mdash;")
    |> String.replace("...", "&hellip;")
    |> String.replace(~r|(\s)-(\s)|, "\\1&mdash;\\2")
    |> String.replace("(tm)", "&trade;")
    |> String.replace("(c)", "&copy;")
    |> String.replace("(r)", "&reg;")
    |> String.replace("&apos;", "&rsquo;")
  end

  # Expands ">>id" references (escaped as "&gt;&gt;id", optionally
  # suffixed with p/t/s for medium/small/thumb-small embeds) using the
  # preloaded `images` map. Unknown ids are left as-is; hidden images
  # fall through to a plain link with a "(deleted)" postfix.
  defp replacement_images(t, conn, images) do
    t
    |> String.replace(~r|&gt;&gt;(\d+)([pts])?|, fn match ->
      # Stupid, but the method doesn't give us capture group information
      match_data = Regex.run(~r|&gt;&gt;(\d+)([pts])?|, match, capture: :all_but_first)
      [image_id | rest] = match_data
      image = images[String.to_integer(image_id)]

      # `rest` is [] or [suffix]; prepending the (possibly nil) image
      # lets one case expression dispatch on both lookup result and size.
      case [image | rest] do
        [nil, _] ->
          match

        [nil] ->
          match

        [image, "p"] when not image.hidden_from_users ->
          Phoenix.View.render(@image_view, "_image_target.html",
            image: image,
            size: :medium,
            conn: conn
          )
          |> safe_to_string()

        [image, "t"] when not image.hidden_from_users ->
          Phoenix.View.render(@image_view, "_image_target.html",
            image: image,
            size: :small,
            conn: conn
          )
          |> safe_to_string()

        [image, "s"] when not image.hidden_from_users ->
          Phoenix.View.render(@image_view, "_image_target.html",
            image: image,
            size: :thumb_small,
            conn: conn
          )
          |> safe_to_string()

        # Hidden image with a size suffix: render a plain link instead
        # of an embed, annotated via link_postfix/1.
        [image, suffix] when suffix in ["p", "t", "s"] ->
          link(">>#{image.id}#{suffix}#{link_postfix(image)}", to: "/images/#{image.id}")
          |> safe_to_string()

        [image] ->
          link(">>#{image.id}#{link_postfix(image)}", to: "/images/#{image.id}")
          |> safe_to_string()
      end
    end)
  end

  # Scans text segments for ">>id" references and batch-loads the
  # corresponding images, keyed by id. Ids at or above the int4 maximum
  # are dropped so the database query cannot overflow the column type.
  defp find_images(text_segments) do
    text_segments
    |> Enum.flat_map(fn t ->
      Regex.scan(~r|&gt;&gt;(\d+)|, t, capture: :all_but_first)
      |> Enum.map(fn [first] -> String.to_integer(first) end)
      |> Enum.filter(&(&1 < 2_147_483_647))
    end)
    |> load_images()
  end

  # No ids referenced: skip the query entirely.
  defp load_images([]), do: %{}

  defp load_images(ids) do
    Image
    |> where([i], i.id in ^ids)
    |> preload(tags: :aliases)
    |> Repo.all()
    |> Map.new(&{&1.id, &1})
  end

  # Annotation appended to a ">>id" link for merged or deleted images.
  defp link_postfix(image) do
    cond do
      not is_nil(image.duplicate_id) ->
        " (merged)"

      image.hidden_from_users ->
        " (deleted)"

      true ->
        ""
    end
  end
end

View file

@ -1,27 +1,10 @@
defmodule PhilomenaWeb.PostView do defmodule PhilomenaWeb.PostView do
alias Philomena.Attribution alias Philomena.Attribution
alias Philomena.Textile.Parser
use PhilomenaWeb, :view use PhilomenaWeb, :view
def textile_safe_author(object) do def markdown_safe_author(object) do
author_name = author_name(object) Philomena.Markdown.escape("@" <> author_name(object))
at_author_name = "@" <> author_name
Parser.parse(%{image_transform: & &1}, at_author_name)
|> Parser.flatten()
|> case do
^at_author_name ->
author_name
_ ->
# Cover *all* possibilities.
literal =
author_name
|> String.replace("==]", "==]==][==")
"[==#{literal}==]"
end
end end
defp author_name(object) do defp author_name(object) do

View file

@ -0,0 +1,116 @@
defmodule Philomena.Repo.Migrations.RenameBodyFields do
  @moduledoc """
  Swaps the Textile and Markdown text columns: every original Textile
  column is renamed to `*_textile`, and the previously backfilled `*_md`
  column takes over the original name. Constraints are then adjusted so
  the now-canonical Markdown columns are NOT NULL, and `*_md` columns on
  tables that no longer need them are dropped.

  NOTE(review): this migration is written as a single `change/0`; the
  `execute/1` calls have no down-version, so rolling back past this
  migration presumably fails — confirm before relying on rollback.
  """
  use Ecto.Migration

  def change do
    # Rename textile fields to *_textile,
    # while putting Markdown fields in their place.
    rename table("comments"), :body, to: :body_textile
    rename table("comments"), :body_md, to: :body

    rename table("messages"), :body, to: :body_textile
    rename table("messages"), :body_md, to: :body

    rename table("mod_notes"), :body, to: :body_textile
    rename table("mod_notes"), :body_md, to: :body

    rename table("posts"), :body, to: :body_textile
    rename table("posts"), :body_md, to: :body

    rename table("commission_items"), :description, to: :description_textile
    rename table("commission_items"), :add_ons, to: :add_ons_textile
    rename table("commission_items"), :description_md, to: :description
    rename table("commission_items"), :add_ons_md, to: :add_ons

    rename table("images"), :description, to: :description_textile
    rename table("images"), :scratchpad, to: :scratchpad_textile
    rename table("images"), :description_md, to: :description
    rename table("images"), :scratchpad_md, to: :scratchpad

    rename table("tags"), :description, to: :description_textile
    rename table("tags"), :description_md, to: :description

    rename table("users"), :description, to: :description_textile
    rename table("users"), :scratchpad, to: :scratchpad_textile
    rename table("users"), :description_md, to: :description
    rename table("users"), :scratchpad_md, to: :scratchpad

    rename table("dnp_entries"), :conditions, to: :conditions_textile
    rename table("dnp_entries"), :reason, to: :reason_textile
    rename table("dnp_entries"), :instructions, to: :instructions_textile
    rename table("dnp_entries"), :conditions_md, to: :conditions
    rename table("dnp_entries"), :reason_md, to: :reason
    rename table("dnp_entries"), :instructions_md, to: :instructions

    rename table("commissions"), :information, to: :information_textile
    rename table("commissions"), :contact, to: :contact_textile
    rename table("commissions"), :will_create, to: :will_create_textile
    rename table("commissions"), :will_not_create, to: :will_not_create_textile
    rename table("commissions"), :information_md, to: :information
    rename table("commissions"), :contact_md, to: :contact
    rename table("commissions"), :will_create_md, to: :will_create
    rename table("commissions"), :will_not_create_md, to: :will_not_create

    rename table("reports"), :reason, to: :reason_textile
    rename table("reports"), :reason_md, to: :reason

    # Change constraints
    # Retired *_textile columns get a '' default (new rows won't populate
    # them); the Markdown columns taking over become NOT NULL.
    alter table("comments") do
      modify :body_textile, :varchar, default: ""
      modify :body, :varchar, null: false
    end

    alter table("posts") do
      modify :body_textile, :varchar, default: ""
      modify :body, :varchar, null: false
    end

    alter table("messages") do
      modify :body_textile, :varchar, default: ""
      modify :body, :varchar, null: false
    end

    alter table("mod_notes") do
      modify :body_textile, :text, default: ""
      modify :body, :varchar, null: false
    end

    alter table("dnp_entries") do
      modify :reason_textile, :varchar, default: ""
      modify :reason, :varchar, null: false
      modify :conditions_textile, :varchar, default: ""
      modify :conditions, :varchar, null: false
      modify :instructions_textile, :varchar, default: ""
      modify :instructions, :varchar, null: false
    end

    alter table("reports") do
      modify :reason_textile, :varchar, default: ""
      modify :reason, :varchar, null: false
    end

    # images/tags descriptions were nullable before; backfill NULLs to ''
    # first so the NOT NULL constraint below can be applied.
    execute("update images set description='' where description is null;")
    execute("update tags set description='' where description is null;")
    execute("alter table images alter column description set default ''::character varying, alter column description set not null;")
    execute("alter table tags alter column description set default ''::character varying, alter column description set not null;")

    # Unneeded columns
    # These tables either never used their *_md column or drop Textile
    # support entirely (channels also loses its description).
    alter table("badges") do
      remove :description_md, :varchar, default: nil
    end

    alter table("channels") do
      remove :description, :varchar, default: ""
      remove :description_md, :varchar, default: ""
    end

    alter table("filters") do
      remove :description_md, :varchar, default: nil
    end

    alter table("galleries") do
      remove :description_md, :varchar, default: nil
    end
  end
end

View file

@ -19,7 +19,7 @@
} }
], ],
"remote_images": [{ "remote_images": [{
"url": "https://derpicdn.net/img/view/2015/9/26/988000.gif", "url": "https://derpicdn.net/img/2015/9/26/988000/thumb.gif",
"source_url": "https://derpibooru.org/988000", "source_url": "https://derpibooru.org/988000",
"description": "Fairly large GIF (~23MB), use to test WebM stuff.", "description": "Fairly large GIF (~23MB), use to test WebM stuff.",
"tag_input": "alicorn, angry, animated, art, artist:assasinmonkey, artist:equum_amici, badass, barrier, crying, dark, epic, female, fight, force field, glare, glow, good vs evil, lord tirek, low angle, magic, mare, messy mane, metal as fuck, perspective, plot, pony, raised hoof, safe, size difference, spread wings, stomping, twilight's kingdom, twilight sparkle, twilight sparkle (alicorn), twilight vs tirek, underhoof" "tag_input": "alicorn, angry, animated, art, artist:assasinmonkey, artist:equum_amici, badass, barrier, crying, dark, epic, female, fight, force field, glare, glow, good vs evil, lord tirek, low angle, magic, mare, messy mane, metal as fuck, perspective, plot, pony, raised hoof, safe, size difference, spread wings, stomping, twilight's kingdom, twilight sparkle, twilight sparkle (alicorn), twilight vs tirek, underhoof"
@ -51,9 +51,10 @@
} }
], ],
"comments": [ "comments": [
"bold is *bold*, italic is _italic_, spoiler is [spoiler]spoiler[/spoiler], code is @code@, underline is +underline+, strike is -strike-, sup is ^sup^, sub is ~sub~.", "bold is **bold**, italic is _italic_, spoiler is ||spoiler||, code is `code`, underline is __underline__, strike is ~~strike~~, sup is ^sup^, sub is %sub%.",
"inline embedded thumbnails (tsp): >>1t >>1s >>1p", "inline embedded thumbnails (tsp): >>1t >>1s >>1p",
"buggy embedded image inside a spoiler: [spoiler]who needs it anyway >>1s[/spoiler]" "embedded image inside a spoiler: ||who needs it anyway >>1s||",
"spoilers inside of a table\n\nHello | World\n--- | ---:\n`||cool beans!||` | ||cool beans!||"
], ],
"forum_posts": [{ "forum_posts": [{
"dis": [{ "dis": [{

View file

@ -165,8 +165,7 @@ CREATE TABLE public.badges (
created_at timestamp without time zone NOT NULL, created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL, updated_at timestamp without time zone NOT NULL,
disable_award boolean DEFAULT false NOT NULL, disable_award boolean DEFAULT false NOT NULL,
priority boolean DEFAULT false, priority boolean DEFAULT false
description_md character varying
); );
@ -207,7 +206,6 @@ CREATE TABLE public.channels (
id integer NOT NULL, id integer NOT NULL,
short_name character varying NOT NULL, short_name character varying NOT NULL,
title character varying NOT NULL, title character varying NOT NULL,
description character varying,
channel_image character varying, channel_image character varying,
tags character varying, tags character varying,
viewers integer DEFAULT 0 NOT NULL, viewers integer DEFAULT 0 NOT NULL,
@ -228,8 +226,7 @@ CREATE TABLE public.channels (
total_viewer_minutes integer DEFAULT 0 NOT NULL, total_viewer_minutes integer DEFAULT 0 NOT NULL,
banner_image character varying, banner_image character varying,
remote_stream_id integer, remote_stream_id integer,
thumbnail_url character varying DEFAULT ''::character varying, thumbnail_url character varying DEFAULT ''::character varying
description_md character varying
); );
@ -258,7 +255,7 @@ ALTER SEQUENCE public.channels_id_seq OWNED BY public.channels.id;
CREATE TABLE public.comments ( CREATE TABLE public.comments (
id integer NOT NULL, id integer NOT NULL,
body character varying NOT NULL, body_textile character varying DEFAULT ''::character varying NOT NULL,
ip inet, ip inet,
fingerprint character varying, fingerprint character varying,
user_agent character varying DEFAULT ''::character varying, user_agent character varying DEFAULT ''::character varying,
@ -275,7 +272,7 @@ CREATE TABLE public.comments (
deletion_reason character varying DEFAULT ''::character varying NOT NULL, deletion_reason character varying DEFAULT ''::character varying NOT NULL,
destroyed_content boolean DEFAULT false, destroyed_content boolean DEFAULT false,
name_at_post_time character varying, name_at_post_time character varying,
body_md character varying body character varying NOT NULL
); );
@ -306,14 +303,14 @@ CREATE TABLE public.commission_items (
id integer NOT NULL, id integer NOT NULL,
commission_id integer, commission_id integer,
item_type character varying, item_type character varying,
description character varying, description_textile character varying,
base_price numeric, base_price numeric,
add_ons character varying, add_ons_textile character varying,
example_image_id integer, example_image_id integer,
created_at timestamp without time zone NOT NULL, created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL, updated_at timestamp without time zone NOT NULL,
description_md character varying, description character varying,
add_ons_md character varying add_ons character varying
); );
@ -345,18 +342,18 @@ CREATE TABLE public.commissions (
user_id integer NOT NULL, user_id integer NOT NULL,
open boolean NOT NULL, open boolean NOT NULL,
categories character varying[] DEFAULT '{}'::character varying[] NOT NULL, categories character varying[] DEFAULT '{}'::character varying[] NOT NULL,
information character varying, information_textile character varying,
contact character varying, contact_textile character varying,
sheet_image_id integer, sheet_image_id integer,
will_create character varying, will_create_textile character varying,
will_not_create character varying, will_not_create_textile character varying,
commission_items_count integer DEFAULT 0 NOT NULL, commission_items_count integer DEFAULT 0 NOT NULL,
created_at timestamp without time zone NOT NULL, created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL, updated_at timestamp without time zone NOT NULL,
information_md character varying, information character varying,
contact_md character varying, contact character varying,
will_create_md character varying, will_create character varying,
will_not_create_md character varying will_not_create character varying
); );
@ -429,16 +426,16 @@ CREATE TABLE public.dnp_entries (
tag_id integer NOT NULL, tag_id integer NOT NULL,
aasm_state character varying DEFAULT 'requested'::character varying NOT NULL, aasm_state character varying DEFAULT 'requested'::character varying NOT NULL,
dnp_type character varying NOT NULL, dnp_type character varying NOT NULL,
conditions character varying NOT NULL, conditions_textile character varying DEFAULT ''::character varying NOT NULL,
reason character varying NOT NULL, reason_textile character varying DEFAULT ''::character varying NOT NULL,
hide_reason boolean DEFAULT false NOT NULL, hide_reason boolean DEFAULT false NOT NULL,
instructions character varying NOT NULL, instructions_textile character varying DEFAULT ''::character varying NOT NULL,
feedback character varying NOT NULL, feedback character varying NOT NULL,
created_at timestamp without time zone NOT NULL, created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL, updated_at timestamp without time zone NOT NULL,
conditions_md character varying, conditions character varying NOT NULL,
reason_md character varying, reason character varying NOT NULL,
instructions_md character varying instructions character varying NOT NULL
); );
@ -551,8 +548,7 @@ CREATE TABLE public.filters (
user_count integer DEFAULT 0 NOT NULL, user_count integer DEFAULT 0 NOT NULL,
created_at timestamp without time zone NOT NULL, created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL, updated_at timestamp without time zone NOT NULL,
user_id integer, user_id integer
description_md character varying
); );
@ -679,8 +675,7 @@ CREATE TABLE public.galleries (
watcher_ids integer[] DEFAULT '{}'::integer[] NOT NULL, watcher_ids integer[] DEFAULT '{}'::integer[] NOT NULL,
watcher_count integer DEFAULT 0 NOT NULL, watcher_count integer DEFAULT 0 NOT NULL,
image_count integer DEFAULT 0 NOT NULL, image_count integer DEFAULT 0 NOT NULL,
order_position_asc boolean DEFAULT false NOT NULL, order_position_asc boolean DEFAULT false NOT NULL
description_md character varying
); );
@ -931,7 +926,7 @@ CREATE TABLE public.images (
watcher_ids integer[] DEFAULT '{}'::integer[] NOT NULL, watcher_ids integer[] DEFAULT '{}'::integer[] NOT NULL,
watcher_count integer DEFAULT 0 NOT NULL, watcher_count integer DEFAULT 0 NOT NULL,
source_url character varying, source_url character varying,
description character varying DEFAULT ''::character varying NOT NULL, description_textile character varying DEFAULT ''::character varying NOT NULL,
image_sha512_hash character varying, image_sha512_hash character varying,
image_orig_sha512_hash character varying, image_orig_sha512_hash character varying,
deletion_reason character varying, deletion_reason character varying,
@ -962,11 +957,11 @@ CREATE TABLE public.images (
updated_at timestamp without time zone NOT NULL, updated_at timestamp without time zone NOT NULL,
destroyed_content boolean DEFAULT false NOT NULL, destroyed_content boolean DEFAULT false NOT NULL,
hidden_image_key character varying, hidden_image_key character varying,
scratchpad character varying, scratchpad_textile character varying,
hides_count integer DEFAULT 0 NOT NULL, hides_count integer DEFAULT 0 NOT NULL,
image_duration double precision, image_duration double precision,
description_md character varying, description character varying DEFAULT ''::character varying NOT NULL,
scratchpad_md character varying scratchpad character varying
); );
@ -995,12 +990,12 @@ ALTER SEQUENCE public.images_id_seq OWNED BY public.images.id;
CREATE TABLE public.messages ( CREATE TABLE public.messages (
id integer NOT NULL, id integer NOT NULL,
body character varying NOT NULL, body_textile character varying DEFAULT ''::character varying NOT NULL,
created_at timestamp without time zone NOT NULL, created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL, updated_at timestamp without time zone NOT NULL,
from_id integer NOT NULL, from_id integer NOT NULL,
conversation_id integer NOT NULL, conversation_id integer NOT NULL,
body_md character varying body character varying NOT NULL
); );
@ -1032,11 +1027,11 @@ CREATE TABLE public.mod_notes (
moderator_id integer NOT NULL, moderator_id integer NOT NULL,
notable_id integer NOT NULL, notable_id integer NOT NULL,
notable_type character varying NOT NULL, notable_type character varying NOT NULL,
body text NOT NULL, body_textile text DEFAULT ''::text NOT NULL,
deleted boolean DEFAULT false NOT NULL, deleted boolean DEFAULT false NOT NULL,
created_at timestamp without time zone NOT NULL, created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL, updated_at timestamp without time zone NOT NULL,
body_md character varying body character varying NOT NULL
); );
@ -1202,7 +1197,7 @@ ALTER SEQUENCE public.polls_id_seq OWNED BY public.polls.id;
CREATE TABLE public.posts ( CREATE TABLE public.posts (
id integer NOT NULL, id integer NOT NULL,
body character varying NOT NULL, body_textile character varying DEFAULT ''::character varying NOT NULL,
edit_reason character varying, edit_reason character varying,
ip inet, ip inet,
fingerprint character varying, fingerprint character varying,
@ -1220,7 +1215,7 @@ CREATE TABLE public.posts (
deletion_reason character varying DEFAULT ''::character varying NOT NULL, deletion_reason character varying DEFAULT ''::character varying NOT NULL,
destroyed_content boolean DEFAULT false NOT NULL, destroyed_content boolean DEFAULT false NOT NULL,
name_at_post_time character varying, name_at_post_time character varying,
body_md character varying body character varying NOT NULL
); );
@ -1253,7 +1248,7 @@ CREATE TABLE public.reports (
fingerprint character varying, fingerprint character varying,
user_agent character varying DEFAULT ''::character varying, user_agent character varying DEFAULT ''::character varying,
referrer character varying DEFAULT ''::character varying, referrer character varying DEFAULT ''::character varying,
reason character varying NOT NULL, reason_textile character varying DEFAULT ''::character varying NOT NULL,
state character varying DEFAULT 'open'::character varying NOT NULL, state character varying DEFAULT 'open'::character varying NOT NULL,
open boolean DEFAULT true NOT NULL, open boolean DEFAULT true NOT NULL,
created_at timestamp without time zone NOT NULL, created_at timestamp without time zone NOT NULL,
@ -1262,7 +1257,7 @@ CREATE TABLE public.reports (
admin_id integer, admin_id integer,
reportable_id integer NOT NULL, reportable_id integer NOT NULL,
reportable_type character varying NOT NULL, reportable_type character varying NOT NULL,
reason_md character varying reason character varying NOT NULL
); );
@ -1557,7 +1552,7 @@ CREATE TABLE public.tags (
id integer NOT NULL, id integer NOT NULL,
name character varying NOT NULL, name character varying NOT NULL,
slug character varying NOT NULL, slug character varying NOT NULL,
description character varying DEFAULT ''::character varying, description_textile character varying DEFAULT ''::character varying,
short_description character varying DEFAULT ''::character varying, short_description character varying DEFAULT ''::character varying,
namespace character varying, namespace character varying,
name_in_namespace character varying, name_in_namespace character varying,
@ -1570,7 +1565,7 @@ CREATE TABLE public.tags (
updated_at timestamp without time zone NOT NULL, updated_at timestamp without time zone NOT NULL,
category character varying, category character varying,
mod_notes character varying, mod_notes character varying,
description_md character varying description character varying DEFAULT ''::character varying NOT NULL
); );
@ -1952,7 +1947,7 @@ CREATE TABLE public.users (
name character varying NOT NULL, name character varying NOT NULL,
slug character varying NOT NULL, slug character varying NOT NULL,
role character varying DEFAULT 'user'::character varying NOT NULL, role character varying DEFAULT 'user'::character varying NOT NULL,
description character varying, description_textile character varying,
avatar character varying, avatar character varying,
spoiler_type character varying DEFAULT 'static'::character varying NOT NULL, spoiler_type character varying DEFAULT 'static'::character varying NOT NULL,
theme character varying DEFAULT 'default'::character varying NOT NULL, theme character varying DEFAULT 'default'::character varying NOT NULL,
@ -1991,7 +1986,7 @@ CREATE TABLE public.users (
metadata_updates_count integer DEFAULT 0 NOT NULL, metadata_updates_count integer DEFAULT 0 NOT NULL,
images_favourited_count integer DEFAULT 0 NOT NULL, images_favourited_count integer DEFAULT 0 NOT NULL,
last_donation_at timestamp without time zone, last_donation_at timestamp without time zone,
scratchpad text, scratchpad_textile text,
use_centered_layout boolean DEFAULT true NOT NULL, use_centered_layout boolean DEFAULT true NOT NULL,
secondary_role character varying, secondary_role character varying,
hide_default_role boolean DEFAULT false NOT NULL, hide_default_role boolean DEFAULT false NOT NULL,
@ -2009,8 +2004,8 @@ CREATE TABLE public.users (
forced_filter_id bigint, forced_filter_id bigint,
confirmed_at timestamp(0) without time zone, confirmed_at timestamp(0) without time zone,
senior_staff boolean DEFAULT false, senior_staff boolean DEFAULT false,
description_md character varying, description character varying,
scratchpad_md character varying, scratchpad character varying,
bypass_rate_limits boolean DEFAULT false, bypass_rate_limits boolean DEFAULT false,
scale_large_images character varying(255) DEFAULT 'true'::character varying NOT NULL scale_large_images character varying(255) DEFAULT 'true'::character varying NOT NULL
); );
@ -4871,3 +4866,4 @@ INSERT INTO public."schema_migrations" (version) VALUES (20210427022351);
INSERT INTO public."schema_migrations" (version) VALUES (20210912171343); INSERT INTO public."schema_migrations" (version) VALUES (20210912171343);
INSERT INTO public."schema_migrations" (version) VALUES (20210917190346); INSERT INTO public."schema_migrations" (version) VALUES (20210917190346);
INSERT INTO public."schema_migrations" (version) VALUES (20210921025336); INSERT INTO public."schema_migrations" (version) VALUES (20210921025336);
INSERT INTO public."schema_migrations" (version) VALUES (20210929181319);