hopefully more correct fix for textile shenanigans

byte[] 2019-11-27 16:38:53 -05:00
parent fa6be88874
commit 9dbdad0d49
2 changed files with 18 additions and 19 deletions

@@ -122,23 +122,22 @@ defmodule Textile.Lexer do
   {link_markup_start, link_markup_element} = markup_ending_in(string("\""))
 
-  link_stop =
-    repeat(
-      choice([
-        string("*"),
-        string("_"),
-        string("@"),
-        string("+"),
-        string("^"),
-        string("-"),
-        string("~"),
-        string("."),
-        string("?"),
-        string("!"),
-        string(","),
-      ])
-    )
-    |> choice([space(), eos()])
+  link_url_stop =
+    choice([
+      string("*"),
+      string("_"),
+      string("@"),
+      string("+"),
+      string("^"),
+      string("-"),
+      string("~"),
+      string("."),
+      string("?"),
+      string("!"),
+      string(","),
+      space(),
+      eos()
+    ])
 
   link_contents_start =
     choice([
@@ -175,7 +174,7 @@ defmodule Textile.Lexer do
     string("\":")
     |> unwrap_and_tag(:link_end)
     |> concat(
-      url_ending_in(link_stop)
+      url_ending_in(link_url_stop)
       |> unwrap_and_tag(:link_url)
     )
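
Note (not part of the commit): the two terminator shapes mean different things. The old link_stop consumed an optional run of punctuation and then required a space or end of input, whereas the new link_url_stop matches any single one of those punctuation characters, a space, or end of input on its own. A minimal NimbleParsec sketch of the difference, with invented module and parser names and string(" ") standing in for the lexer's space() helper:

# Illustrative sketch only; not from this commit.
defmodule TerminatorSketch do
  import NimbleParsec

  punct = choice([string("."), string("?"), string("!"), string(",")])

  # Old shape: an optional run of punctuation that must be followed by a
  # space or end of input (repeat/1 is greedy and does not backtrack).
  defparsec :old_stop, repeat(punct) |> choice([string(" "), eos()])

  # New shape: any single punctuation character, a space, or end of input.
  defparsec :new_stop, choice([punct, string(" "), eos()])
end

# TerminatorSketch.old_stop(".x")  #=> {:error, ...}   "." is not followed by a space
# TerminatorSketch.new_stop(".x")  #=> {:ok, ["."], "x", ...}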

@@ -19,7 +19,7 @@ defmodule Textile.UrlLexer do
       ])
 
     scheme_and_domain
-    |> repeat(utf8_char([]) |> lookahead_not(ending_sequence))
+    |> repeat(lookahead_not(ending_sequence) |> utf8_char([]))
     |> reduce({List, :to_string, []})
   end
 end
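
Note (not part of the commit): the operand order around lookahead_not is the substance of this hunk. The old order consumed a character first and then asserted that the terminator did not follow it, so the repeat iteration sitting just before the terminator failed, was rolled back, and that character was dropped from the URL; the new order asserts the terminator is not next before consuming, so consumption stops exactly at it. A minimal NimbleParsec sketch with invented names and a stand-in terminator:

# Illustrative sketch only; not from this commit.
defmodule LookaheadOrderSketch do
  import NimbleParsec

  # Stand-in terminator: a "." or end of input.
  stop = choice([string("."), eos()])

  # Old order: consume a character, then assert the terminator does not
  # follow it. The last character before the terminator is rolled back.
  defparsec :old_order,
            repeat(utf8_char([]) |> lookahead_not(stop))
            |> reduce({List, :to_string, []})

  # New order: assert the terminator is not next, then consume, so the
  # result runs right up to the terminator.
  defparsec :new_order,
            repeat(lookahead_not(stop) |> utf8_char([]))
            |> reduce({List, :to_string, []})
end

# LookaheadOrderSketch.old_order("abc.")  #=> {:ok, ["ab"], "c.", ...}
# LookaheadOrderSketch.new_order("abc.")  #=> {:ok, ["abc"], ".", ...}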