From 939a5a19a9310edd0212b5e9f8ca2590145eb82a Mon Sep 17 00:00:00 2001
From: Juan Facorro
Date: Thu, 15 Jun 2017 00:21:53 +0200
Subject: [PATCH] [#32] Specify utf8 encoding for all binaries built in lexer

---
 lib/eden/lexer.ex | 42 +++++++++++++++++++++---------------------
 1 file changed, 21 insertions(+), 21 deletions(-)

diff --git a/lib/eden/lexer.ex b/lib/eden/lexer.ex
index 20c329d..7628ff3 100644
--- a/lib/eden/lexer.ex
+++ b/lib/eden/lexer.ex
@@ -56,8 +56,8 @@ defmodule Eden.Lexer do
     start_token(state, :comment, token, ";", rest)
   end
   defp _tokenize(state = %{state: :comment}, <<char :: utf8, rest :: binary>>)
-  when <<char>> in ["\n", "\r"] do
-    end_token(state, <<char>>, rest)
+  when <<char :: utf8>> in ["\n", "\r"] do
+    end_token(state, <<char :: utf8>>, rest)
   end
   defp _tokenize(state = %{state: :comment}, <<";" :: utf8, rest :: binary>>) do
     skip_char(state, ";", rest)
@@ -80,7 +80,7 @@ defmodule Eden.Lexer do
     start_token(state, :check_literal, token, "false", rest)
   end
   defp _tokenize(state = %{state: :check_literal}, <<char :: utf8, rest :: binary>> = input) do
-    if separator?(<<char>>) do
+    if separator?(<<char :: utf8>>) do
       end_token(state, "", input)
     else
       token = token(:symbol, state.current.value)
@@ -108,8 +108,8 @@ defmodule Eden.Lexer do

   # Character
   defp _tokenize(state = %{state: :new}, <<"\\" :: utf8, char :: utf8, rest :: binary>>) do
-    token = token(:character, <<char>>)
-    end_token(state, token, "\\" <> <<char>>, rest)
+    token = token(:character, <<char :: utf8>>)
+    end_token(state, token, "\\" <> <<char :: utf8>>, rest)
   end

   # Keyword and Symbol
@@ -125,7 +125,7 @@ defmodule Eden.Lexer do
     end
   end
   defp _tokenize(state = %{state: :symbol}, <<char :: utf8, rest :: binary>> = input) do
-    if symbol_char?(<<char>>) do
+    if symbol_char?(<<char :: utf8>>) do
       consume_char(state, <<char :: utf8>>, rest)
     else
       end_token(state, "", input)
@@ -157,24 +157,24 @@ defmodule Eden.Lexer do
     start_token(state, :fraction, token, ".", rest)
   end
   defp _tokenize(state = %{state: :number}, <<char :: utf8, rest :: binary>>)
-  when <<char>> in ["e", "E"] do
-    state = append_to_current(state, <<char>>)
+  when <<char :: utf8>> in ["e", "E"] do
+    state = append_to_current(state, <<char :: utf8>>)
     token = token(:float, state.current.value)
-    start_token(state, :exponent, token, <<char>>, rest)
+    start_token(state, :exponent, token, <<char :: utf8>>, rest)
   end
   defp _tokenize(state = %{state: s}, <<char :: utf8, rest :: binary>> = input)
   when s in [:number, :exponent, :fraction] do
     cond do
-      digit?(<<char>>) ->
+      digit?(<<char :: utf8>>) ->
         state
         |> set_state(:number)
         |> consume_char(<<char :: utf8>>, rest)
-      s in [:exponent, :fraction] and separator?(<<char>>) ->
+      s in [:exponent, :fraction] and separator?(<<char :: utf8>>) ->
         raise Ex.UnfinishedTokenError, state.current
-      separator?(<<char>>) ->
+      separator?(<<char :: utf8>>) ->
         end_token(state, "", input)
       true ->
-        raise Ex.UnexpectedInputError, <<char>>
+        raise Ex.UnexpectedInputError, <<char :: utf8>>
     end
   end

@@ -211,20 +211,20 @@ defmodule Eden.Lexer do
   # Symbol, Integer or Invalid input
   defp _tokenize(state = %{state: :new}, <<char :: utf8, rest :: binary>>) do
     cond do
-      alpha?(<<char>>) ->
-        token = token(:symbol, <<char>>)
-        start_token(state, :symbol, token, <<char>>, rest)
-      digit?(<<char>>) ->
-        token = token(:integer, <<char>>)
-        start_token(state, :number, token, <<char>>, rest)
+      alpha?(<<char :: utf8>>) ->
+        token = token(:symbol, <<char :: utf8>>)
+        start_token(state, :symbol, token, <<char :: utf8>>, rest)
+      digit?(<<char :: utf8>>) ->
+        token = token(:integer, <<char :: utf8>>)
+        start_token(state, :number, token, <<char :: utf8>>, rest)
       true ->
-        raise Ex.UnexpectedInputError, <<char>>
+        raise Ex.UnexpectedInputError, <<char :: utf8>>
     end
   end

   # Unexpected Input
   defp _tokenize(_, <<char :: utf8, _rest :: binary>>) do
-    raise Ex.UnexpectedInputError, <<char>>
+    raise Ex.UnexpectedInputError, <<char :: utf8>>
   end

   ##############################################################################
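
A brief note on why the :: utf8 specifier matters for the binaries the lexer builds: when a binary is constructed from a codepoint, a bare <<char>> segment defaults to 8 bits, so any codepoint above 255 is silently truncated, whereas <<char :: utf8>> emits the full UTF-8 encoding. A minimal iex sketch (the lambda input is only an illustration and is not taken from the patch; char mirrors the variable name used in the lexer):

    iex> char = ?λ          # codepoint 955, above the single-byte range
    955
    iex> <<char>>           # bare 8-bit segment truncates 955 to 187
    <<187>>
    iex> <<char :: utf8>>   # utf8 segment encodes the codepoint properly
    "λ"

For ASCII input the two forms coincide (<<?\n>> and <<?\n :: utf8>> are both "\n"), which is why guards such as when <<char>> in ["\n", "\r"] already worked; the :: utf8 specifier makes the non-ASCII cases correct as well.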