Switch from 'fast' string functions to normal string functions
rvs314 committed Oct 18, 2023
1 parent 6c35157 commit 74bc0e7
Showing 5 changed files with 9 additions and 9 deletions.
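For context: Idris 2's base library provides fastPack, fastUnpack, and fastConcat (in Data.String) as primitive-backed counterparts to the Prelude's pack, unpack, and concat; the two families agree on results, so each replacement below is behaviour-preserving. A minimal sketch of that correspondence (the module name and main are illustrative, not part of this commit):

    module FastVsNormal

    import Data.String  -- home of fastPack, fastUnpack, fastConcat

    -- The Prelude functions recurse over the list; the fast* variants
    -- call backend primitives. Observable results are the same.
    main : IO ()
    main = do
      printLn (pack (unpack "hello") == fastPack (fastUnpack "hello"))  -- True
      printLn (concat ["a", "b"] == fastConcat ["a", "b"])              -- True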
2 changes: 1 addition & 1 deletion libs/contrib/Text/Distance/Levenshtein.idr
@@ -10,7 +10,7 @@ import Data.List.Extra

 ||| Self-evidently correct but O(3 ^ (min mn)) complexity
 spec : String -> String -> Nat
-spec a b = loop (fastUnpack a) (fastUnpack b) where
+spec a b = loop (unpack a) (unpack b) where

   loop : List Char -> List Char -> Nat
   loop [] ys = length ys -- deletions
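As a usage sketch of the specification above (assuming spec is visible outside the module, which may not hold in the real file): the Levenshtein distance between "kitten" and "sitting" is 3.

    import Text.Distance.Levenshtein

    -- kitten -> sitten -> sittin -> sitting: three single-character edits
    example : Nat
    example = spec "kitten" "sitting"  -- 3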
6 changes: 3 additions & 3 deletions libs/contrib/Text/Lexer/Core.idr
@@ -159,7 +159,7 @@ tokenise pred line col acc tmap str
       Just (tok, rest) =>
         let line' = line + cast (countNLs tok)
             col' = getCols tok col in
-            Just (MkBounded (fn (fastPack (reverse tok))) False (MkBounds line col line' col'),
+            Just (MkBounded (fn (pack (reverse tok))) False (MkBounds line col line' col'),
                 line', col', rest)
       Nothing => getFirstToken ts str

@@ -171,11 +171,11 @@ export
 lex : TokenMap a -> String -> (List (WithBounds a), (Int, Int, String))
 lex tmap str
     = let (ts, (l, c, str')) = tokenise (const False) 0 0 [] tmap (unpack str) in
-          (ts, (l, c, fastPack str'))
+          (ts, (l, c, pack str'))

 export
 lexTo : (a -> Bool) ->
         TokenMap a -> String -> (List (WithBounds a), (Int, Int, String))
 lexTo pred tmap str
     = let (ts, (l, c, str')) = tokenise pred 0 0 [] tmap (unpack str) in
-          (ts, (l, c, fastPack str'))
+          (ts, (l, c, pack str'))
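For reference, a small sketch of driving lex with a hypothetical token type (Tok and myMap are illustrative; digits, alphas, and spaces are standard Text.Lexer recognisers):

    import Text.Lexer

    data Tok = TNum | TWord | TSpace

    myMap : TokenMap Tok
    myMap = [ (digits, const TNum)
            , (alphas, const TWord)
            , (spaces, const TSpace) ]

    main : IO ()
    main = let (toks, (_, _, rest)) = lex myMap "abc 123" in
           printLn (length toks, rest)  -- (3, "") if the input is fully consumed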
6 changes: 3 additions & 3 deletions libs/contrib/Text/Lexer/Tokenizer.idr
@@ -110,7 +110,7 @@ tokenise reject tokenizer line col acc str
             | _ => Nothing
         line' = line + cast (countNLs token)
         col' = getCols token col
-        tokenStr = fastPack $ reverse token
+        tokenStr = pack $ reverse token
     in pure (tokenStr, line', col', rest)

getFirstMatch : Tokenizer a -> List Char ->
@@ -149,8 +149,8 @@ lexTo : Lexer ->
         (List (WithBounds a), (StopReason, Int, Int, String))
 lexTo reject tokenizer str
     = let (ts, reason, (l, c, str')) =
-            tokenise reject tokenizer 0 0 [] (fastUnpack str) in
-      (ts, reason, (l, c, fastPack str'))
+            tokenise reject tokenizer 0 0 [] (unpack str) in
+      (ts, reason, (l, c, pack str'))

 ||| Given a tokenizer and an input string, return a list of recognised tokens,
 ||| and the line, column, and remainder of the input at the first point in the string
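The Tokenizer variant of lexTo stops early when its first argument (the reject lexer) matches. A hedged sketch (Tok is illustrative; match, (<|>), and is are combinators from these modules, used here as I understand them):

    import Text.Lexer
    import Text.Lexer.Tokenizer

    data Tok = TNum | TWord

    tokenizer : Tokenizer Tok
    tokenizer = match digits (const TNum)
            <|> match alphas (const TWord)

    main : IO ()
    main = let (toks, (_, _, _, rest)) = lexTo (is '#') tokenizer "abc123#x" in
           printLn (length toks)  -- 2: "abc" then "123"; lexing stops at '#'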
2 changes: 1 addition & 1 deletion libs/contrib/Text/Literate.idr
@@ -78,7 +78,7 @@ namespace Compat

 ||| Merge the tokens into a single source file.
 reduce : List (WithBounds Token) -> List String -> String
-reduce [] acc = fastConcat (reverse acc)
+reduce [] acc = concat (reverse acc)
 reduce (MkBounded (Any x) _ _ :: rest) acc =
   -- newline will always be tokenized as a single token
   if x == "\n"
2 changes: 1 addition & 1 deletion
@@ -6,7 +6,7 @@ import Data.String

 export
 htmlEscape : String -> String
-htmlEscape s = fastConcat $ reverse $ go [] s
+htmlEscape s = concat $ reverse $ go [] s
   where
     isSafe : Char -> Bool
     isSafe '"' = False
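A usage sketch of htmlEscape (the exact entity spellings are an assumption; only the '"' case is visible in this hunk, and the defining module's name is not shown in this excerpt):

    -- import of the defining module omitted: its name is not shown above
    main : IO ()
    main = putStrLn (htmlEscape "<a href=\"x\">")
      -- assumed output: &lt;a href=&quot;x&quot;&gt;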
