| author | Drahflow <drahflow@gmx.de> | 2013-08-06 23:09:08 +0200 |
|---|---|---|
| committer | Drahflow <drahflow@gmx.de> | 2013-08-06 23:09:08 +0200 |
| commit | a0f5e44d1cd8f3d4b12463bf9d0401f0d5623057 | |
| tree | 6d09c6a94ed406b8335c2d0678857bca11fba6c3 /compiler/elymasTokenize.ey | |
| parent | 92d3a212d4f9afe21914080de81f23d1878f6685 | |
Towards more efficient "include"
Diffstat (limited to 'compiler/elymasTokenize.ey')
| -rw-r--r-- | compiler/elymasTokenize.ey | 24 |
|---|---|---|

1 file changed, 12 insertions, 12 deletions
```diff
diff --git a/compiler/elymasTokenize.ey b/compiler/elymasTokenize.ey
index 347a81b..a44bcbe 100644
--- a/compiler/elymasTokenize.ey
+++ b/compiler/elymasTokenize.ey
@@ -10,7 +10,7 @@
 }
 > -- /base10decode deff
-{ /f deff -101 /s defv regex { f } { s } ? * } /rxparse deff
+{ /f deff /re deff _ /s defv re { f } { s } ? * } /rxparse deff
 
 {
   ==TOKID ==TOKSTR ==TOKINT
   " " cat
@@ -18,25 +18,25 @@
   [ -01 { _ "" streq not } {
     0 /matched defv
     { /f deff matched { -- } { { 1 =matched f } rxparse } ? * } /parse deff
-    "^ (.*)" { } parse
-    "^#" { "" } parse
-    "^(\\d+) +(.*)" { TOKINT token -01 } parse
-    "^\"(.*)" {
+    { "^ (.*)" regex } { } parse
+    { "^#" regex } { "" } parse
+    { "^(\\d+) +(.*)" regex } { TOKINT token -01 } parse
+    { "^\"(.*)" regex } {
       "" /str defv
       { _ "^\"(.*)" regex { -01 -- 0 } { 1 } ? * } {
         0 /strmatched defv
         { /f deff strmatched { -- } { { 1 =strmatched f } rxparse } ? * } /strparse deff
-        "^\\\\\\\\(.*)" { str "\\" cat =str } strparse
-        "^\\\\n(.*)" { str "\n" cat =str } strparse
-        "^\\\\0(.*)" { str "\0" cat =str } strparse
-        "^\\\\\"(.*)" { str "\"" cat =str } strparse
-        "^([^\"\\\\])(.*)" { str -01 cat =str } strparse
+        { "^\\\\\\\\(.*)" regex } { str "\\" cat =str } strparse
+        { "^\\\\n(.*)" regex } { str "\n" cat =str } strparse
+        { "^\\\\0(.*)" regex } { str "\0" cat =str } strparse
+        { "^\\\\\"(.*)" regex } { str "\"" cat =str } strparse
+        { "^([^\"\\\\])(.*)" regex } { str -01 cat =str } strparse
         strmatched not { "Tokenization of string-like failed" die } rep
       } loop
       str TOKSTR token -01
     } parse
-    "^([^a-zA-Z0-9 ]+)([a-zA-Z0-9][^ ]*) +(.*)" { -201 TOKSTR token " " -1203 cat cat } parse
-    "^([a-zA-Z0-9]+|[^a-zA-Z0-9 ]+) +(.*)" { TOKID token -01 } parse
+    { "^([^a-zA-Z0-9 ]+)([a-zA-Z0-9][^ ]*) +(.*)" regex } { -201 TOKSTR token " " -1203 cat cat } parse
+    { "^([a-zA-Z0-9]+|[^a-zA-Z0-9 ]+) +(.*)" regex } { TOKID token -01 } parse
     matched not { "Tokenization failed: " -01 cat die } rep
   } loop -- ]
```
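The substance of the change is the calling convention of `rxparse`: instead of receiving a bare pattern string and invoking `regex` on it itself, it now receives a block such as `{ "^#" regex }` and calls that block to perform the match, so matcher construction is pushed out to the `parse` and `strparse` call sites. Read against the commit title, this looks like groundwork for building or caching the per-pattern matchers once instead of on every pass through the tokenizer loop. The sketch below shows the same refactor shape in plain Python rather than Elymas; every name in it is invented for illustration and none of it comes from this repository.

```python
import re

# Old shape (analogous to the previous rxparse): the helper takes a raw
# pattern string, and the regex machinery runs on it inside every call.
def parse_old(pattern, on_match, text):
    m = re.match(pattern, text)
    return on_match(m) if m is not None else None

# New shape (analogous to the revised rxparse): the helper takes a thunk
# that yields a ready-made matcher, so where and how often the matcher is
# built is entirely the caller's business.
def parse_new(get_matcher, on_match, text):
    m = get_matcher().match(text)
    return on_match(m) if m is not None else None

# The caller-side change mirrors the diff: '"pattern" ... parse' becomes
# '{ "pattern" regex } ... parse'; here the matcher is built once up front.
NUMBER = re.compile(r"^(\d+) +(.*)")
print(parse_new(lambda: NUMBER, lambda m: m.groups(), "42 rest"))  # ('42', 'rest')
```

In both the diff and the sketch, the helper stays agnostic about how the matcher comes into existence, which is what would make later caching a purely local change at each call site.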
