# compiler/elymasLexer.ey
"elymasGlobal.ey" include

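# base10decode: convert a string of decimal digits into an integer by
# splitting off one character at a time, looking it up in digits and
# accumulating result * 10 + digit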
<
  [ /0 /1 /2 /3 /4 /5 /6 /7 /8 /9 ] ==digits

  { 0 ==result
    { "(.)(.*)" regex } {
      { streq }_ digits -01 index result 10 mul add =result
    } loop
    result
  }
> -- /base10decode deff

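# Code generators for the three token kinds; "3 |defv rep" below binds them
# to TOKINT, TOKSTR and TOKID. Local shorthands used inside this scope:
#   :name   member <name> of assembler (the instruction emitters)
#   ::name  member <name> of assemblerLibrary
#   @       the assembler's label member
#   %       the assembler's "%" member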
<
  { assembler -01 . } ":" deff
  { assemblerLibrary -01 . } "::" deff
  assembler .|label "@" deff
  "%" _ : -01 deff

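  # TOKINT: integer literal; allocates a 16-byte integer object via
  # internalAllocate, pushes its address onto the program stack and stores
  # the decoded value at offset 8 (the type word at offset 0 stays zero)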
  { .value base10decode ==v
    [
      # allocate int
      16 /rdi :movqImmReg
      ::internalAllocate /rax :movqImmReg
      /rax :callqReg

      # push int address on program stack
      /rax :pushqReg

      # type zero does not need to be changed

      # load value
      8 /rax :addqImm8Reg
      v /rdx :movqImmReg
      /rdx /rax :movqRegMem
    ] :execute
  } /TOKINT

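  # TOKSTR: string literal; executes the code constStringCode generates
  # for the string's value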
  { .value ::constStringCode :execute } /TOKSTR

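  # TOKID: identifier; pushes the name as a constant string and calls
  # internalExecuteIdentifier on it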
  { .value ::constStringCode
    [
      global .internalExecuteIdentifier /rax :movqImmReg
      /rax :callqReg
    ] cat :execute
  } /TOKID
> -- 3 |defv rep

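# rxparse: match a regex against a string; if it matches, run the given
# handler on the capture groups, otherwise push the original string back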
{ /f deff -101 /s defv regex { f } { s } ? * } /rxparse deff

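# tokenize: split one source line (a space is appended first) into an array
# of token objects; each token is a scope carrying a .handle (code
# generator) and a .value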
{ " " cat
  { < /handle deff /value defv > } /token deff
  [ -01 { _ "" streq not } {
    0 /matched defv { /f deff matched { -- } { { 1 =matched f } rxparse } ? * } /parse deff

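    # token patterns, tried in order; each handler leaves any produced token
    # below and the still-unconsumed rest of the line on top of the stack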
    "^ (.*)" { } parse
    "^#" { "" } parse
    "^(\\d+) +(.*)" { TOKINT token -01 } parse
    "^\"(.*)" {
      "" /str defv
      { _ "^\"(.*)" regex { -01 -- 0 } { 1 } ? * } {
        0 /strmatched defv { /f deff strmatched { -- } { { 1 =strmatched f } rxparse } ? * } /strparse deff

        "^\\\\\\\\(.*)" { str "\\" cat =str } strparse
        "^\\\\n(.*)" { str "\n" cat =str } strparse
        "^\\\\0(.*)" { str "\0" cat =str } strparse
        "^\\\\\"(.*)" { str "\"" cat =str } strparse
        "^([^\"\\\\])(.*)" { str -01 cat =str } strparse
        strmatched not { "Tokenization of string-like failed" die } rep
      } loop
      str TOKSTR token -01
    } parse
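    # sigil followed by a name (e.g. /foo): emit the name as a string token
    # and put the sigil back in front of the remaining input, so it is
    # tokenized as an identifier afterwards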
    "^([^a-zA-Z0-9 ]+)([a-zA-Z0-9][^ ]*) +(.*)" { -201 TOKSTR token " " -1203 cat cat } parse
    "^([a-zA-Z0-9]+|[^a-zA-Z0-9 ]+) +(.*)" { TOKID token -01 } parse

    matched not { "Tokenization failed: " -01 cat die } rep
  } loop -- ]
} /tokenize deff

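# executeFile: read the input stream in 4096-byte chunks; whenever the buffer
# holds a complete line, tokenize it and run every token's handler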
{ /input defv
  "" {
    4096 input .readstr cat
    _ "" streq not
  } {
    { _ "([^\\n]*)\\n(.*)" regex } { -102 -- tokenize {
      _ .handle
      # assemblerLibrary .stackDump
      # assemblerLibrary .globalScopeDump
    } each } loop
  } loop --
} /executeFile deff

# vim: syn=elymas