// Package gott is a Go template renderer based on Perl's Template Toolkit.
package gott
// TokenType represents the type of a lexical token.
type TokenType int

const (
	TokenError TokenType = iota // error occurred
	TokenEOF                    // end of input

	// Literals
	TokenText   // raw text outside [% %]
	TokenIdent  // identifier (variable name, block name)
	TokenString // "string" or 'string'
	TokenNumber // 123 or 45.67

	// Delimiters
	TokenTagOpen  // [%
	TokenTagClose // %]
	TokenLParen   // (
	TokenRParen   // )
	TokenDot      // .
	TokenPipe     // |
	TokenComma    // ,
	TokenAssign   // =
	TokenDollar   // $ (variable interpolation in paths)

	// Operators
	TokenOr    // ||
	TokenAnd   // &&
	TokenEq    // ==
	TokenNe    // !=
	TokenLt    // <
	TokenLe    // <=
	TokenGt    // >
	TokenGe    // >=
	TokenPlus  // +
	TokenMinus // -
	TokenMul   // *
	TokenDiv   // /
	TokenMod   // % (modulo, only inside tags)

	// Keywords
	TokenIF
	TokenELSIF
	TokenELSE
	TokenUNLESS
	TokenEND
	TokenFOREACH
	TokenIN
	TokenBLOCK
	TokenINCLUDE
	TokenWRAPPER
	TokenSET
	TokenTRY
	TokenCATCH
)

// tokenTypeNames maps each TokenType constant to its display name,
// indexed by the constant's value. The indexed composite literal keeps
// the correspondence explicit even if the const block above grows.
var tokenTypeNames = [...]string{
	TokenError:    "Error",
	TokenEOF:      "EOF",
	TokenText:     "Text",
	TokenIdent:    "Ident",
	TokenString:   "String",
	TokenNumber:   "Number",
	TokenTagOpen:  "[%",
	TokenTagClose: "%]",
	TokenLParen:   "(",
	TokenRParen:   ")",
	TokenDot:      ".",
	TokenPipe:     "|",
	TokenComma:    ",",
	TokenAssign:   "=",
	TokenDollar:   "$",
	TokenOr:       "||",
	TokenAnd:      "&&",
	TokenEq:       "==",
	TokenNe:       "!=",
	TokenLt:       "<",
	TokenLe:       "<=",
	TokenGt:       ">",
	TokenGe:       ">=",
	TokenPlus:     "+",
	TokenMinus:    "-",
	TokenMul:      "*",
	TokenDiv:      "/",
	TokenMod:      "%",
	TokenIF:       "IF",
	TokenELSIF:    "ELSIF",
	TokenELSE:     "ELSE",
	TokenUNLESS:   "UNLESS",
	TokenEND:      "END",
	TokenFOREACH:  "FOREACH",
	TokenIN:       "IN",
	TokenBLOCK:    "BLOCK",
	TokenINCLUDE:  "INCLUDE",
	TokenWRAPPER:  "WRAPPER",
	TokenSET:      "SET",
	TokenTRY:      "TRY",
	TokenCATCH:    "CATCH",
}

// String returns a human-readable name for the token type.
// Values outside the known range yield "Unknown".
func (t TokenType) String() string {
	if t < 0 || int(t) >= len(tokenTypeNames) {
		return "Unknown"
	}
	return tokenTypeNames[t]
}
147
// Position represents a location in the source template,
// used to report lex and parse errors against the original input.
type Position struct {
	Line   int // 1-based line number
	Column int // 1-based column number
	Offset int // byte offset from the start of the input
}
154
// Token represents a lexical token with its type, value, and position.
type Token struct {
	Type  TokenType
	Value string // literal value for idents, strings, numbers, text, and errors
	Pos   Position // source position, for error reporting
}
161
// keywords maps keyword strings to their token types.
// Keys are upper case only, so matching via map lookup is
// case-sensitive (see LookupKeyword).
var keywords = map[string]TokenType{
	"IF":      TokenIF,
	"ELSIF":   TokenELSIF,
	"ELSE":    TokenELSE,
	"UNLESS":  TokenUNLESS,
	"END":     TokenEND,
	"FOREACH": TokenFOREACH,
	"IN":      TokenIN,
	"BLOCK":   TokenBLOCK,
	"INCLUDE": TokenINCLUDE,
	"WRAPPER": TokenWRAPPER,
	"SET":     TokenSET,
	"TRY":     TokenTRY,
	"CATCH":   TokenCATCH,
}
178
179// LookupKeyword returns the token type for an identifier,
180// returning TokenIdent if it's not a keyword
181func LookupKeyword(ident string) TokenType {
182 if tok, ok := keywords[ident]; ok {
183 return tok
184 }
185 return TokenIdent
186}