Skip to content

Commit 00a7507

Browse files
committed
docs: update documentation
Signed-off-by: Ian Lewis <ian@ianlewis.org>
1 parent 9de2890 commit 00a7507

File tree

1 file changed

+25
-24
lines changed

1 file changed

+25
-24
lines changed

README.md

Lines changed: 25 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -130,9 +130,9 @@ in addition to the underlying reader's position. When the token has been fully
130130
processed it can be emitted to a channel for further processing by the `Parser`.
131131

132132
Developers implement the token processing portion of the lexer by implementing
133-
`LexState` interface for each relevant lexer state. A `CustomLexerCursor` is
134-
passed to each `LexState` during processing and includes a number of methods
135-
that can be used to advance through the input text.
133+
`LexState` interface for each relevant lexer state. A `LexCursor` is passed to
134+
each `LexState` during processing and includes a number of methods that can be
135+
used to advance through the input text.
136136

137137
For example, consider the following simple template language.
138138

@@ -211,7 +211,7 @@ type LexState interface {
211211
// Run returns the next state to transition to or an error. If the returned
212212
// next state is nil or the returned error is io.EOF then the Lexer
213213
// finishes processing normally.
214-
Run(context.Context, *CustomLexerCursor) (LexState, error)
214+
Run(context.Context, *LexCursor) (LexState, error)
215215
}
216216
```
217217

@@ -238,23 +238,23 @@ advancing over the text.
238238

239239
```go
240240
// lexText tokenizes normal text.
241-
func lexText(ctx context.Context, c *lexparse.CustomLexerCursor) (lexparse.LexState, error) {
241+
func lexText(ctx context.Context, cur *lexparse.LexCursor) (lexparse.LexState, error) {
242242
for {
243-
p := c.PeekN(2)
243+
p := cur.PeekN(2)
244244
switch string(p) {
245245
case tokenBlockStart, tokenVarStart:
246-
if c.Width() > 0 {
247-
c.Emit(lexTypeText)
246+
if cur.Width() > 0 {
247+
cur.Emit(lexTypeText)
248248
}
249249
return lexparse.LexStateFn(lexCode), nil
250250
default:
251251
}
252252

253253
// Advance the input.
254-
if !c.Advance() {
254+
if !cur.Advance() {
255255
// End of input. Emit the text up to this point.
256-
if c.Width() > 0 {
257-
c.Emit(lexTypeText)
256+
if cur.Width() > 0 {
257+
cur.Emit(lexTypeText)
258258
}
259259
return nil, nil
260260
}
@@ -326,7 +326,7 @@ flowchart-elk TD
326326

327327
Similar to the lexer API, each parser state is represented by an object
328328
implementing the `ParseState` interface. It contains only a single `Run` method
329-
which handles processing input tokens while in that state. A `ParserContext` is
329+
which handles processing input tokens while in that state. A `ParseCursor` is
330330
passed to each `ParseState` during processing and includes a number of methods
331331
that can be used to examine the current token, advance to the next token, and
332332
manipulate the AST.
@@ -335,10 +335,11 @@ manipulate the AST.
335335
// ParseState is the state of the current parsing state machine. It defines the
336336
// logic to process the current state and returns the next state.
337337
type ParseState[V comparable] interface {
338-
// Run returns the next state to transition to or an error. If the returned
339-
// next state is nil or the returned error is io.EOF then the Lexer
340-
// finishes processing normally.
341-
Run(*ParserContext[V]) (ParseState[V], error)
338+
// Run executes the logic at the current state, returning an error if one is
339+
// encountered. Implementations are expected to add new [Node] objects to
340+
// the AST using [Parser.Push] or [Parser.Node]. As necessary, new parser
341+
// state should be pushed onto the stack using [Parser.PushState].
342+
Run(ctx context.Context, cur *ParseCursor[V]) error
342343
}
343344
```
344345

@@ -377,16 +378,16 @@ Here we push the later relevant expected state onto the parser's stack.
377378

378379
```go
379380
// parseSeq delegates to another parse function based on token type.
380-
func parseSeq(ctx *lexparse.ParserContext[*tmplNode]) error {
381-
token := ctx.Peek()
381+
func parseSeq(ctx context.Context, cur *lexparse.ParseCursor[*tmplNode]) error {
382+
token := cur.Peek(ctx)
382383

383384
switch token.Type {
384385
case lexTypeText:
385-
ctx.PushState(lexparse.ParseStateFn(parseText))
386+
cur.PushState(lexparse.ParseStateFn(parseText))
386387
case lexTypeVarStart:
387-
ctx.PushState(lexparse.ParseStateFn(parseVarStart))
388+
cur.PushState(lexparse.ParseStateFn(parseVarStart))
388389
case lexTypeBlockStart:
389-
ctx.PushState(lexparse.ParseStateFn(parseBlockStart))
390+
cur.PushState(lexparse.ParseStateFn(parseBlockStart))
390391
}
391392

392393
return nil
@@ -399,11 +400,11 @@ are pushed in reverse order so that they are handled in the order listed.
399400

400401
```go
401402
// parseVarStart handles var start (e.g. '{{').
402-
func parseVarStart(ctx *lexparse.ParserContext[*tmplNode]) error {
403+
func parseVarStart(ctx context.Context, cur *lexparse.ParseCursor[*tmplNode]) error {
403404
// Consume the var start token.
404-
_ = ctx.Next()
405+
_ = cur.Next(ctx)
405406

406-
ctx.PushState(
407+
cur.PushState(
407408
lexparse.ParseStateFn(parseVar),
408409
lexparse.ParseStateFn(parseVarEnd),
409410
)

0 commit comments

Comments
 (0)