|
|
@@ -0,0 +1,623 @@
|
|
|
+// Package parser provides sequence diagram parsing based on sequenceDiagram.jison
|
|
|
+package parser
|
|
|
+
|
|
|
+import (
|
|
|
+ "fmt"
|
|
|
+ "strings"
|
|
|
+
|
|
|
+ "mermaid-go/pkg/ast"
|
|
|
+ "mermaid-go/pkg/lexer"
|
|
|
+)
|
|
|
+
|
|
|
// SequenceParser implements sequence diagram parsing following sequenceDiagram.jison
type SequenceParser struct {
	tokens  []lexer.Token        // filtered token stream produced by the lexer
	current int                  // cursor into tokens: index of the next token to consume
	diagram *ast.SequenceDiagram // AST under construction; reset at the start of each Parse call
}
|
|
|
+
|
|
|
+// NewSequenceParser creates a new sequence parser
|
|
|
+func NewSequenceParser() *SequenceParser {
|
|
|
+ return &SequenceParser{
|
|
|
+ diagram: ast.NewSequenceDiagram(),
|
|
|
+ }
|
|
|
+}
|
|
|
+
|
|
|
+// Parse parses sequence diagram syntax
|
|
|
+func (p *SequenceParser) Parse(input string) (*ast.SequenceDiagram, error) {
|
|
|
+ // Tokenize
|
|
|
+ l := lexer.NewLexer(input)
|
|
|
+ tokens, err := l.Tokenize()
|
|
|
+ if err != nil {
|
|
|
+ return nil, fmt.Errorf("lexical analysis failed: %w", err)
|
|
|
+ }
|
|
|
+
|
|
|
+ // Filter tokens
|
|
|
+ p.tokens = lexer.FilterTokens(tokens)
|
|
|
+ p.current = 0
|
|
|
+ p.diagram = ast.NewSequenceDiagram()
|
|
|
+
|
|
|
+ // Parse document
|
|
|
+ err = p.parseDocument()
|
|
|
+ if err != nil {
|
|
|
+ return nil, fmt.Errorf("syntax analysis failed: %w", err)
|
|
|
+ }
|
|
|
+
|
|
|
+ return p.diagram, nil
|
|
|
+}
|
|
|
+
|
|
|
+// parseDocument parses the sequence diagram document
|
|
|
+func (p *SequenceParser) parseDocument() error {
|
|
|
+ // Expect sequenceDiagram
|
|
|
+ if !p.check(lexer.TokenID) || p.peek().Value != "sequenceDiagram" {
|
|
|
+ return p.error("expected 'sequenceDiagram'")
|
|
|
+ }
|
|
|
+ p.advance()
|
|
|
+
|
|
|
+ // Parse statements
|
|
|
+ for !p.isAtEnd() {
|
|
|
+ if err := p.parseStatement(); err != nil {
|
|
|
+ return err
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ return nil
|
|
|
+}
|
|
|
+
|
|
|
+// parseStatement parses individual sequence diagram statements
|
|
|
+func (p *SequenceParser) parseStatement() error {
|
|
|
+ if p.isAtEnd() {
|
|
|
+ return nil
|
|
|
+ }
|
|
|
+
|
|
|
+ token := p.peek()
|
|
|
+ switch {
|
|
|
+ case p.check(lexer.TokenNewline):
|
|
|
+ p.advance() // Skip newlines
|
|
|
+ return nil
|
|
|
+ case p.checkKeyword("participant"):
|
|
|
+ return p.parseParticipant()
|
|
|
+ case p.checkKeyword("actor"):
|
|
|
+ return p.parseActor()
|
|
|
+ case p.checkKeyword("Note"):
|
|
|
+ return p.parseNote()
|
|
|
+ case p.checkKeyword("loop"):
|
|
|
+ return p.parseLoop()
|
|
|
+ case p.checkKeyword("alt"):
|
|
|
+ return p.parseAlt()
|
|
|
+ case p.checkKeyword("opt"):
|
|
|
+ return p.parseOpt()
|
|
|
+ case p.checkKeyword("par"):
|
|
|
+ return p.parsePar()
|
|
|
+ case p.checkKeyword("box"):
|
|
|
+ return p.parseBox()
|
|
|
+ case p.checkKeyword("activate"):
|
|
|
+ return p.parseActivate()
|
|
|
+ case p.checkKeyword("deactivate"):
|
|
|
+ return p.parseDeactivate()
|
|
|
+ case p.check(lexer.TokenID):
|
|
|
+ // Try to parse as message
|
|
|
+ return p.parseMessage()
|
|
|
+ default:
|
|
|
+ return p.error(fmt.Sprintf("unexpected token: %s", token.Value))
|
|
|
+ }
|
|
|
+}
|
|
|
+
|
|
|
+// parseParticipant parses participant statements
|
|
|
+func (p *SequenceParser) parseParticipant() error {
|
|
|
+ p.advance() // consume 'participant'
|
|
|
+
|
|
|
+ if !p.check(lexer.TokenID) {
|
|
|
+ return p.error("expected participant ID")
|
|
|
+ }
|
|
|
+
|
|
|
+ id := p.advance().Value
|
|
|
+ participant := &ast.SequenceParticipant{
|
|
|
+ ID: id,
|
|
|
+ Name: id,
|
|
|
+ Type: ast.ParticipantTypeParticipant,
|
|
|
+ }
|
|
|
+
|
|
|
+ // Check for 'as' alias
|
|
|
+ if p.checkKeyword("as") {
|
|
|
+ p.advance() // consume 'as'
|
|
|
+ if !p.check(lexer.TokenID) && !p.check(lexer.TokenString) {
|
|
|
+ return p.error("expected participant name after 'as'")
|
|
|
+ }
|
|
|
+ name := p.advance().Value
|
|
|
+ if strings.HasPrefix(name, "\"") && strings.HasSuffix(name, "\"") {
|
|
|
+ name = name[1 : len(name)-1] // Remove quotes
|
|
|
+ }
|
|
|
+ participant.Name = name
|
|
|
+ }
|
|
|
+
|
|
|
+ p.diagram.Participants = append(p.diagram.Participants, participant)
|
|
|
+ return nil
|
|
|
+}
|
|
|
+
|
|
|
+// parseActor parses actor statements (similar to participant but different type)
|
|
|
+func (p *SequenceParser) parseActor() error {
|
|
|
+ p.advance() // consume 'actor'
|
|
|
+
|
|
|
+ if !p.check(lexer.TokenID) {
|
|
|
+ return p.error("expected actor ID")
|
|
|
+ }
|
|
|
+
|
|
|
+ id := p.advance().Value
|
|
|
+ participant := &ast.SequenceParticipant{
|
|
|
+ ID: id,
|
|
|
+ Name: id,
|
|
|
+ Type: ast.ParticipantTypeActor,
|
|
|
+ }
|
|
|
+
|
|
|
+ // Check for 'as' alias
|
|
|
+ if p.checkKeyword("as") {
|
|
|
+ p.advance() // consume 'as'
|
|
|
+ if !p.check(lexer.TokenID) && !p.check(lexer.TokenString) {
|
|
|
+ return p.error("expected actor name after 'as'")
|
|
|
+ }
|
|
|
+ name := p.advance().Value
|
|
|
+ if strings.HasPrefix(name, "\"") && strings.HasSuffix(name, "\"") {
|
|
|
+ name = name[1 : len(name)-1] // Remove quotes
|
|
|
+ }
|
|
|
+ participant.Name = name
|
|
|
+ }
|
|
|
+
|
|
|
+ p.diagram.Participants = append(p.diagram.Participants, participant)
|
|
|
+ return nil
|
|
|
+}
|
|
|
+
|
|
|
+// parseMessage parses sequence diagram messages
|
|
|
+func (p *SequenceParser) parseMessage() error {
|
|
|
+ // Parse: FROM ARROW TO : MESSAGE
|
|
|
+ from := p.advance().Value
|
|
|
+
|
|
|
+ // Parse arrow type
|
|
|
+ msgType, err := p.parseArrowType()
|
|
|
+ if err != nil {
|
|
|
+ return err
|
|
|
+ }
|
|
|
+
|
|
|
+ if !p.check(lexer.TokenID) {
|
|
|
+ return p.error("expected target participant")
|
|
|
+ }
|
|
|
+ to := p.advance().Value
|
|
|
+
|
|
|
+ var message string
|
|
|
+ if p.check(lexer.TokenColon) {
|
|
|
+ p.advance() // consume ':'
|
|
|
+ // Collect message text until newline
|
|
|
+ var msgParts []string
|
|
|
+ for !p.check(lexer.TokenNewline) && !p.isAtEnd() {
|
|
|
+ msgParts = append(msgParts, p.advance().Value)
|
|
|
+ }
|
|
|
+ message = strings.TrimSpace(strings.Join(msgParts, " "))
|
|
|
+ }
|
|
|
+
|
|
|
+ seqMsg := &ast.SequenceMessage{
|
|
|
+ From: from,
|
|
|
+ To: to,
|
|
|
+ Message: message,
|
|
|
+ Type: msgType,
|
|
|
+ }
|
|
|
+
|
|
|
+ p.diagram.Messages = append(p.diagram.Messages, seqMsg)
|
|
|
+
|
|
|
+ // Ensure participants exist
|
|
|
+ p.ensureParticipant(from)
|
|
|
+ p.ensureParticipant(to)
|
|
|
+
|
|
|
+ return nil
|
|
|
+}
|
|
|
+
|
|
|
+// parseArrowType parses arrow types for messages
|
|
|
+func (p *SequenceParser) parseArrowType() (ast.SequenceMessageType, error) {
|
|
|
+ token := p.peek()
|
|
|
+
|
|
|
+ if p.check(lexer.TokenArrowSolid) {
|
|
|
+ p.advance()
|
|
|
+ return ast.MessageTypeSolid, nil
|
|
|
+ } else if p.check(lexer.TokenArrowDotted) {
|
|
|
+ p.advance()
|
|
|
+ return ast.MessageTypeDotted, nil
|
|
|
+ } else if token.Type == lexer.TokenMinus && p.checkNext(lexer.TokenCloseAngle) {
|
|
|
+ p.advance() // consume '-'
|
|
|
+ p.advance() // consume '>'
|
|
|
+ return ast.MessageTypeSolid, nil
|
|
|
+ }
|
|
|
+
|
|
|
+ return "", p.error("expected arrow type")
|
|
|
+}
|
|
|
+
|
|
|
+// parseNote parses note statements
|
|
|
+func (p *SequenceParser) parseNote() error {
|
|
|
+ p.advance() // consume 'Note'
|
|
|
+
|
|
|
+ var placement ast.NotePlace
|
|
|
+ var actor string
|
|
|
+
|
|
|
+ if p.checkKeyword("left") {
|
|
|
+ p.advance()
|
|
|
+ if !p.checkKeyword("of") {
|
|
|
+ return p.error("expected 'of' after 'left'")
|
|
|
+ }
|
|
|
+ p.advance()
|
|
|
+ placement = ast.NotePlaceLeft
|
|
|
+ } else if p.checkKeyword("right") {
|
|
|
+ p.advance()
|
|
|
+ if !p.checkKeyword("of") {
|
|
|
+ return p.error("expected 'of' after 'right'")
|
|
|
+ }
|
|
|
+ p.advance()
|
|
|
+ placement = ast.NotePlaceRight
|
|
|
+ } else if p.checkKeyword("over") {
|
|
|
+ p.advance()
|
|
|
+ placement = ast.NotePlaceOver
|
|
|
+ } else {
|
|
|
+ return p.error("expected note placement (left of, right of, over)")
|
|
|
+ }
|
|
|
+
|
|
|
+ if !p.check(lexer.TokenID) {
|
|
|
+ return p.error("expected participant ID for note")
|
|
|
+ }
|
|
|
+ actor = p.advance().Value
|
|
|
+
|
|
|
+ if !p.check(lexer.TokenColon) {
|
|
|
+ return p.error("expected ':' after participant in note")
|
|
|
+ }
|
|
|
+ p.advance()
|
|
|
+
|
|
|
+ // Collect note text
|
|
|
+ var noteParts []string
|
|
|
+ for !p.check(lexer.TokenNewline) && !p.isAtEnd() {
|
|
|
+ noteParts = append(noteParts, p.advance().Value)
|
|
|
+ }
|
|
|
+ noteText := strings.TrimSpace(strings.Join(noteParts, " "))
|
|
|
+
|
|
|
+ note := &ast.SequenceNote{
|
|
|
+ Actor: actor,
|
|
|
+ Placement: placement,
|
|
|
+ Message: noteText,
|
|
|
+ }
|
|
|
+
|
|
|
+ p.diagram.Notes = append(p.diagram.Notes, note)
|
|
|
+ return nil
|
|
|
+}
|
|
|
+
|
|
|
// Combined-fragment parsers: loop / alt / opt / par blocks terminated by 'end'
|
|
|
+func (p *SequenceParser) parseLoop() error {
|
|
|
+ p.advance() // consume 'loop'
|
|
|
+
|
|
|
+ // Parse loop condition/label
|
|
|
+ var labelParts []string
|
|
|
+ for !p.check(lexer.TokenNewline) && !p.isAtEnd() {
|
|
|
+ labelParts = append(labelParts, p.advance().Value)
|
|
|
+ }
|
|
|
+ label := strings.TrimSpace(strings.Join(labelParts, " "))
|
|
|
+
|
|
|
+ if p.check(lexer.TokenNewline) {
|
|
|
+ p.advance()
|
|
|
+ }
|
|
|
+
|
|
|
+ loop := &ast.SequenceLoop{
|
|
|
+ Label: label,
|
|
|
+ Messages: make([]*ast.SequenceMessage, 0),
|
|
|
+ }
|
|
|
+
|
|
|
+ // Parse statements until 'end'
|
|
|
+ for !p.isAtEnd() {
|
|
|
+ if p.checkKeyword("end") {
|
|
|
+ p.advance()
|
|
|
+ break
|
|
|
+ }
|
|
|
+ if err := p.parseStatement(); err != nil {
|
|
|
+ return err
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ p.diagram.Loops = append(p.diagram.Loops, loop)
|
|
|
+ return nil
|
|
|
+}
|
|
|
+
|
|
|
+func (p *SequenceParser) parseAlt() error {
|
|
|
+ p.advance() // consume 'alt'
|
|
|
+
|
|
|
+ // Parse alt condition/label
|
|
|
+ var labelParts []string
|
|
|
+ for !p.check(lexer.TokenNewline) && !p.isAtEnd() {
|
|
|
+ labelParts = append(labelParts, p.advance().Value)
|
|
|
+ }
|
|
|
+ label := strings.TrimSpace(strings.Join(labelParts, " "))
|
|
|
+
|
|
|
+ if p.check(lexer.TokenNewline) {
|
|
|
+ p.advance()
|
|
|
+ }
|
|
|
+
|
|
|
+ alt := &ast.SequenceAlt{
|
|
|
+ Label: label,
|
|
|
+ IfMessages: make([]*ast.SequenceMessage, 0),
|
|
|
+ ElseMessages: make([]*ast.SequenceMessage, 0),
|
|
|
+ }
|
|
|
+
|
|
|
+ // Parse statements until 'else' or 'end'
|
|
|
+ for !p.isAtEnd() {
|
|
|
+ if p.checkKeyword("else") {
|
|
|
+ p.advance()
|
|
|
+ // Skip to next line
|
|
|
+ if p.check(lexer.TokenNewline) {
|
|
|
+ p.advance()
|
|
|
+ }
|
|
|
+ continue
|
|
|
+ }
|
|
|
+ if p.checkKeyword("end") {
|
|
|
+ p.advance()
|
|
|
+ break
|
|
|
+ }
|
|
|
+ if err := p.parseStatement(); err != nil {
|
|
|
+ return err
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ p.diagram.Alts = append(p.diagram.Alts, alt)
|
|
|
+ return nil
|
|
|
+}
|
|
|
+
|
|
|
+func (p *SequenceParser) parseOpt() error {
|
|
|
+ p.advance() // consume 'opt'
|
|
|
+
|
|
|
+ // Parse opt condition/label
|
|
|
+ var labelParts []string
|
|
|
+ for !p.check(lexer.TokenNewline) && !p.isAtEnd() {
|
|
|
+ labelParts = append(labelParts, p.advance().Value)
|
|
|
+ }
|
|
|
+ label := strings.TrimSpace(strings.Join(labelParts, " "))
|
|
|
+
|
|
|
+ if p.check(lexer.TokenNewline) {
|
|
|
+ p.advance()
|
|
|
+ }
|
|
|
+
|
|
|
+ opt := &ast.SequenceOpt{
|
|
|
+ Label: label,
|
|
|
+ Messages: make([]*ast.SequenceMessage, 0),
|
|
|
+ }
|
|
|
+
|
|
|
+ // Parse statements until 'end'
|
|
|
+ for !p.isAtEnd() {
|
|
|
+ if p.checkKeyword("end") {
|
|
|
+ p.advance()
|
|
|
+ break
|
|
|
+ }
|
|
|
+ if err := p.parseStatement(); err != nil {
|
|
|
+ return err
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ p.diagram.Opts = append(p.diagram.Opts, opt)
|
|
|
+ return nil
|
|
|
+}
|
|
|
+
|
|
|
+func (p *SequenceParser) parsePar() error {
|
|
|
+ p.advance() // consume 'par'
|
|
|
+
|
|
|
+ // Parse first section (no label)
|
|
|
+ if p.check(lexer.TokenNewline) {
|
|
|
+ p.advance()
|
|
|
+ }
|
|
|
+
|
|
|
+ par := &ast.SequencePar{
|
|
|
+ Sections: make([]ast.SequenceParSection, 0),
|
|
|
+ }
|
|
|
+
|
|
|
+ currentSection := ast.SequenceParSection{
|
|
|
+ Messages: make([]*ast.SequenceMessage, 0),
|
|
|
+ }
|
|
|
+
|
|
|
+ // Parse statements until 'and' or 'end'
|
|
|
+ for !p.isAtEnd() {
|
|
|
+ if p.checkKeyword("and") {
|
|
|
+ // Save current section and start new one
|
|
|
+ par.Sections = append(par.Sections, currentSection)
|
|
|
+ p.advance() // consume 'and'
|
|
|
+
|
|
|
+ // Parse label for new section
|
|
|
+ var labelParts []string
|
|
|
+ for !p.check(lexer.TokenNewline) && !p.isAtEnd() {
|
|
|
+ labelParts = append(labelParts, p.advance().Value)
|
|
|
+ }
|
|
|
+ label := strings.TrimSpace(strings.Join(labelParts, " "))
|
|
|
+
|
|
|
+ currentSection = ast.SequenceParSection{
|
|
|
+ Label: &label,
|
|
|
+ Messages: make([]*ast.SequenceMessage, 0),
|
|
|
+ }
|
|
|
+
|
|
|
+ if p.check(lexer.TokenNewline) {
|
|
|
+ p.advance()
|
|
|
+ }
|
|
|
+ continue
|
|
|
+ }
|
|
|
+ if p.checkKeyword("end") {
|
|
|
+ p.advance()
|
|
|
+ break
|
|
|
+ }
|
|
|
+ if err := p.parseStatement(); err != nil {
|
|
|
+ return err
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ // Add final section
|
|
|
+ par.Sections = append(par.Sections, currentSection)
|
|
|
+ p.diagram.Pars = append(p.diagram.Pars, par)
|
|
|
+ return nil
|
|
|
+}
|
|
|
+
|
|
|
+func (p *SequenceParser) parseBox() error {
|
|
|
+ p.advance() // consume 'box'
|
|
|
+
|
|
|
+ var name string
|
|
|
+ var color *string
|
|
|
+
|
|
|
+ // Parse box name and optional color
|
|
|
+ if p.check(lexer.TokenString) {
|
|
|
+ name = p.advance().Value
|
|
|
+ // Remove quotes
|
|
|
+ if strings.HasPrefix(name, "\"") && strings.HasSuffix(name, "\"") {
|
|
|
+ name = name[1 : len(name)-1]
|
|
|
+ }
|
|
|
+ } else if p.check(lexer.TokenID) {
|
|
|
+ name = p.advance().Value
|
|
|
+ }
|
|
|
+
|
|
|
+ // Check for color
|
|
|
+ if p.check(lexer.TokenID) {
|
|
|
+ colorVal := p.advance().Value
|
|
|
+ color = &colorVal
|
|
|
+ }
|
|
|
+
|
|
|
+ box := &ast.SequenceBox{
|
|
|
+ Name: name,
|
|
|
+ Color: color,
|
|
|
+ Participants: make([]string, 0),
|
|
|
+ }
|
|
|
+
|
|
|
+ // Parse participants until 'end'
|
|
|
+ for !p.isAtEnd() && !p.check(lexer.TokenNewline) {
|
|
|
+ if p.check(lexer.TokenID) {
|
|
|
+ participant := p.advance().Value
|
|
|
+ box.Participants = append(box.Participants, participant)
|
|
|
+ } else {
|
|
|
+ break
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ p.diagram.Boxes = append(p.diagram.Boxes, box)
|
|
|
+ return nil
|
|
|
+}
|
|
|
+
|
|
|
+func (p *SequenceParser) parseActivate() error {
|
|
|
+ p.advance() // consume 'activate'
|
|
|
+
|
|
|
+ if !p.check(lexer.TokenID) {
|
|
|
+ return p.error("expected participant ID after 'activate'")
|
|
|
+ }
|
|
|
+
|
|
|
+ actor := p.advance().Value
|
|
|
+
|
|
|
+ activation := &ast.SequenceActivation{
|
|
|
+ Actor: actor,
|
|
|
+ Type: ast.ActivationTypeActivate,
|
|
|
+ }
|
|
|
+
|
|
|
+ p.diagram.Activations = append(p.diagram.Activations, activation)
|
|
|
+ return nil
|
|
|
+}
|
|
|
+
|
|
|
+func (p *SequenceParser) parseDeactivate() error {
|
|
|
+ p.advance() // consume 'deactivate'
|
|
|
+
|
|
|
+ if !p.check(lexer.TokenID) {
|
|
|
+ return p.error("expected participant ID after 'deactivate'")
|
|
|
+ }
|
|
|
+
|
|
|
+ actor := p.advance().Value
|
|
|
+
|
|
|
+ activation := &ast.SequenceActivation{
|
|
|
+ Actor: actor,
|
|
|
+ Type: ast.ActivationTypeDeactivate,
|
|
|
+ }
|
|
|
+
|
|
|
+ p.diagram.Activations = append(p.diagram.Activations, activation)
|
|
|
+ return nil
|
|
|
+}
|
|
|
+
|
|
|
+// ensureParticipant ensures a participant exists, creating it if needed
|
|
|
+func (p *SequenceParser) ensureParticipant(id string) {
|
|
|
+ for _, participant := range p.diagram.Participants {
|
|
|
+ if participant.ID == id {
|
|
|
+ return
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ // Create participant if it doesn't exist
|
|
|
+ participant := &ast.SequenceParticipant{
|
|
|
+ ID: id,
|
|
|
+ Name: id,
|
|
|
+ Type: ast.ParticipantTypeParticipant,
|
|
|
+ }
|
|
|
+ p.diagram.Participants = append(p.diagram.Participants, participant)
|
|
|
+}
|
|
|
+
|
|
|
+// Helper methods
|
|
|
+func (p *SequenceParser) check(tokenType lexer.TokenType) bool {
|
|
|
+ if p.isAtEnd() {
|
|
|
+ return false
|
|
|
+ }
|
|
|
+ return p.peek().Type == tokenType
|
|
|
+}
|
|
|
+
|
|
|
+func (p *SequenceParser) checkNext(tokenType lexer.TokenType) bool {
|
|
|
+ if p.current+1 >= len(p.tokens) {
|
|
|
+ return false
|
|
|
+ }
|
|
|
+ return p.tokens[p.current+1].Type == tokenType
|
|
|
+}
|
|
|
+
|
|
|
+func (p *SequenceParser) checkKeyword(keyword string) bool {
|
|
|
+ if p.isAtEnd() {
|
|
|
+ return false
|
|
|
+ }
|
|
|
+ token := p.peek()
|
|
|
+ return token.Type == lexer.TokenID && strings.ToLower(token.Value) == strings.ToLower(keyword)
|
|
|
+}
|
|
|
+
|
|
|
+func (p *SequenceParser) advance() lexer.Token {
|
|
|
+ if !p.isAtEnd() {
|
|
|
+ p.current++
|
|
|
+ }
|
|
|
+ return p.previous()
|
|
|
+}
|
|
|
+
|
|
|
+func (p *SequenceParser) isAtEnd() bool {
|
|
|
+ return p.current >= len(p.tokens) || p.peek().Type == lexer.TokenEOF
|
|
|
+}
|
|
|
+
|
|
|
+func (p *SequenceParser) peek() lexer.Token {
|
|
|
+ if p.current >= len(p.tokens) {
|
|
|
+ return lexer.Token{Type: lexer.TokenEOF}
|
|
|
+ }
|
|
|
+ return p.tokens[p.current]
|
|
|
+}
|
|
|
+
|
|
|
+func (p *SequenceParser) previous() lexer.Token {
|
|
|
+ if p.current <= 0 {
|
|
|
+ return lexer.Token{Type: lexer.TokenEOF}
|
|
|
+ }
|
|
|
+ return p.tokens[p.current-1]
|
|
|
+}
|
|
|
+
|
|
|
+func (p *SequenceParser) error(message string) error {
|
|
|
+ token := p.peek()
|
|
|
+ return fmt.Errorf("parse error at line %d, column %d: %s (got %s)",
|
|
|
+ token.Line, token.Column, message, token.Type)
|
|
|
+}
|
|
|
+
|
|
|
+func (p *SequenceParser) skipToEnd(endKeyword string) error {
|
|
|
+ for !p.isAtEnd() {
|
|
|
+ if p.checkKeyword(endKeyword) {
|
|
|
+ p.advance()
|
|
|
+ break
|
|
|
+ }
|
|
|
+ p.advance()
|
|
|
+ }
|
|
|
+ return nil
|
|
|
+}
|
|
|
+
|
|
|
+func (p *SequenceParser) skipToNextStatement() error {
|
|
|
+ for !p.isAtEnd() && !p.check(lexer.TokenNewline) {
|
|
|
+ p.advance()
|
|
|
+ }
|
|
|
+ if p.check(lexer.TokenNewline) {
|
|
|
+ p.advance()
|
|
|
+ }
|
|
|
+ return nil
|
|
|
+}
|