// Package parser provides User Journey parsing based on journey.jison
package parser

import (
	"fmt"
	"strconv"
	"strings"

	"mermaid-go/pkg/ast"
	"mermaid-go/pkg/lexer"
)

// JourneyParser implements User Journey parsing following journey.jison
type JourneyParser struct {
	tokens  []lexer.Token
	current int
	diagram *ast.UserJourneyDiagram
}

// NewJourneyParser creates a new Journey parser
func NewJourneyParser() *JourneyParser {
	return &JourneyParser{
		diagram: &ast.UserJourneyDiagram{
			Sections: make([]*ast.UserJourneySection, 0),
			Config:   make(map[string]any),
		},
	}
}

// Parse parses User Journey syntax
func (p *JourneyParser) Parse(input string) (*ast.UserJourneyDiagram, error) {
	// Tokenize
	l := lexer.NewLexer(input)
	tokens, err := l.Tokenize()
	if err != nil {
		return nil, fmt.Errorf("lexical analysis failed: %w", err)
	}

	// Filter tokens and reset parser state
	p.tokens = lexer.FilterTokens(tokens)
	p.current = 0
	p.diagram = &ast.UserJourneyDiagram{
		Sections: make([]*ast.UserJourneySection, 0),
		Config:   make(map[string]any),
	}

	// Parse document
	if err := p.parseDocument(); err != nil {
		return nil, fmt.Errorf("syntax analysis failed: %w", err)
	}

	return p.diagram, nil
}

// parseDocument parses the User Journey document
func (p *JourneyParser) parseDocument() error {
	// Expect the 'journey' keyword
	if !p.check(lexer.TokenID) || p.peek().Value != "journey" {
		return p.error("expected 'journey'")
	}
	p.advance()

	// Parse statements
	for !p.isAtEnd() {
		if err := p.parseStatement(); err != nil {
			return err
		}
	}

	return nil
}

// parseStatement parses individual User Journey statements
func (p *JourneyParser) parseStatement() error {
	if p.isAtEnd() {
		return nil
	}

	switch {
	case p.check(lexer.TokenNewline):
		p.advance() // Skip newlines
		return nil
	case p.checkKeyword("title"):
		return p.parseTitle()
	case p.checkKeyword("section"):
		return p.parseSection()
	case p.check(lexer.TokenID):
		// Task definition
		return p.parseTask()
	default:
		token := p.peek()
		return p.error(fmt.Sprintf("unexpected token: %s", token.Value))
	}
}

// parseTitle parses title statements
func (p *JourneyParser) parseTitle() error {
	p.advance() // consume 'title'

	var titleParts []string
	for !p.check(lexer.TokenNewline) && !p.isAtEnd() {
		titleParts = append(titleParts, p.advance().Value)
	}

	if len(titleParts) > 0 {
		title := strings.TrimSpace(strings.Join(titleParts, " "))
		p.diagram.Title = &title
	}

	return nil
}

// parseSection parses section statements
func (p *JourneyParser) parseSection() error {
	p.advance() // consume 'section'

	var sectionParts []string
	for !p.check(lexer.TokenNewline) && !p.isAtEnd() {
		sectionParts = append(sectionParts, p.advance().Value)
	}

	if len(sectionParts) > 0 {
		sectionName := strings.TrimSpace(strings.Join(sectionParts, " "))
		section := &ast.UserJourneySection{
			Name:  sectionName,
			Tasks: make([]*ast.UserJourneyTask, 0),
		}
		p.diagram.Sections = append(p.diagram.Sections, section)
	}

	return nil
}

// parseTask parses task definitions
func (p *JourneyParser) parseTask() error {
	// Parse task name
	var taskNameParts []string
	for !p.check(lexer.TokenColon) && !p.check(lexer.TokenNewline) && !p.isAtEnd() {
		taskNameParts = append(taskNameParts, p.advance().Value)
	}

	if len(taskNameParts) == 0 {
		return p.error("expected task name")
	}

	taskName := strings.TrimSpace(strings.Join(taskNameParts, " "))

	// Expect colon
	if !p.check(lexer.TokenColon) {
		return p.error("expected ':' after task name")
	}
	p.advance()

	// Parse score and people
	var taskDataParts []string
	for !p.check(lexer.TokenNewline) && !p.isAtEnd() {
		taskDataParts = append(taskDataParts, p.advance().Value)
	}

	task := &ast.UserJourneyTask{
		Name:   taskName,
		People: make([]string, 0),
	}

	// Parse task data (score : person1, person2, ...)
	if len(taskDataParts) > 0 {
		taskData := strings.TrimSpace(strings.Join(taskDataParts, " "))
		parts := strings.Split(taskData, ":")

		// Parse score
		if len(parts) > 0 {
			scoreStr := strings.TrimSpace(parts[0])
			if scoreStr != "" {
				if score, err := strconv.ParseFloat(scoreStr, 64); err == nil {
					task.Score = &score
				}
			}
		}

		// Parse people
		if len(parts) > 1 {
			peopleStr := strings.TrimSpace(parts[1])
			if peopleStr != "" {
				people := strings.Split(peopleStr, ",")
				for _, person := range people {
					person = strings.TrimSpace(person)
					if person != "" {
						task.People = append(task.People, person)
					}
				}
			}
		}
	}

	// Add task to current section, creating a default section if none exists
	if len(p.diagram.Sections) == 0 {
		section := &ast.UserJourneySection{
			Name:  "User Journey",
			Tasks: make([]*ast.UserJourneyTask, 0),
		}
		p.diagram.Sections = append(p.diagram.Sections, section)
	}

	currentSection := p.diagram.Sections[len(p.diagram.Sections)-1]
	currentSection.Tasks = append(currentSection.Tasks, task)

	return nil
}

// Helper methods

func (p *JourneyParser) check(tokenType lexer.TokenType) bool {
	if p.isAtEnd() {
		return false
	}
	return p.peek().Type == tokenType
}

func (p *JourneyParser) checkKeyword(keyword string) bool {
	if p.isAtEnd() {
		return false
	}
	token := p.peek()
	return token.Type == lexer.TokenID && strings.EqualFold(token.Value, keyword)
}

func (p *JourneyParser) advance() lexer.Token {
	if !p.isAtEnd() {
		p.current++
	}
	return p.previous()
}

func (p *JourneyParser) isAtEnd() bool {
	return p.current >= len(p.tokens) || p.peek().Type == lexer.TokenEOF
}

func (p *JourneyParser) peek() lexer.Token {
	if p.current >= len(p.tokens) {
		return lexer.Token{Type: lexer.TokenEOF}
	}
	return p.tokens[p.current]
}

func (p *JourneyParser) previous() lexer.Token {
	if p.current <= 0 {
		return lexer.Token{Type: lexer.TokenEOF}
	}
	return p.tokens[p.current-1]
}

func (p *JourneyParser) error(message string) error {
	token := p.peek()
	return fmt.Errorf("parse error at line %d, column %d: %s (got %s)",
		token.Line, token.Column, message, token.Type.String())
}
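// Usage sketch (illustrative only; assumes the mermaid-go/pkg/lexer package
// tokenizes words as TokenID and emits TokenColon/TokenNewline as referenced
// above, and that ast.UserJourneyDiagram exposes Title, Sections, and Tasks as
// used in this file):
//
//	p := NewJourneyParser()
//	diagram, err := p.Parse("journey\n" +
//		"    title My working day\n" +
//		"    section Go to work\n" +
//		"    Make tea: 5: Me\n")
//	if err != nil {
//		// handle parse error
//	}
//	if diagram.Title != nil {
//		fmt.Println(*diagram.Title) // "My working day"
//	}
//	for _, s := range diagram.Sections {
//		fmt.Println(s.Name, len(s.Tasks)) // "Go to work 1"
//	}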