| 123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233 |
- // Package parser provides Timeline parsing based on timeline.jison
- package parser
- import (
- "fmt"
- "strings"
- "mermaid-go/pkg/ast"
- "mermaid-go/pkg/lexer"
- )
// TimelineParser implements Timeline parsing following timeline.jison
type TimelineParser struct {
	tokens  []lexer.Token        // filtered token stream being consumed
	current int                  // index of the next token to consume
	diagram *ast.TimelineDiagram // diagram under construction; returned by Parse
}
- // NewTimelineParser creates a new Timeline parser
- func NewTimelineParser() *TimelineParser {
- return &TimelineParser{
- diagram: &ast.TimelineDiagram{
- Sections: make([]*ast.TimelineSection, 0),
- Config: make(map[string]any),
- },
- }
- }
- // Parse parses Timeline syntax
- func (p *TimelineParser) Parse(input string) (*ast.TimelineDiagram, error) {
- // Tokenize
- l := lexer.NewLexer(input)
- tokens, err := l.Tokenize()
- if err != nil {
- return nil, fmt.Errorf("lexical analysis failed: %w", err)
- }
- // Filter tokens
- p.tokens = lexer.FilterTokens(tokens)
- p.current = 0
- p.diagram = &ast.TimelineDiagram{
- Sections: make([]*ast.TimelineSection, 0),
- Config: make(map[string]any),
- }
- // Parse document
- err = p.parseDocument()
- if err != nil {
- return nil, fmt.Errorf("syntax analysis failed: %w", err)
- }
- return p.diagram, nil
- }
- // parseDocument parses the Timeline document
- func (p *TimelineParser) parseDocument() error {
- // Expect timeline
- if !p.check(lexer.TokenID) || p.peek().Value != "timeline" {
- return p.error("expected 'timeline'")
- }
- p.advance()
- // Parse statements
- for !p.isAtEnd() {
- if err := p.parseStatement(); err != nil {
- return err
- }
- }
- return nil
- }
- // parseStatement parses individual Timeline statements
- func (p *TimelineParser) parseStatement() error {
- if p.isAtEnd() {
- return nil
- }
- switch {
- case p.check(lexer.TokenNewline):
- p.advance() // Skip newlines
- return nil
- case p.checkKeyword("title"):
- return p.parseTitle()
- case p.checkKeyword("section"):
- return p.parseSection()
- case p.check(lexer.TokenColon):
- return p.parseEvent()
- case p.check(lexer.TokenID):
- // Period definition
- return p.parsePeriod()
- default:
- token := p.peek()
- return p.error(fmt.Sprintf("unexpected token: %s", token.Value))
- }
- }
- // parseTitle parses title statements
- func (p *TimelineParser) parseTitle() error {
- p.advance() // consume 'title'
- var titleParts []string
- for !p.check(lexer.TokenNewline) && !p.isAtEnd() {
- titleParts = append(titleParts, p.advance().Value)
- }
- if len(titleParts) > 0 {
- title := strings.TrimSpace(strings.Join(titleParts, " "))
- p.diagram.Title = &title
- }
- return nil
- }
- // parseSection parses section statements
- func (p *TimelineParser) parseSection() error {
- p.advance() // consume 'section'
- var sectionParts []string
- for !p.check(lexer.TokenNewline) && !p.isAtEnd() {
- sectionParts = append(sectionParts, p.advance().Value)
- }
- if len(sectionParts) > 0 {
- sectionName := strings.TrimSpace(strings.Join(sectionParts, " "))
- section := &ast.TimelineSection{
- Name: sectionName,
- Events: make([]*ast.TimelineEvent, 0),
- }
- p.diagram.Sections = append(p.diagram.Sections, section)
- }
- return nil
- }
- // parseEvent parses event statements (starting with :)
- func (p *TimelineParser) parseEvent() error {
- p.advance() // consume ':'
- var eventParts []string
- for !p.check(lexer.TokenNewline) && !p.isAtEnd() {
- eventParts = append(eventParts, p.advance().Value)
- }
- if len(eventParts) > 0 {
- eventText := strings.TrimSpace(strings.Join(eventParts, " "))
- event := &ast.TimelineEvent{
- Name: eventText,
- }
- // Add to current section or create default section
- if len(p.diagram.Sections) == 0 {
- section := &ast.TimelineSection{
- Name: "Timeline",
- Events: make([]*ast.TimelineEvent, 0),
- }
- p.diagram.Sections = append(p.diagram.Sections, section)
- }
- currentSection := p.diagram.Sections[len(p.diagram.Sections)-1]
- currentSection.Events = append(currentSection.Events, event)
- }
- return nil
- }
- // parsePeriod parses period statements
- func (p *TimelineParser) parsePeriod() error {
- var periodParts []string
- for !p.check(lexer.TokenColon) && !p.check(lexer.TokenNewline) && !p.isAtEnd() {
- periodParts = append(periodParts, p.advance().Value)
- }
- if len(periodParts) > 0 {
- periodName := strings.TrimSpace(strings.Join(periodParts, " "))
- // Create a section for this period
- section := &ast.TimelineSection{
- Name: periodName,
- Events: make([]*ast.TimelineEvent, 0),
- }
- p.diagram.Sections = append(p.diagram.Sections, section)
- }
- return nil
- }
- // Helper methods
- func (p *TimelineParser) check(tokenType lexer.TokenType) bool {
- if p.isAtEnd() {
- return false
- }
- return p.peek().Type == tokenType
- }
- func (p *TimelineParser) checkKeyword(keyword string) bool {
- if p.isAtEnd() {
- return false
- }
- token := p.peek()
- return token.Type == lexer.TokenID && strings.ToLower(token.Value) == strings.ToLower(keyword)
- }
- func (p *TimelineParser) advance() lexer.Token {
- if !p.isAtEnd() {
- p.current++
- }
- return p.previous()
- }
- func (p *TimelineParser) isAtEnd() bool {
- return p.current >= len(p.tokens) || p.peek().Type == lexer.TokenEOF
- }
- func (p *TimelineParser) peek() lexer.Token {
- if p.current >= len(p.tokens) {
- return lexer.Token{Type: lexer.TokenEOF}
- }
- return p.tokens[p.current]
- }
- func (p *TimelineParser) previous() lexer.Token {
- if p.current <= 0 {
- return lexer.Token{Type: lexer.TokenEOF}
- }
- return p.tokens[p.current-1]
- }
- func (p *TimelineParser) error(message string) error {
- token := p.peek()
- return fmt.Errorf("parse error at line %d, column %d: %s (got %s)",
- token.Line, token.Column, message, token.Type.String())
- }
|