// Package parser provides Architecture diagram parsing based on architectureParser.ts.
package parser

import (
	"fmt"
	"strings"

	"mermaid-go/pkg/ast"
	"mermaid-go/pkg/lexer"
)
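
// The parser accepts a line-oriented grammar inferred from the parse
// functions below, roughly:
//
//	architecture
//	  title My system title
//	  group g1[Group title] in parentGroup
//	  service s1[Service title] in g1
//	  s1 L -- R s2 : edge title
//
// The bracketed titles and the "in <group>" clauses are optional; edge
// direction letters are L, R, T, or B.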

// ArchitectureParser implements Architecture parsing.
type ArchitectureParser struct {
	tokens  []lexer.Token
	current int
	diagram *ast.ArchitectureDiagram
}

// NewArchitectureParser creates a new Architecture parser.
func NewArchitectureParser() *ArchitectureParser {
	return &ArchitectureParser{
		diagram: ast.NewArchitectureDiagram(),
	}
}
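
// Typical usage (a sketch; the input string and error handling are
// illustrative only):
//
//	p := NewArchitectureParser()
//	diagram, err := p.Parse(src)
//	if err != nil {
//		// handle parse error
//	}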

// Parse parses Architecture syntax.
func (p *ArchitectureParser) Parse(input string) (*ast.ArchitectureDiagram, error) {
	// Tokenize
	l := lexer.NewLexer(input)
	tokens, err := l.Tokenize()
	if err != nil {
		return nil, fmt.Errorf("lexical analysis failed: %w", err)
	}

	// Filter tokens and reset parser state
	p.tokens = lexer.FilterTokens(tokens)
	p.current = 0
	p.diagram = ast.NewArchitectureDiagram()

	// Parse document
	if err := p.parseDocument(); err != nil {
		return nil, fmt.Errorf("syntax analysis failed: %w", err)
	}
	return p.diagram, nil
}

// parseDocument parses the Architecture document.
func (p *ArchitectureParser) parseDocument() error {
	// Expect the 'architecture' keyword
	if !p.check(lexer.TokenID) || p.peek().Value != "architecture" {
		return p.error("expected 'architecture'")
	}
	p.advance()

	// Parse statements until end of input
	for !p.isAtEnd() {
		if err := p.parseStatement(); err != nil {
			return err
		}
	}
	return nil
}

// parseStatement parses an individual Architecture statement.
func (p *ArchitectureParser) parseStatement() error {
	if p.isAtEnd() {
		return nil
	}

	switch {
	case p.check(lexer.TokenNewline):
		p.advance() // Skip newlines
		return nil
	case p.checkKeyword("title"):
		return p.parseTitle()
	case p.checkKeyword("service"):
		return p.parseService()
	case p.checkKeyword("group"):
		return p.parseGroup()
	case p.check(lexer.TokenID):
		// Could be a service definition or an edge
		return p.parseServiceOrEdge()
	default:
		token := p.peek()
		return p.error(fmt.Sprintf("unexpected token: %s", token.Value))
	}
}
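
// A title statement consumes every token up to the end of the line and joins
// the token values with single spaces, so the original spacing of the input
// is not preserved.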

// parseTitle parses title statements.
func (p *ArchitectureParser) parseTitle() error {
	p.advance() // consume 'title'

	var titleParts []string
	for !p.check(lexer.TokenNewline) && !p.isAtEnd() {
		titleParts = append(titleParts, p.advance().Value)
	}
	if len(titleParts) > 0 {
		title := strings.TrimSpace(strings.Join(titleParts, " "))
		p.diagram.Title = &title
	}
	return nil
}
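
// A service statement has the shape "service <id>[<title>] in <groupID>",
// where the bracketed title and the "in" clause are both optional. Any other
// tokens before the end of the line are consumed and ignored.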

// parseService parses service statements.
func (p *ArchitectureParser) parseService() error {
	p.advance() // consume 'service'

	if !p.check(lexer.TokenID) {
		return p.error("expected service ID")
	}
	serviceID := p.advance().Value

	service := &ast.ArchitectureService{
		ID: serviceID,
	}

	// Parse optional service properties
	for !p.check(lexer.TokenNewline) && !p.isAtEnd() {
		if p.check(lexer.TokenOpenBracket) {
			p.advance() // consume '['
			// Parse service title
			var titleParts []string
			for !p.check(lexer.TokenCloseBracket) && !p.isAtEnd() {
				titleParts = append(titleParts, p.advance().Value)
			}
			if len(titleParts) > 0 {
				title := strings.TrimSpace(strings.Join(titleParts, " "))
				service.Title = &title
			}
			if p.check(lexer.TokenCloseBracket) {
				p.advance() // consume ']'
			}
		} else if p.checkKeyword("in") {
			p.advance() // consume 'in'
			if p.check(lexer.TokenID) {
				groupID := p.advance().Value
				service.In = &groupID
			}
		} else {
			p.advance() // consume unknown token
		}
	}

	p.diagram.Services = append(p.diagram.Services, service)
	return nil
}
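
// A group statement has the same shape as a service statement:
// "group <id>[<title>] in <parentGroupID>", with the title and the "in"
// clause both optional.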

// parseGroup parses group statements.
func (p *ArchitectureParser) parseGroup() error {
	p.advance() // consume 'group'

	if !p.check(lexer.TokenID) {
		return p.error("expected group ID")
	}
	groupID := p.advance().Value

	group := &ast.ArchitectureGroup{
		ID: groupID,
	}

	// Parse optional group properties
	for !p.check(lexer.TokenNewline) && !p.isAtEnd() {
		if p.check(lexer.TokenOpenBracket) {
			p.advance() // consume '['
			// Parse group title
			var titleParts []string
			for !p.check(lexer.TokenCloseBracket) && !p.isAtEnd() {
				titleParts = append(titleParts, p.advance().Value)
			}
			if len(titleParts) > 0 {
				title := strings.TrimSpace(strings.Join(titleParts, " "))
				group.Title = &title
			}
			if p.check(lexer.TokenCloseBracket) {
				p.advance() // consume ']'
			}
		} else if p.checkKeyword("in") {
			p.advance() // consume 'in'
			if p.check(lexer.TokenID) {
				parentID := p.advance().Value
				group.In = &parentID
			}
		} else {
			p.advance() // consume unknown token
		}
	}

	p.diagram.Groups = append(p.diagram.Groups, group)
	return nil
}

// parseServiceOrEdge parses either a bare service definition or an edge.
func (p *ArchitectureParser) parseServiceOrEdge() error {
	serviceID := p.advance().Value

	// If the ID is followed by a direction indicator, this is an edge
	if p.checkDirection() {
		return p.parseEdge(serviceID)
	}

	// Otherwise, it's a simple service definition
	service := &ast.ArchitectureService{
		ID: serviceID,
	}

	// Parse optional service properties
	for !p.check(lexer.TokenNewline) && !p.isAtEnd() {
		if p.check(lexer.TokenOpenBracket) {
			p.advance() // consume '['
			// Parse service title
			var titleParts []string
			for !p.check(lexer.TokenCloseBracket) && !p.isAtEnd() {
				titleParts = append(titleParts, p.advance().Value)
			}
			if len(titleParts) > 0 {
				title := strings.TrimSpace(strings.Join(titleParts, " "))
				service.Title = &title
			}
			if p.check(lexer.TokenCloseBracket) {
				p.advance() // consume ']'
			}
		} else {
			p.advance() // consume unknown token
		}
	}

	p.diagram.Services = append(p.diagram.Services, service)
	return nil
}
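
// An edge statement is recognized when an identifier is followed by a
// direction token (see checkDirection); the shape handled below is
// "<lhsID> <lhsDir> -- <rhsDir> <rhsID> : <title>", with the ':' title
// optional. How the "--" and "==" connectors are tokenized depends on the
// lexer; the loop below assumes they surface as TokenMinus and TokenEquals
// tokens.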

// parseEdge parses edge connections.
func (p *ArchitectureParser) parseEdge(lhsID string) error {
	// Parse left direction
	lhsDir := p.parseDirection()

	// Skip connection symbols
	for p.check(lexer.TokenMinus) || p.check(lexer.TokenEquals) {
		p.advance()
	}

	// Parse right direction
	rhsDir := p.parseDirection()

	// Parse target service/group
	if !p.check(lexer.TokenID) {
		return p.error("expected target service/group ID")
	}
	rhsID := p.advance().Value

	edge := &ast.ArchitectureEdge{
		LhsID:  lhsID,
		LhsDir: lhsDir,
		RhsID:  rhsID,
		RhsDir: rhsDir,
	}

	// Parse optional edge title
	if p.check(lexer.TokenColon) {
		p.advance() // consume ':'
		var titleParts []string
		for !p.check(lexer.TokenNewline) && !p.isAtEnd() {
			titleParts = append(titleParts, p.advance().Value)
		}
		if len(titleParts) > 0 {
			title := strings.TrimSpace(strings.Join(titleParts, " "))
			edge.Title = &title
		}
	}

	p.diagram.Edges = append(p.diagram.Edges, edge)
	return nil
}

// checkDirection reports whether the current token is a direction indicator.
func (p *ArchitectureParser) checkDirection() bool {
	if p.isAtEnd() {
		return false
	}
	token := p.peek()
	return token.Type == lexer.TokenID &&
		(token.Value == "L" || token.Value == "R" || token.Value == "T" || token.Value == "B")
}

// parseDirection parses a direction indicator, defaulting to right.
func (p *ArchitectureParser) parseDirection() ast.ArchitectureDirection {
	if !p.check(lexer.TokenID) {
		return ast.ArchitectureDirectionRight // default
	}
	token := p.advance()
	switch token.Value {
	case "L":
		return ast.ArchitectureDirectionLeft
	case "R":
		return ast.ArchitectureDirectionRight
	case "T":
		return ast.ArchitectureDirectionTop
	case "B":
		return ast.ArchitectureDirectionBottom
	default:
		return ast.ArchitectureDirectionRight // default
	}
}
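
// The helpers below implement a conventional token-cursor API over p.tokens:
// peek inspects the current token, advance consumes it, and isAtEnd treats
// both running off the end of the slice and an explicit EOF token as end of
// input.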

// Helper methods

func (p *ArchitectureParser) check(tokenType lexer.TokenType) bool {
	if p.isAtEnd() {
		return false
	}
	return p.peek().Type == tokenType
}

func (p *ArchitectureParser) checkKeyword(keyword string) bool {
	if p.isAtEnd() {
		return false
	}
	token := p.peek()
	return token.Type == lexer.TokenID && strings.EqualFold(token.Value, keyword)
}

func (p *ArchitectureParser) advance() lexer.Token {
	if !p.isAtEnd() {
		p.current++
	}
	return p.previous()
}

func (p *ArchitectureParser) isAtEnd() bool {
	return p.current >= len(p.tokens) || p.peek().Type == lexer.TokenEOF
}

func (p *ArchitectureParser) peek() lexer.Token {
	if p.current >= len(p.tokens) {
		return lexer.Token{Type: lexer.TokenEOF}
	}
	return p.tokens[p.current]
}

func (p *ArchitectureParser) previous() lexer.Token {
	if p.current <= 0 {
		return lexer.Token{Type: lexer.TokenEOF}
	}
	return p.tokens[p.current-1]
}

func (p *ArchitectureParser) error(message string) error {
	token := p.peek()
	return fmt.Errorf("parse error at line %d, column %d: %s (got %s)",
		token.Line, token.Column, message, token.Type.String())
}