journey.go 5.9 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256
  1. // Package parser provides User Journey parsing based on journey.jison
  2. package parser
  3. import (
  4. "fmt"
  5. "strconv"
  6. "strings"
  7. "mermaid-go/pkg/ast"
  8. "mermaid-go/pkg/lexer"
  9. )
// JourneyParser implements User Journey parsing following journey.jison
type JourneyParser struct {
	tokens  []lexer.Token            // filtered token stream produced by the lexer
	current int                      // index of the next token to consume
	diagram *ast.UserJourneyDiagram  // diagram under construction for the current Parse call
}
  16. // NewJourneyParser creates a new Journey parser
  17. func NewJourneyParser() *JourneyParser {
  18. return &JourneyParser{
  19. diagram: &ast.UserJourneyDiagram{
  20. Sections: make([]*ast.UserJourneySection, 0),
  21. Config: make(map[string]any),
  22. },
  23. }
  24. }
  25. // Parse parses User Journey syntax
  26. func (p *JourneyParser) Parse(input string) (*ast.UserJourneyDiagram, error) {
  27. // Tokenize
  28. l := lexer.NewLexer(input)
  29. tokens, err := l.Tokenize()
  30. if err != nil {
  31. return nil, fmt.Errorf("lexical analysis failed: %w", err)
  32. }
  33. // Filter tokens
  34. p.tokens = lexer.FilterTokens(tokens)
  35. p.current = 0
  36. p.diagram = &ast.UserJourneyDiagram{
  37. Sections: make([]*ast.UserJourneySection, 0),
  38. Config: make(map[string]any),
  39. }
  40. // Parse document
  41. err = p.parseDocument()
  42. if err != nil {
  43. return nil, fmt.Errorf("syntax analysis failed: %w", err)
  44. }
  45. return p.diagram, nil
  46. }
  47. // parseDocument parses the User Journey document
  48. func (p *JourneyParser) parseDocument() error {
  49. // Expect journey
  50. if !p.check(lexer.TokenID) || p.peek().Value != "journey" {
  51. return p.error("expected 'journey'")
  52. }
  53. p.advance()
  54. // Parse statements
  55. for !p.isAtEnd() {
  56. if err := p.parseStatement(); err != nil {
  57. return err
  58. }
  59. }
  60. return nil
  61. }
  62. // parseStatement parses individual User Journey statements
  63. func (p *JourneyParser) parseStatement() error {
  64. if p.isAtEnd() {
  65. return nil
  66. }
  67. switch {
  68. case p.check(lexer.TokenNewline):
  69. p.advance() // Skip newlines
  70. return nil
  71. case p.checkKeyword("title"):
  72. return p.parseTitle()
  73. case p.checkKeyword("section"):
  74. return p.parseSection()
  75. case p.check(lexer.TokenID):
  76. // Task definition
  77. return p.parseTask()
  78. default:
  79. token := p.peek()
  80. return p.error(fmt.Sprintf("unexpected token: %s", token.Value))
  81. }
  82. }
  83. // parseTitle parses title statements
  84. func (p *JourneyParser) parseTitle() error {
  85. p.advance() // consume 'title'
  86. var titleParts []string
  87. for !p.check(lexer.TokenNewline) && !p.isAtEnd() {
  88. titleParts = append(titleParts, p.advance().Value)
  89. }
  90. if len(titleParts) > 0 {
  91. title := strings.TrimSpace(strings.Join(titleParts, " "))
  92. p.diagram.Title = &title
  93. }
  94. return nil
  95. }
  96. // parseSection parses section statements
  97. func (p *JourneyParser) parseSection() error {
  98. p.advance() // consume 'section'
  99. var sectionParts []string
  100. for !p.check(lexer.TokenNewline) && !p.isAtEnd() {
  101. sectionParts = append(sectionParts, p.advance().Value)
  102. }
  103. if len(sectionParts) > 0 {
  104. sectionName := strings.TrimSpace(strings.Join(sectionParts, " "))
  105. section := &ast.UserJourneySection{
  106. Name: sectionName,
  107. Tasks: make([]*ast.UserJourneyTask, 0),
  108. }
  109. p.diagram.Sections = append(p.diagram.Sections, section)
  110. }
  111. return nil
  112. }
  113. // parseTask parses task definitions
  114. func (p *JourneyParser) parseTask() error {
  115. // Parse task name
  116. var taskNameParts []string
  117. for !p.check(lexer.TokenColon) && !p.check(lexer.TokenNewline) && !p.isAtEnd() {
  118. taskNameParts = append(taskNameParts, p.advance().Value)
  119. }
  120. if len(taskNameParts) == 0 {
  121. return p.error("expected task name")
  122. }
  123. taskName := strings.TrimSpace(strings.Join(taskNameParts, " "))
  124. // Expect colon
  125. if !p.check(lexer.TokenColon) {
  126. return p.error("expected ':' after task name")
  127. }
  128. p.advance()
  129. // Parse score and people
  130. var taskDataParts []string
  131. for !p.check(lexer.TokenNewline) && !p.isAtEnd() {
  132. taskDataParts = append(taskDataParts, p.advance().Value)
  133. }
  134. task := &ast.UserJourneyTask{
  135. Name: taskName,
  136. People: make([]string, 0),
  137. }
  138. // Parse task data (score : person1, person2, ...)
  139. if len(taskDataParts) > 0 {
  140. taskData := strings.TrimSpace(strings.Join(taskDataParts, " "))
  141. parts := strings.Split(taskData, ":")
  142. // Parse score
  143. if len(parts) > 0 {
  144. scoreStr := strings.TrimSpace(parts[0])
  145. if scoreStr != "" {
  146. if score, err := strconv.ParseFloat(scoreStr, 64); err == nil {
  147. task.Score = &score
  148. }
  149. }
  150. }
  151. // Parse people
  152. if len(parts) > 1 {
  153. peopleStr := strings.TrimSpace(parts[1])
  154. if peopleStr != "" {
  155. people := strings.Split(peopleStr, ",")
  156. for _, person := range people {
  157. person = strings.TrimSpace(person)
  158. if person != "" {
  159. task.People = append(task.People, person)
  160. }
  161. }
  162. }
  163. }
  164. }
  165. // Add task to current section or create default section
  166. if len(p.diagram.Sections) == 0 {
  167. section := &ast.UserJourneySection{
  168. Name: "User Journey",
  169. Tasks: make([]*ast.UserJourneyTask, 0),
  170. }
  171. p.diagram.Sections = append(p.diagram.Sections, section)
  172. }
  173. currentSection := p.diagram.Sections[len(p.diagram.Sections)-1]
  174. currentSection.Tasks = append(currentSection.Tasks, task)
  175. return nil
  176. }
  177. // Helper methods
  178. func (p *JourneyParser) check(tokenType lexer.TokenType) bool {
  179. if p.isAtEnd() {
  180. return false
  181. }
  182. return p.peek().Type == tokenType
  183. }
  184. func (p *JourneyParser) checkKeyword(keyword string) bool {
  185. if p.isAtEnd() {
  186. return false
  187. }
  188. token := p.peek()
  189. return token.Type == lexer.TokenID && strings.ToLower(token.Value) == strings.ToLower(keyword)
  190. }
  191. func (p *JourneyParser) advance() lexer.Token {
  192. if !p.isAtEnd() {
  193. p.current++
  194. }
  195. return p.previous()
  196. }
  197. func (p *JourneyParser) isAtEnd() bool {
  198. return p.current >= len(p.tokens) || p.peek().Type == lexer.TokenEOF
  199. }
  200. func (p *JourneyParser) peek() lexer.Token {
  201. if p.current >= len(p.tokens) {
  202. return lexer.Token{Type: lexer.TokenEOF}
  203. }
  204. return p.tokens[p.current]
  205. }
  206. func (p *JourneyParser) previous() lexer.Token {
  207. if p.current <= 0 {
  208. return lexer.Token{Type: lexer.TokenEOF}
  209. }
  210. return p.tokens[p.current-1]
  211. }
  212. func (p *JourneyParser) error(message string) error {
  213. token := p.peek()
  214. return fmt.Errorf("parse error at line %d, column %d: %s (got %s)",
  215. token.Line, token.Column, message, token.Type.String())
  216. }