flowchart.go 20 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837
  1. // Package parser provides syntax analysis for Mermaid diagrams.
  2. // Based on the grammar rules from flow.jison in mermaid.js
  3. package parser
  4. import (
  5. "fmt"
  6. "strings"
  7. "mermaid-go/pkg/ast"
  8. "mermaid-go/pkg/lexer"
  9. )
// Parser implements recursive descent parsing for Mermaid flowcharts.
// Following the grammar structure from flow.jison.
type Parser struct {
	tokens  []lexer.Token // filtered token stream (whitespace/comments removed)
	current int           // index of the next token to consume
	flowDB  *FlowDB       // accumulated parse state (vertices, edges, classes, ...)
}
// FlowDB manages the state during parsing, mirroring mermaid.js FlowDB.
type FlowDB struct {
	vertexCounter int // incremented per new vertex; used to build unique DOM IDs

	vertices       map[string]*ast.FlowVertex   // vertices keyed by ID
	edges          []*ast.FlowEdge              // edges in declaration order
	classes        map[string]*ast.FlowClass    // classDef definitions keyed by class name
	subGraphs      []*ast.FlowSubGraph          // subgraphs in declaration order
	subGraphLookup map[string]*ast.FlowSubGraph // subgraphs keyed by ID

	tooltips  map[string]string // node ID -> tooltip text (not populated in this file — TODO confirm writer)
	direction string            // graph direction token value (e.g. TD, LR)
	version   string            // renderer version tag; defaults to "gen-2"

	// defaultStyle / defaultInterpolate are never written in this file;
	// presumably reserved for linkStyle defaults — verify against callers.
	defaultStyle       []string
	defaultInterpolate string
}
  31. // NewFlowDB creates a new flow database
  32. func NewFlowDB() *FlowDB {
  33. return &FlowDB{
  34. vertices: make(map[string]*ast.FlowVertex),
  35. edges: make([]*ast.FlowEdge, 0),
  36. classes: make(map[string]*ast.FlowClass),
  37. subGraphs: make([]*ast.FlowSubGraph, 0),
  38. subGraphLookup: make(map[string]*ast.FlowSubGraph),
  39. tooltips: make(map[string]string),
  40. version: "gen-2",
  41. }
  42. }
  43. // NewParser creates a new parser
  44. func NewParser() *Parser {
  45. return &Parser{
  46. flowDB: NewFlowDB(),
  47. }
  48. }
  49. // NewFlowchartParser creates a new flowchart parser (alias for NewParser)
  50. func NewFlowchartParser() *Parser {
  51. return NewParser()
  52. }
  53. // Parse parses the input string and returns a flowchart diagram
  54. func (p *Parser) Parse(input string) (ast.Diagram, error) {
  55. // Tokenize
  56. l := lexer.NewLexer(input)
  57. tokens, err := l.Tokenize()
  58. if err != nil {
  59. return nil, fmt.Errorf("lexical analysis failed: %w", err)
  60. }
  61. // Filter out whitespace and comments
  62. p.tokens = lexer.FilterTokens(tokens)
  63. p.current = 0
  64. // Reset parser state
  65. p.flowDB = NewFlowDB()
  66. // Parse according to grammar
  67. err = p.parseDocument()
  68. if err != nil {
  69. return nil, fmt.Errorf("syntax analysis failed: %w", err)
  70. }
  71. // Build final flowchart
  72. return p.buildFlowchart(), nil
  73. }
  74. // parseDocument implements the top-level grammar rule
  75. // document: graphStatement | document graphStatement
  76. func (p *Parser) parseDocument() error {
  77. for !p.isAtEnd() {
  78. if err := p.parseStatement(); err != nil {
  79. return err
  80. }
  81. }
  82. return nil
  83. }
  84. // parseStatement parses individual statements
  85. func (p *Parser) parseStatement() error {
  86. if p.isAtEnd() {
  87. return nil
  88. }
  89. token := p.peek()
  90. switch token.Type {
  91. case lexer.TokenGraph:
  92. return p.parseGraphStatement()
  93. case lexer.TokenSubgraph:
  94. return p.parseSubgraphStatement()
  95. case lexer.TokenClass:
  96. return p.parseClassStatement()
  97. case lexer.TokenClassDef:
  98. return p.parseClassDefStatement()
  99. case lexer.TokenStyle:
  100. return p.parseStyleStatement()
  101. case lexer.TokenLinkStyle:
  102. return p.parseLinkStyleStatement()
  103. case lexer.TokenClick:
  104. return p.parseClickStatement()
  105. case lexer.TokenNewline:
  106. p.advance() // Skip newlines
  107. return nil
  108. case lexer.TokenEOF:
  109. return nil
  110. default:
  111. // Try to parse as edge statement
  112. return p.parseEdgeStatement()
  113. }
  114. }
  115. // parseGraphStatement: GRAPH dir? (NL graphStatementList)?
  116. func (p *Parser) parseGraphStatement() error {
  117. if !p.check(lexer.TokenGraph) {
  118. return p.error("expected 'graph'")
  119. }
  120. p.advance()
  121. // Optional direction
  122. if p.checkDirection() {
  123. dir := p.advance()
  124. p.flowDB.direction = dir.Value
  125. }
  126. // Optional newline
  127. if p.check(lexer.TokenNewline) {
  128. p.advance()
  129. }
  130. return nil
  131. }
// parseSubgraphStatement handles subgraph definitions:
//
//	subgraph id [Title] ... end
//
// Body statements are parsed normally (registering their vertices and
// edges in the FlowDB); every vertex ID referenced inside the body is
// additionally recorded as a member of the subgraph.
func (p *Parser) parseSubgraphStatement() error {
	if !p.check(lexer.TokenSubgraph) {
		return p.error("expected 'subgraph'")
	}
	p.advance()
	// Parse subgraph ID (optional). A quoted string doubles as both the
	// ID and the title.
	var subgraphID string
	var title string
	if p.check(lexer.TokenID) {
		subgraphID = p.advance().Value
	} else if p.check(lexer.TokenString) {
		titleToken := p.advance().Value
		// Strip surrounding quotes (assumes the lexer always includes
		// both quote characters — TODO confirm).
		title = titleToken[1 : len(titleToken)-1]
		subgraphID = title
	}
	// Check for an explicit title in square brackets: subgraph id [Title]
	if p.check(lexer.TokenOpenBracket) {
		p.advance() // consume [
		titleParts := make([]string, 0)
		for !p.check(lexer.TokenCloseBracket) && !p.isAtEnd() {
			titleParts = append(titleParts, p.advance().Value)
		}
		if p.check(lexer.TokenCloseBracket) {
			p.advance() // consume ]
			title = strings.Join(titleParts, "")
		}
	}
	// Create the subgraph record to be filled with member node IDs.
	subgraph := &ast.FlowSubGraph{
		ID:        subgraphID,
		Title:     title,
		LabelType: "text",
		Classes:   make([]string, 0),
		Nodes:     make([]string, 0),
	}
	// Remember where the body starts; member vertices are collected by
	// re-scanning the tokens consumed since this point.
	oldCurrent := p.current
	// Parse subgraph content until 'end' (or EOF).
	for !p.check(lexer.TokenEnd) && !p.isAtEnd() {
		if p.check(lexer.TokenNewline) {
			p.advance()
			continue
		}
		// Save state so a failed statement can be rewound and skipped.
		beforeStatement := p.current
		// Body statements are parsed as edge statements, which adds their
		// vertices and edges to the FlowDB.
		err := p.parseEdgeStatement()
		if err != nil {
			// On failure, rewind and resynchronize at the next line.
			p.current = beforeStatement
			p.skipToNextStatement()
			continue
		}
		// Collect every vertex ID referenced since the body began.
		// NOTE(review): this rescans from oldCurrent after each statement
		// (quadratic in body length); the duplicate check below keeps the
		// node list correct despite the repeated scans.
		for i := oldCurrent; i < p.current; i++ {
			token := p.tokens[i]
			if token.Type == lexer.TokenID {
				// Only IDs that resolve to known vertices are members.
				if vertex, exists := p.flowDB.vertices[token.Value]; exists {
					// Deduplicate before appending.
					found := false
					for _, nodeID := range subgraph.Nodes {
						if nodeID == vertex.ID {
							found = true
							break
						}
					}
					if !found {
						subgraph.Nodes = append(subgraph.Nodes, vertex.ID)
					}
				}
			}
		}
	}
	// Consume the closing 'end' if present (tolerates a missing 'end').
	if p.check(lexer.TokenEnd) {
		p.advance()
	}
	// Register the subgraph in both the ordered list and the lookup map.
	p.flowDB.subGraphs = append(p.flowDB.subGraphs, subgraph)
	p.flowDB.subGraphLookup[subgraphID] = subgraph
	return nil
}
  216. // parseClassStatement handles class assignments
  217. func (p *Parser) parseClassStatement() error {
  218. if !p.check(lexer.TokenClass) {
  219. return p.error("expected 'class'")
  220. }
  221. p.advance()
  222. // Parse node list (comma separated)
  223. nodeIDs := make([]string, 0)
  224. for {
  225. if !p.check(lexer.TokenID) {
  226. break
  227. }
  228. nodeIDs = append(nodeIDs, p.advance().Value)
  229. if p.check(lexer.TokenComma) {
  230. p.advance() // consume comma
  231. } else {
  232. break
  233. }
  234. }
  235. // Parse class name
  236. if !p.check(lexer.TokenID) {
  237. return p.error("expected class name")
  238. }
  239. className := p.advance().Value
  240. // Apply class to nodes
  241. for _, nodeID := range nodeIDs {
  242. // Ensure vertex exists
  243. if _, exists := p.flowDB.vertices[nodeID]; !exists {
  244. p.addVertex(nodeID, "", "")
  245. }
  246. vertex := p.flowDB.vertices[nodeID]
  247. vertex.Classes = append(vertex.Classes, className)
  248. }
  249. return nil
  250. }
  251. // parseClassDefStatement handles class definitions
  252. func (p *Parser) parseClassDefStatement() error {
  253. if !p.check(lexer.TokenClassDef) {
  254. return p.error("expected 'classDef'")
  255. }
  256. p.advance()
  257. // Parse class name
  258. if !p.check(lexer.TokenID) {
  259. return p.error("expected class name")
  260. }
  261. className := p.advance().Value
  262. // Parse style definitions (everything until newline)
  263. styles := make([]string, 0)
  264. for !p.check(lexer.TokenNewline) && !p.isAtEnd() {
  265. token := p.advance()
  266. styles = append(styles, token.Value)
  267. }
  268. // Create class definition
  269. class := &ast.FlowClass{
  270. ID: className,
  271. Styles: styles,
  272. TextStyles: make([]string, 0),
  273. }
  274. p.flowDB.classes[className] = class
  275. return nil
  276. }
  277. // parseStyleStatement handles style definitions
  278. func (p *Parser) parseStyleStatement() error {
  279. if !p.check(lexer.TokenStyle) {
  280. return p.error("expected 'style'")
  281. }
  282. p.advance()
  283. // Parse node ID
  284. if !p.check(lexer.TokenID) {
  285. return p.error("expected node ID")
  286. }
  287. nodeID := p.advance().Value
  288. // Parse style definitions (everything until newline)
  289. styles := make([]string, 0)
  290. for !p.check(lexer.TokenNewline) && !p.isAtEnd() {
  291. token := p.advance()
  292. styles = append(styles, token.Value)
  293. }
  294. // Ensure vertex exists
  295. if _, exists := p.flowDB.vertices[nodeID]; !exists {
  296. p.addVertex(nodeID, "", "")
  297. }
  298. // Apply styles to vertex
  299. vertex := p.flowDB.vertices[nodeID]
  300. vertex.Styles = append(vertex.Styles, styles...)
  301. return nil
  302. }
  303. // parseLinkStyleStatement handles link style definitions
  304. func (p *Parser) parseLinkStyleStatement() error {
  305. if !p.check(lexer.TokenLinkStyle) {
  306. return p.error("expected 'linkStyle'")
  307. }
  308. p.advance()
  309. // Skip implementation for now
  310. return p.skipToNextStatement()
  311. }
  312. // parseClickStatement handles click event definitions
  313. func (p *Parser) parseClickStatement() error {
  314. if !p.check(lexer.TokenClick) {
  315. return p.error("expected 'click'")
  316. }
  317. p.advance() // consume 'click'
  318. // Parse node ID
  319. if !p.check(lexer.TokenID) {
  320. return p.error("expected node ID after 'click'")
  321. }
  322. nodeID := p.advance().Value
  323. // Parse click action (callback or href)
  324. clickEvent := &ast.ClickEvent{
  325. NodeID: nodeID,
  326. }
  327. if p.check(lexer.TokenID) || p.check(lexer.TokenString) {
  328. action := p.advance().Value
  329. // Remove quotes if it's a string
  330. if strings.HasPrefix(action, "\"") && strings.HasSuffix(action, "\"") {
  331. action = action[1 : len(action)-1]
  332. }
  333. // Check if it's a callback (function call) or URL
  334. if strings.Contains(action, "http") || strings.Contains(action, "www.") {
  335. clickEvent.Link = &action
  336. } else {
  337. clickEvent.Callback = &action
  338. }
  339. }
  340. // Parse optional target for links
  341. if p.check(lexer.TokenString) {
  342. target := p.advance().Value
  343. target = target[1 : len(target)-1] // Remove quotes
  344. clickEvent.Target = &target
  345. }
  346. // Apply click event to vertex
  347. if vertex, exists := p.flowDB.vertices[nodeID]; exists {
  348. vertex.OnClick = clickEvent
  349. } else {
  350. // Ensure vertex exists
  351. p.addVertex(nodeID, "", "")
  352. p.flowDB.vertices[nodeID].OnClick = clickEvent
  353. }
  354. return nil
  355. }
  356. // parseEdgeStatement parses edge definitions
  357. // This is the core parsing logic for flowchart connections
  358. func (p *Parser) parseEdgeStatement() error {
  359. // Parse start vertex
  360. startVertex, err := p.parseVertex()
  361. if err != nil {
  362. return err
  363. }
  364. // Parse edge
  365. edge, err := p.parseEdge()
  366. if err != nil {
  367. return err
  368. }
  369. // Parse end vertex
  370. endVertex, err := p.parseVertex()
  371. if err != nil {
  372. return err
  373. }
  374. // Create edge in flowDB
  375. return p.addEdge(startVertex, endVertex, edge)
  376. }
  377. // parseVertex parses vertex definitions with shapes
  378. // Examples: A[Text], B(Text), C{Text}, etc.
  379. func (p *Parser) parseVertex() (*VertexInfo, error) {
  380. if !p.check(lexer.TokenID) {
  381. return nil, p.error("expected vertex identifier")
  382. }
  383. id := p.advance().Value
  384. vertex := &VertexInfo{ID: id}
  385. // Check for shape definition
  386. if p.checkShapeStart() {
  387. shape, text, err := p.parseShape()
  388. if err != nil {
  389. return nil, err
  390. }
  391. vertex.Shape = shape
  392. vertex.Text = text
  393. // Add vertex to flowDB
  394. p.addVertex(id, text, shape)
  395. }
  396. return vertex, nil
  397. }
// VertexInfo holds parsed vertex information before it is committed to
// the FlowDB.
type VertexInfo struct {
	ID    string                  // vertex identifier as written in the source
	Text  string                  // label text (empty when no shape was parsed)
	Shape ast.FlowVertexTypeParam // vertex shape (empty when no shape was parsed)
}

// EdgeInfo holds parsed edge information before it is committed to the
// FlowDB.
type EdgeInfo struct {
	Type   string             // arrow kind, e.g. "arrow_point", "arrow_cross"
	Text   string             // edge label text (from the |label| syntax)
	Length int                // edge length; always set to 1 in this file — TODO confirm intended use
	Stroke ast.FlowEdgeStroke // stroke style: normal, dotted, or thick
}
// parseShape parses a shape delimiter pair and the label text inside it.
// Supported shapes: [text] rect, (text) round, ((text)) circle,
// {text} diamond, [/text/] lean-right, [\text\] lean-left, >text] flag.
// Returns the shape type, the label text, and any syntax error.
func (p *Parser) parseShape() (ast.FlowVertexTypeParam, string, error) {
	startToken := p.peek()
	var shape ast.FlowVertexTypeParam
	var endToken lexer.TokenType
	switch startToken.Type {
	case lexer.TokenOpenBracket:
		// '[' may open a plain rect or the lean shapes [/.../] and [\...\];
		// the token AFTER '[' disambiguates (checkSequence looks one ahead).
		if p.checkSequence([]lexer.TokenType{lexer.TokenSlash}) {
			shape = ast.VertexTypeLeanRight
			p.advance() // consume [
			p.advance() // consume /
			endToken = lexer.TokenSlash
		} else if p.checkSequence([]lexer.TokenType{lexer.TokenBackslash}) {
			shape = ast.VertexTypeLeanLeft
			p.advance() // consume [
			p.advance() // consume \
			endToken = lexer.TokenBackslash
		} else {
			shape = ast.VertexTypeRect
			endToken = lexer.TokenCloseBracket
			p.advance() // consume [
		}
	case lexer.TokenOpenParen:
		if p.checkNext(lexer.TokenOpenParen) { // ((text)) lexed as two '(' tokens
			shape = ast.VertexTypeCircle
			p.advance() // skip first (
			p.advance() // skip second (
			endToken = lexer.TokenCloseParen
		} else { // (text)
			shape = ast.VertexTypeRound
			p.advance() // consume (
			endToken = lexer.TokenCloseParen
		}
	case lexer.TokenOpenBrace:
		shape = ast.VertexTypeDiamond
		p.advance() // consume {
		endToken = lexer.TokenCloseBrace
	case lexer.TokenOpenDoubleParen:
		// The lexer may also emit '((' as a single token.
		shape = ast.VertexTypeCircle
		p.advance() // consume ((
		endToken = lexer.TokenCloseDoubleParen
	case lexer.TokenCloseAngle:
		// Flag shape: >text]
		shape = ast.VertexTypeFlag
		p.advance() // consume >
		endToken = lexer.TokenCloseBracket
	default:
		return "", "", p.error("expected shape delimiter")
	}
	// Accumulate label text until the closing delimiter. Note a quoted
	// string REPLACES any text collected so far rather than appending.
	text := ""
	for !p.check(endToken) && !p.isAtEnd() {
		if endToken == lexer.TokenSlash && p.check(lexer.TokenSlash) {
			break
		}
		if endToken == lexer.TokenBackslash && p.check(lexer.TokenBackslash) {
			break
		}
		if p.check(lexer.TokenString) {
			// Remove quotes from string
			val := p.advance().Value
			text = val[1 : len(val)-1] // strip surrounding quotes
		} else {
			text += p.advance().Value
		}
	}
	// Consume the closing delimiter(s); these vary per shape.
	switch endToken {
	case lexer.TokenSlash:
		// Lean-right closes with "/]".
		if !p.check(lexer.TokenSlash) {
			return "", "", p.error("expected closing /")
		}
		p.advance() // consume /
		if !p.check(lexer.TokenCloseBracket) {
			return "", "", p.error("expected closing ]")
		}
		p.advance() // consume ]
	case lexer.TokenBackslash:
		// Lean-left closes with "\]".
		if !p.check(lexer.TokenBackslash) {
			return "", "", p.error("expected closing \\")
		}
		p.advance() // consume \
		if !p.check(lexer.TokenCloseBracket) {
			return "", "", p.error("expected closing ]")
		}
		p.advance() // consume ]
	case lexer.TokenCloseParen:
		if !p.check(endToken) {
			return "", "", p.error("expected closing delimiter")
		}
		p.advance() // consume closing delimiter
		// A circle built from two '(' tokens needs a second ')'; a circle
		// from a single '((' token takes the default branch instead.
		if shape == ast.VertexTypeCircle && startToken.Type == lexer.TokenOpenParen {
			if !p.check(lexer.TokenCloseParen) {
				return "", "", p.error("expected second closing parenthesis")
			}
			p.advance()
		}
	default:
		if !p.check(endToken) {
			return "", "", p.error("expected closing delimiter")
		}
		p.advance() // consume closing delimiter
	}
	return shape, text, nil
}
  518. // parseEdge parses edge definitions with arrows and labels
  519. func (p *Parser) parseEdge() (*EdgeInfo, error) {
  520. edge := &EdgeInfo{
  521. Stroke: ast.StrokeNormal,
  522. Length: 1,
  523. }
  524. // Parse edge label if present (|text|)
  525. if p.check(lexer.TokenPipe) {
  526. p.advance() // consume |
  527. // Collect text until next |
  528. text := ""
  529. for !p.check(lexer.TokenPipe) && !p.isAtEnd() {
  530. text += p.advance().Value
  531. }
  532. if !p.check(lexer.TokenPipe) {
  533. return nil, p.error("expected closing pipe for edge label")
  534. }
  535. p.advance() // consume closing |
  536. edge.Text = text
  537. }
  538. // Parse arrow type
  539. if !p.checkArrow() {
  540. return nil, p.error("expected arrow")
  541. }
  542. arrow := p.advance()
  543. edge.Type, edge.Stroke = p.parseArrowType(arrow.Value)
  544. return edge, nil
  545. }
  546. // parseArrowType extracts type and stroke from arrow token
  547. func (p *Parser) parseArrowType(arrow string) (string, ast.FlowEdgeStroke) {
  548. switch arrow {
  549. case "-->":
  550. return "arrow_point", ast.StrokeNormal
  551. case "-.->":
  552. return "arrow_point", ast.StrokeDotted
  553. case "==>":
  554. return "arrow_point", ast.StrokeThick
  555. case "--x":
  556. return "arrow_cross", ast.StrokeNormal
  557. case "--o":
  558. return "arrow_circle", ast.StrokeNormal
  559. case "---":
  560. return "arrow_open", ast.StrokeNormal
  561. default:
  562. return "arrow_point", ast.StrokeNormal
  563. }
  564. }
  565. // FlowDB manipulation methods (mirroring mermaid.js FlowDB)
  566. // addVertex adds a vertex to the flow database
  567. func (p *Parser) addVertex(id, text string, vertexType ast.FlowVertexTypeParam) {
  568. vertex := p.flowDB.vertices[id]
  569. if vertex == nil {
  570. vertex = &ast.FlowVertex{
  571. ID: id,
  572. LabelType: "text",
  573. DomID: fmt.Sprintf("flowchart-%s-%d", id, p.flowDB.vertexCounter),
  574. Styles: make([]string, 0),
  575. Classes: make([]string, 0),
  576. }
  577. p.flowDB.vertices[id] = vertex
  578. p.flowDB.vertexCounter++
  579. }
  580. if text != "" {
  581. vertex.Text = &text
  582. }
  583. if vertexType != "" {
  584. vertex.Type = &vertexType
  585. }
  586. }
  587. // addEdge adds an edge to the flow database
  588. func (p *Parser) addEdge(start, end *VertexInfo, edge *EdgeInfo) error {
  589. // Ensure vertices exist
  590. p.addVertex(start.ID, start.Text, start.Shape)
  591. p.addVertex(end.ID, end.Text, end.Shape)
  592. // Create edge
  593. flowEdge := &ast.FlowEdge{
  594. Start: start.ID,
  595. End: end.ID,
  596. Text: edge.Text,
  597. LabelType: "text",
  598. Classes: make([]string, 0),
  599. IsUserDefinedID: false,
  600. }
  601. if edge.Type != "" {
  602. flowEdge.Type = &edge.Type
  603. }
  604. if edge.Stroke != "" {
  605. flowEdge.Stroke = &edge.Stroke
  606. }
  607. // Generate edge ID
  608. edgeID := fmt.Sprintf("L-%s-%s-%d", start.ID, end.ID, len(p.flowDB.edges))
  609. flowEdge.ID = edgeID
  610. p.flowDB.edges = append(p.flowDB.edges, flowEdge)
  611. return nil
  612. }
  613. // buildFlowchart creates the final flowchart from flowDB state
  614. func (p *Parser) buildFlowchart() *ast.Flowchart {
  615. flowchart := ast.NewFlowchart()
  616. flowchart.Direction = p.flowDB.direction
  617. flowchart.Vertices = p.flowDB.vertices
  618. flowchart.Edges = p.flowDB.edges
  619. flowchart.Classes = p.flowDB.classes
  620. flowchart.SubGraphs = p.flowDB.subGraphs
  621. flowchart.SubGraphLookup = p.flowDB.subGraphLookup
  622. flowchart.Tooltips = p.flowDB.tooltips
  623. flowchart.Version = p.flowDB.version
  624. return flowchart
  625. }
  626. // Helper methods
  627. // check returns true if current token matches the given type
  628. func (p *Parser) check(tokenType lexer.TokenType) bool {
  629. if p.isAtEnd() {
  630. return false
  631. }
  632. return p.peek().Type == tokenType
  633. }
  634. // checkNext returns true if next token matches the given type
  635. func (p *Parser) checkNext(tokenType lexer.TokenType) bool {
  636. if p.current+1 >= len(p.tokens) {
  637. return false
  638. }
  639. return p.tokens[p.current+1].Type == tokenType
  640. }
// checkSequence reports whether the tokens immediately FOLLOWING the
// current token match the given sequence of types. Note the lookahead is
// offset by one: types[0] is compared against tokens[current+1], not the
// current token — callers (parseShape) invoke it while still sitting on
// the opening delimiter.
func (p *Parser) checkSequence(types []lexer.TokenType) bool {
	for i, tokenType := range types {
		if p.current+1+i >= len(p.tokens) {
			return false
		}
		if p.tokens[p.current+1+i].Type != tokenType {
			return false
		}
	}
	return true
}
  653. // checkDirection returns true if current token is a direction
  654. func (p *Parser) checkDirection() bool {
  655. if p.isAtEnd() {
  656. return false
  657. }
  658. tokenType := p.peek().Type
  659. return tokenType == lexer.TokenTD || tokenType == lexer.TokenTB ||
  660. tokenType == lexer.TokenBT || tokenType == lexer.TokenRL ||
  661. tokenType == lexer.TokenLR
  662. }
  663. // checkShapeStart returns true if current token starts a shape
  664. func (p *Parser) checkShapeStart() bool {
  665. if p.isAtEnd() {
  666. return false
  667. }
  668. tokenType := p.peek().Type
  669. return tokenType == lexer.TokenOpenBracket || tokenType == lexer.TokenOpenParen ||
  670. tokenType == lexer.TokenOpenBrace || tokenType == lexer.TokenOpenDoubleParen ||
  671. tokenType == lexer.TokenCloseAngle // for flag shape >text]
  672. }
  673. // checkArrow returns true if current token is an arrow
  674. func (p *Parser) checkArrow() bool {
  675. if p.isAtEnd() {
  676. return false
  677. }
  678. tokenType := p.peek().Type
  679. return tokenType == lexer.TokenArrowSolid || tokenType == lexer.TokenArrowDotted ||
  680. tokenType == lexer.TokenArrowThick || tokenType == lexer.TokenArrowOpen ||
  681. tokenType == lexer.TokenArrowPoint || tokenType == lexer.TokenArrowCross ||
  682. tokenType == lexer.TokenArrowCircle
  683. }
  684. // advance consumes and returns the current token
  685. func (p *Parser) advance() lexer.Token {
  686. if !p.isAtEnd() {
  687. p.current++
  688. }
  689. return p.previous()
  690. }
  691. // isAtEnd returns true if we've reached the end of tokens
  692. func (p *Parser) isAtEnd() bool {
  693. return p.current >= len(p.tokens) || p.peek().Type == lexer.TokenEOF
  694. }
  695. // peek returns the current token without advancing
  696. func (p *Parser) peek() lexer.Token {
  697. if p.current >= len(p.tokens) {
  698. return lexer.Token{Type: lexer.TokenEOF}
  699. }
  700. return p.tokens[p.current]
  701. }
  702. // previous returns the previous token
  703. func (p *Parser) previous() lexer.Token {
  704. if p.current <= 0 {
  705. return lexer.Token{Type: lexer.TokenEOF}
  706. }
  707. return p.tokens[p.current-1]
  708. }
  709. // error creates a parsing error
  710. func (p *Parser) error(message string) error {
  711. token := p.peek()
  712. return fmt.Errorf("parse error at line %d, column %d: %s (got %s)",
  713. token.Line, token.Column, message, token.Type)
  714. }
  715. // skipToNextStatement skips tokens until next statement
  716. func (p *Parser) skipToNextStatement() error {
  717. for !p.isAtEnd() && !p.check(lexer.TokenNewline) {
  718. p.advance()
  719. }
  720. if p.check(lexer.TokenNewline) {
  721. p.advance()
  722. }
  723. return nil
  724. }