// lexer_test.go

package tests

import (
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"

	"mermaid-go/pkg/lexer"
)

func TestLexer_TokenizeBasicGraph(t *testing.T) {
	input := "graph TD\nA --> B"
	l := lexer.NewLexer(input)

	tokens, err := l.Tokenize()
	require.NoError(t, err)
	require.NotEmpty(t, tokens)

	// Filter out whitespace tokens for easier testing.
	filtered := lexer.FilterTokens(tokens)

	expected := []lexer.TokenType{
		lexer.TokenGraph,
		lexer.TokenTD,
		lexer.TokenID,         // A
		lexer.TokenArrowSolid, // -->
		lexer.TokenID,         // B
		lexer.TokenEOF,
	}

	require.Len(t, filtered, len(expected))
	for i, expectedType := range expected {
		assert.Equal(t, expectedType, filtered[i].Type,
			"Token %d: expected %s, got %s", i, expectedType, filtered[i].Type)
	}
}

func TestLexer_TokenizeWithShapes(t *testing.T) {
	input := `A[Text] --> B(Round) --> C{Diamond}`
	l := lexer.NewLexer(input)

	tokens, err := l.Tokenize()
	require.NoError(t, err)
	filtered := lexer.FilterTokens(tokens)

	// Should tokenize: A [ Text ] --> B ( Round ) --> C { Diamond } EOF
	expectedTypes := []lexer.TokenType{
		lexer.TokenID,           // A
		lexer.TokenOpenBracket,  // [
		lexer.TokenID,           // Text
		lexer.TokenCloseBracket, // ]
		lexer.TokenArrowSolid,   // -->
		lexer.TokenID,           // B
		lexer.TokenOpenParen,    // (
		lexer.TokenID,           // Round
		lexer.TokenCloseParen,   // )
		lexer.TokenArrowSolid,   // -->
		lexer.TokenID,           // C
		lexer.TokenOpenBrace,    // {
		lexer.TokenID,           // Diamond
		lexer.TokenCloseBrace,   // }
		lexer.TokenEOF,
	}

	require.Len(t, filtered, len(expectedTypes))
	for i, expectedType := range expectedTypes {
		assert.Equal(t, expectedType, filtered[i].Type,
			"Token %d: expected %s, got %s (value: %q)",
			i, expectedType, filtered[i].Type, filtered[i].Value)
	}
}

func TestLexer_TokenizeArrowTypes(t *testing.T) {
	testCases := []struct {
		input    string
		expected lexer.TokenType
	}{
		{"-->", lexer.TokenArrowSolid},
		{"-.->", lexer.TokenArrowDotted},
		{"==>", lexer.TokenArrowThick},
		{"--x", lexer.TokenArrowCross},
		{"--o", lexer.TokenArrowCircle},
		{"---", lexer.TokenArrowOpen},
	}

	for _, tc := range testCases {
		t.Run(tc.input, func(t *testing.T) {
			l := lexer.NewLexer(tc.input)
			tokens, err := l.Tokenize()
			require.NoError(t, err)

			filtered := lexer.FilterTokens(tokens)
			require.Len(t, filtered, 2) // arrow + EOF
			assert.Equal(t, tc.expected, filtered[0].Type)
			assert.Equal(t, tc.input, filtered[0].Value)
		})
	}
}

func TestLexer_TokenizeWithLabel(t *testing.T) {
	input := `A -->|Label| B`
	l := lexer.NewLexer(input)

	tokens, err := l.Tokenize()
	require.NoError(t, err)
	filtered := lexer.FilterTokens(tokens)

	expectedTypes := []lexer.TokenType{
		lexer.TokenID,         // A
		lexer.TokenArrowSolid, // -->
		lexer.TokenPipe,       // |
		lexer.TokenID,         // Label
		lexer.TokenPipe,       // |
		lexer.TokenID,         // B
		lexer.TokenEOF,
	}

	require.Len(t, filtered, len(expectedTypes))
	for i, expectedType := range expectedTypes {
		assert.Equal(t, expectedType, filtered[i].Type,
			"Token %d: expected %s, got %s (value: %q)",
			i, expectedType, filtered[i].Type, filtered[i].Value)
	}
}

func TestLexer_TokenizeComments(t *testing.T) {
	input := `graph TD
%% This is a comment
A --> B`
	l := lexer.NewLexer(input)

	tokens, err := l.Tokenize()
	require.NoError(t, err)

	// Find the comment token among the unfiltered output.
	var commentFound bool
	for _, token := range tokens {
		if token.Type == lexer.TokenComment {
			commentFound = true
			assert.Equal(t, "%% This is a comment", token.Value)
		}
	}
	assert.True(t, commentFound, "Comment token not found")
}
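
// assertTokenSequence is a minimal helper sketch that the repeated
// compare loops in the tests above could be folded into. It assumes
// lexer.FilterTokens returns a slice of lexer.Token values exposing
// Type and Value fields, as the existing assertions already rely on;
// adjust the signature if the real token type differs.
func assertTokenSequence(t *testing.T, tokens []lexer.Token, expected []lexer.TokenType) {
	t.Helper()
	require.Len(t, tokens, len(expected))
	for i, expectedType := range expected {
		assert.Equal(t, expectedType, tokens[i].Type,
			"Token %d: expected %s, got %s (value: %q)",
			i, expectedType, tokens[i].Type, tokens[i].Value)
	}
}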