// Package tests contains tests for the mermaid-go lexer.
package tests

import (
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"

	"mermaid-go/pkg/lexer"
)
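
// The tests below exercise the lexer API as it is used in this file (this
// summary is inferred from the calls here, not from separate documentation):
// lexer.NewLexer builds a lexer from an input string, Tokenize returns the
// token slice and an error, FilterTokens drops whitespace tokens, and each
// token carries a Type (lexer.TokenType) and a Value string.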

func TestLexer_TokenizeBasicGraph(t *testing.T) {
	input := "graph TD\nA --> B"
	l := lexer.NewLexer(input)

	tokens, err := l.Tokenize()
	require.NoError(t, err)
	require.NotEmpty(t, tokens)

	// Filter out whitespace for easier testing
	filtered := lexer.FilterTokens(tokens)

	expected := []lexer.TokenType{
		lexer.TokenGraph,
		lexer.TokenTD,
		lexer.TokenID,         // A
		lexer.TokenArrowSolid, // -->
		lexer.TokenID,         // B
		lexer.TokenEOF,
	}

	require.Len(t, filtered, len(expected))
	for i, expectedType := range expected {
		assert.Equal(t, expectedType, filtered[i].Type,
			"Token %d: expected %s, got %s", i, expectedType, filtered[i].Type)
	}
}

func TestLexer_TokenizeWithShapes(t *testing.T) {
	input := `A[Text] --> B(Round) --> C{Diamond}`
	l := lexer.NewLexer(input)

	tokens, err := l.Tokenize()
	require.NoError(t, err)
	filtered := lexer.FilterTokens(tokens)

	// Should tokenize: A [ Text ] --> B ( Round ) --> C { Diamond } EOF
	expectedTypes := []lexer.TokenType{
		lexer.TokenID,           // A
		lexer.TokenOpenBracket,  // [
		lexer.TokenID,           // Text
		lexer.TokenCloseBracket, // ]
		lexer.TokenArrowSolid,   // -->
		lexer.TokenID,           // B
		lexer.TokenOpenParen,    // (
		lexer.TokenID,           // Round
		lexer.TokenCloseParen,   // )
		lexer.TokenArrowSolid,   // -->
		lexer.TokenID,           // C
		lexer.TokenOpenBrace,    // {
		lexer.TokenID,           // Diamond
		lexer.TokenCloseBrace,   // }
		lexer.TokenEOF,
	}

	require.Len(t, filtered, len(expectedTypes))
	for i, expectedType := range expectedTypes {
		assert.Equal(t, expectedType, filtered[i].Type,
			"Token %d: expected %s, got %s (value: %q)",
			i, expectedType, filtered[i].Type, filtered[i].Value)
	}
}

func TestLexer_TokenizeArrowTypes(t *testing.T) {
	testCases := []struct {
		input    string
		expected lexer.TokenType
	}{
		{"-->", lexer.TokenArrowSolid},
		{"-.->", lexer.TokenArrowDotted},
		{"==>", lexer.TokenArrowThick},
		{"--x", lexer.TokenArrowCross},
		{"--o", lexer.TokenArrowCircle},
		{"---", lexer.TokenArrowOpen},
	}

	for _, tc := range testCases {
		t.Run(tc.input, func(t *testing.T) {
			l := lexer.NewLexer(tc.input)
			tokens, err := l.Tokenize()
			require.NoError(t, err)

			filtered := lexer.FilterTokens(tokens)
			require.Len(t, filtered, 2) // Arrow + EOF
			assert.Equal(t, tc.expected, filtered[0].Type)
			assert.Equal(t, tc.input, filtered[0].Value)
		})
	}
}

func TestLexer_TokenizeWithLabel(t *testing.T) {
	input := `A -->|Label| B`
	l := lexer.NewLexer(input)

	tokens, err := l.Tokenize()
	require.NoError(t, err)
	filtered := lexer.FilterTokens(tokens)

	expectedTypes := []lexer.TokenType{
		lexer.TokenID,         // A
		lexer.TokenArrowSolid, // -->
		lexer.TokenPipe,       // |
		lexer.TokenID,         // Label
		lexer.TokenPipe,       // |
		lexer.TokenID,         // B
		lexer.TokenEOF,
	}

	require.Len(t, filtered, len(expectedTypes))
	for i, expectedType := range expectedTypes {
		assert.Equal(t, expectedType, filtered[i].Type,
			"Token %d: expected %s, got %s (value: %q)",
			i, expectedType, filtered[i].Type, filtered[i].Value)
	}
}

func TestLexer_TokenizeComments(t *testing.T) {
	input := `graph TD
%% This is a comment
A --> B`
	l := lexer.NewLexer(input)

	tokens, err := l.Tokenize()
	require.NoError(t, err)

	// Find comment token
	var commentFound bool
	for _, token := range tokens {
		if token.Type == lexer.TokenComment {
			commentFound = true
			assert.Equal(t, "%% This is a comment", token.Value)
		}
	}
	assert.True(t, commentFound, "Comment token not found")
}
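
// The assertion loop above is repeated in several tests; a shared helper
// along these lines could replace it. This is a sketch rather than part of
// the existing lexer package: it assumes the token slice element type is
// lexer.Token and relies only on the Type and Value fields already used in
// the tests above.
func assertTokenTypes(t *testing.T, tokens []lexer.Token, expected []lexer.TokenType) {
	t.Helper()
	require.Len(t, tokens, len(expected))
	for i, expectedType := range expected {
		assert.Equal(t, expectedType, tokens[i].Type,
			"Token %d: expected %s, got %s (value: %q)",
			i, expectedType, tokens[i].Type, tokens[i].Value)
	}
}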