package tests

import (
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"

	"mermaid-go/pkg/lexer"
)

func TestLexer_TokenizeBasicGraph(t *testing.T) {
	input := "graph TD\nA --> B"
	l := lexer.NewLexer(input)

	tokens, err := l.Tokenize()
	require.NoError(t, err)
	require.NotEmpty(t, tokens)

	// Filter out whitespace for easier testing
	filtered := lexer.FilterTokens(tokens)

	expected := []lexer.TokenType{
		lexer.TokenGraph,
		lexer.TokenTD,
		lexer.TokenID,         // A
		lexer.TokenArrowSolid, // -->
		lexer.TokenID,         // B
		lexer.TokenEOF,
	}

	require.Len(t, filtered, len(expected))
	for i, expectedType := range expected {
		assert.Equal(t, expectedType, filtered[i].Type,
			"Token %d: expected %s, got %s", i, expectedType, filtered[i].Type)
	}
}

func TestLexer_TokenizeWithShapes(t *testing.T) {
	input := `A[Text] --> B(Round) --> C{Diamond}`
	l := lexer.NewLexer(input)

	tokens, err := l.Tokenize()
	require.NoError(t, err)

	filtered := lexer.FilterTokens(tokens)

	// Should tokenize: A [ Text ] --> B ( Round ) --> C { Diamond } EOF
	expectedTypes := []lexer.TokenType{
		lexer.TokenID,           // A
		lexer.TokenOpenBracket,  // [
		lexer.TokenID,           // Text
		lexer.TokenCloseBracket, // ]
		lexer.TokenArrowSolid,   // -->
		lexer.TokenID,           // B
		lexer.TokenOpenParen,    // (
		lexer.TokenID,           // Round
		lexer.TokenCloseParen,   // )
		lexer.TokenArrowSolid,   // -->
		lexer.TokenID,           // C
		lexer.TokenOpenBrace,    // {
		lexer.TokenID,           // Diamond
		lexer.TokenCloseBrace,   // }
		lexer.TokenEOF,
	}

	require.Len(t, filtered, len(expectedTypes))
	for i, expectedType := range expectedTypes {
		assert.Equal(t, expectedType, filtered[i].Type,
			"Token %d: expected %s, got %s (value: %q)",
			i, expectedType, filtered[i].Type, filtered[i].Value)
	}
}

func TestLexer_TokenizeArrowTypes(t *testing.T) {
	testCases := []struct {
		input    string
		expected lexer.TokenType
	}{
		{"-->", lexer.TokenArrowSolid},
		{"-.->", lexer.TokenArrowDotted},
		{"==>", lexer.TokenArrowThick},
		{"--x", lexer.TokenArrowCross},
		{"--o", lexer.TokenArrowCircle},
		{"---", lexer.TokenArrowOpen},
	}

	for _, tc := range testCases {
		t.Run(tc.input, func(t *testing.T) {
			l := lexer.NewLexer(tc.input)
			tokens, err := l.Tokenize()
			require.NoError(t, err)

			filtered := lexer.FilterTokens(tokens)
			require.Len(t, filtered, 2) // Arrow + EOF
			assert.Equal(t, tc.expected, filtered[0].Type)
			assert.Equal(t, tc.input, filtered[0].Value)
		})
	}
}

func TestLexer_TokenizeWithLabel(t *testing.T) {
	input := `A -->|Label| B`
	l := lexer.NewLexer(input)

	tokens, err := l.Tokenize()
	require.NoError(t, err)

	filtered := lexer.FilterTokens(tokens)

	expectedTypes := []lexer.TokenType{
		lexer.TokenID,         // A
		lexer.TokenArrowSolid, // -->
		lexer.TokenPipe,       // |
		lexer.TokenID,         // Label
		lexer.TokenPipe,       // |
		lexer.TokenID,         // B
		lexer.TokenEOF,
	}

	require.Len(t, filtered, len(expectedTypes))
	for i, expectedType := range expectedTypes {
		assert.Equal(t, expectedType, filtered[i].Type,
			"Token %d: expected %s, got %s (value: %q)",
			i, expectedType, filtered[i].Type, filtered[i].Value)
	}
}

func TestLexer_TokenizeComments(t *testing.T) {
	input := `graph TD
%% This is a comment
A --> B`
	l := lexer.NewLexer(input)

	tokens, err := l.Tokenize()
	require.NoError(t, err)

	// Find comment token
	var commentFound bool
	for _, token := range tokens {
		if token.Type == lexer.TokenComment {
			commentFound = true
			assert.Equal(t, "%% This is a comment", token.Value)
		}
	}
	assert.True(t, commentFound, "Comment token not found")
}