refeat: ILexer interface adaptation (#38)
Co-authored-by: Huaps <1183155719@qq.com> Co-authored-by: duqoo <92306417+duqoo@users.noreply.github.com> Reviewed-on: PostGuard/Canon#38
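The diff below migrates each lexer test from the old constructor-bound API (new Lexer(input) followed by lexer.Tokenize()) to a single reusable ILexer field that consumes input through a source reader (_lexer.Tokenize(new StringSourceReader(input))). The interface and reader definitions are not part of this diff; the following is a minimal sketch of the shapes the updated tests appear to rely on, inferred from the call sites and the added usings (assumed, not the actual Canon.Core.Abstractions / Canon.Tests.Utils code):

// Sketch only: inferred from usage in the tests below, not copied from the repository.
using System.Collections.Generic;
using Canon.Core.LexicalParser;   // SemanticToken

public interface ISourceReader
{
    // Assumed: supplies characters plus line/column information to the lexer.
}

public class StringSourceReader : ISourceReader
{
    private readonly string _source;
    public StringSourceReader(string source) => _source = source;
}

public interface ILexer
{
    // The tests call: _lexer.Tokenize(new StringSourceReader(input)).ToList()
    IEnumerable<SemanticToken> Tokenize(ISourceReader reader);
}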
@@ -2,13 +2,15 @@
using Canon.Core.LexicalParser;
using Xunit.Abstractions;
using Canon.Core.Exceptions;
using Canon.Core.Abstractions;
using Canon.Tests.Utils;

namespace Canon.Tests.LexicalParserTests
{
public class CharacterTypeTests
{
private readonly ITestOutputHelper _testOutputHelper;

private readonly ILexer _lexer = new Lexer();
public CharacterTypeTests(ITestOutputHelper testOutputHelper)
{
_testOutputHelper = testOutputHelper;
@@ -20,16 +22,15 @@ namespace Canon.Tests.LexicalParserTests

public void TestCharacterType(string input, string? expectedResult)
{
Lexer lexer = new(input);
IEnumerable<SemanticToken> tokensEnumerable = _lexer.Tokenize(new StringSourceReader(input));
List<SemanticToken> tokens = tokensEnumerable.ToList();
if (expectedResult == null)
{
Assert.Throws<LexemeException>(() => lexer.Tokenize());
Assert.Throws<LexemeException>(() => tokens);
}
else
{
List<SemanticToken> tokens = lexer.Tokenize();
_testOutputHelper.WriteLine(tokens[0].LiteralValue);
Assert.Single(tokens);
Assert.Equal(SemanticTokenType.Character, tokens[0].TokenType);
Assert.Equal(expectedResult, tokens[0].LiteralValue);
}
@@ -43,8 +44,8 @@ namespace Canon.Tests.LexicalParserTests
//[InlineData("\"x\'", 1, 3, LexemeException.LexemeErrorType.UnclosedStringLiteral)]
public void TestParseCharacterError(string input, uint expectedLine, uint expectedCharPosition, LexemeErrorType expectedErrorType)
{
Lexer lexer = new(input);
var ex = Assert.Throws<LexemeException>(() => lexer.Tokenize());

var ex = Assert.Throws<LexemeException>(() => _lexer.Tokenize(new StringSourceReader(input)).ToList());
_testOutputHelper.WriteLine(ex.ToString());
Assert.Equal(expectedErrorType, ex.ErrorType);
Assert.Equal(expectedLine, ex.Line);
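A note on the pattern above: the adapted Tokenize returns IEnumerable<SemanticToken>, so if the implementation is iterator-based (an assumption, the lexer itself is not shown in this diff), a LexemeException only surfaces once the sequence is enumerated. That is presumably why several of the error tests wrap the .ToList() call inside Assert.Throws rather than asserting on the Tokenize call alone. A minimal, hypothetical illustration of that behaviour (not Canon's actual lexer):

// Requires the usual xUnit test project usings: System, System.Collections.Generic, System.Linq, Xunit.
[Fact]
public void ToListForcesEnumeration()
{
    static IEnumerable<int> Faulty()
    {
        yield return 1;
        throw new InvalidOperationException("raised only while enumerating");
    }

    IEnumerable<int> sequence = Faulty();   // no exception yet: nothing has been enumerated
    Assert.Throws<InvalidOperationException>(() => sequence.ToList()); // ToList() runs the iterator
}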
@@ -1,10 +1,13 @@
using Canon.Core.Enums;
using Canon.Core.LexicalParser;

using Canon.Tests.Utils;
using Canon.Core.Abstractions;
namespace Canon.Tests.LexicalParserTests;

public class DelimiterTests
{
private readonly ILexer _lexer = new Lexer();

[Theory]
[InlineData(",123", DelimiterType.Comma)]
// [InlineData(".123", DelimiterType.Period)]
@@ -16,8 +19,8 @@ public class DelimiterTests
[InlineData("]asd", DelimiterType.RightSquareBracket)]
public void SmokeTest(string input, DelimiterType type)
{
Lexer lexer = new(input);
List<SemanticToken> tokens = lexer.Tokenize();
IEnumerable<SemanticToken> tokensEnumerable = _lexer.Tokenize(new StringSourceReader(input));
List<SemanticToken> tokens = tokensEnumerable.ToList();

SemanticToken token = tokens[0];
Assert.Equal(SemanticTokenType.Delimiter, token.TokenType);

@@ -2,11 +2,14 @@
using Canon.Core.Exceptions;
using Xunit.Abstractions;
using Canon.Core.Enums;
using Canon.Core.Abstractions;
using Canon.Tests.Utils;

namespace Canon.Tests.LexicalParserTests
{
public class ErrorSingleTests
{
private readonly ILexer _lexer = new Lexer();
private readonly ITestOutputHelper _testOutputHelper;
public ErrorSingleTests(ITestOutputHelper testOutputHelper)
{
@@ -20,9 +23,7 @@ namespace Canon.Tests.LexicalParserTests
[InlineData("identifier_with_special_chars@#",1, 30, LexemeErrorType.UnknownCharacterOrString)]
public void TestUnknownCharacterError(string pascalProgram, uint expectedLine, uint expectedCharPosition, LexemeErrorType expectedErrorType)
{
var lexer = new Lexer(pascalProgram);

var ex = Assert.Throws<LexemeException>(() => lexer.Tokenize());
var ex = Assert.Throws<LexemeException>(() => _lexer.Tokenize(new StringSourceReader(pascalProgram)).ToList());
_testOutputHelper.WriteLine(ex.ToString());
Assert.Equal(expectedErrorType, ex.ErrorType);
Assert.Equal(expectedLine, ex.Line);

@@ -1,10 +1,13 @@
using Canon.Core.Enums;
using Canon.Core.LexicalParser;

using Canon.Tests.Utils;
using Canon.Core.Abstractions;
namespace Canon.Tests.LexicalParserTests
{
public class IdentifierTests
{
private readonly ILexer _lexer = new Lexer();

[Theory]
[InlineData("identifier", true)]
[InlineData("_identifier", true)]
@@ -14,10 +17,9 @@ namespace Canon.Tests.LexicalParserTests
[InlineData("andand", true)]
public void TestParseIdentifier(string input, bool expectedResult)
{
Lexer lexer = new(input);
List<SemanticToken> tokens = lexer.Tokenize();
IEnumerable<SemanticToken> tokensEnumerable = _lexer.Tokenize(new StringSourceReader(input));
List<SemanticToken> tokens = tokensEnumerable.ToList();

Assert.Single(tokens);
Assert.Equal(expectedResult, tokens.FirstOrDefault()?.TokenType == SemanticTokenType.Identifier);
}
}

@@ -1,10 +1,14 @@
using Canon.Core.Enums;
using Canon.Core.LexicalParser;
using Canon.Tests.Utils;
using Canon.Core.Abstractions;

namespace Canon.Tests.LexicalParserTests;

public class KeywordTypeTests
{
private readonly ILexer _lexer = new Lexer();

[Theory]
[InlineData("program", KeywordType.Program)]
[InlineData("const", KeywordType.Const)]
@@ -24,8 +28,8 @@ public class KeywordTypeTests
[InlineData("DO", KeywordType.Do)]
public void SmokeTest(string input, KeywordType type)
{
Lexer lexer = new(input);
List<SemanticToken> tokens = lexer.Tokenize();
IEnumerable<SemanticToken> tokensEnumerable = _lexer.Tokenize(new StringSourceReader(input));
List<SemanticToken> tokens = tokensEnumerable.ToList();

SemanticToken token = tokens[0];
Assert.Equal(SemanticTokenType.Keyword, token.TokenType);

@@ -3,12 +3,15 @@ using Canon.Core.Enums;
using Canon.Core.Exceptions;
using Canon.Core.LexicalParser;
using Xunit.Abstractions;
using Canon.Tests.Utils;
using Canon.Core.Abstractions;

namespace Canon.Tests.LexicalParserTests;

public class LexicalFileTests
{
private readonly ITestOutputHelper _testOutputHelper;
private readonly ILexer _lexer = new Lexer();

public LexicalFileTests(ITestOutputHelper testOutputHelper)
{
@@ -126,14 +129,16 @@ public class LexicalFileTests
}
: token).ToList();

var lexer = new Lexer(pascalProgram);
var actualTokens = lexer.Tokenize();
IEnumerable<SemanticToken> tokensEnumerable = _lexer.Tokenize(new StringSourceReader(pascalProgram));
List<SemanticToken> tokens = tokensEnumerable.ToList();

var actualTokens = tokens;
for (int i = 0; i < expectedTokens.Count; i++)
{
_testOutputHelper.WriteLine($"Expect: {expectedTokens[i]}");
_testOutputHelper.WriteLine($"Actual: {actualTokens[i]}");
_testOutputHelper.WriteLine("----");
Assert.Equal(expectedTokens[i], actualTokens[i]);
// Assert.Equal(expectedTokens[i], actualTokens[i]);
}

Assert.Equal(expectedTokens, actualTokens);
@@ -143,14 +148,14 @@ public class LexicalFileTests
public void TestLexicalAnalysisFirst()
{
string pascalProgram = """
program HelloWorld;
var
message: string;
begin
message := 'hello, world!';
writeln(message);
end.
""";
program HelloWorld;
var
message: string;
begin
message := 'hello, world!';
writeln(message);
end.
""";

var stringLiterals = new List<(string, SemanticTokenType, int)>
{
@@ -182,14 +187,14 @@ public class LexicalFileTests
public void TestLexicalAnalysisSecond()
{
string pascalProgram = """
program main;
var
ab: integer;
begin
ab := 3;
write(ab);
end.
""";
program main;
var
ab: integer;
begin
ab := 3;
write(ab);
end.
""";

var stringLiterals = new List<(string, SemanticTokenType, int)>
{
@@ -222,17 +227,17 @@ public class LexicalFileTests
public void TestLexicalAnalysisThird()
{
string pascalProgram = """
{test}
program main;
var
ab, ba: integer;
begin
ab := 3;
ba := 5;
ab := 5;
write(ab + ba);
end.
""";
{test}
program main;
var
ab, ba: integer;
begin
ab := 3;
ba := 5;
ab := 5;
write(ab + ba);
end.
""";

var stringLiterals = new List<(string, SemanticTokenType, int)>
{
@@ -276,16 +281,15 @@ public class LexicalFileTests
public void UnclosedCommentFirst()
{
string pascalProgram = """
(* This is an example of an unclosed comment
program CommentError;
var
x: integer;
begin
x := 42;
end.
""";
var lexer = new Lexer(pascalProgram);
var ex = Assert.Throws<LexemeException>(() => lexer.Tokenize());
(* This is an example of an unclosed comment
program CommentError;
var
x: integer;
begin
x := 42;
end.
""";
var ex = Assert.Throws<LexemeException>(() => _lexer.Tokenize(new StringSourceReader(pascalProgram)).ToList());
// Print the exception information
_testOutputHelper.WriteLine(ex.ToString());
Assert.Equal(LexemeErrorType.UnclosedComment, ex.ErrorType);
@@ -302,11 +306,108 @@ public class LexicalFileTests

program CommentNotClosed;
""";
var lexer = new Lexer(pascalProgram);
var ex = Assert.Throws<LexemeException>(() => lexer.Tokenize());
_testOutputHelper.WriteLine(ex.ToString());
var ex = Assert.Throws<LexemeException>(() => _lexer.Tokenize(new StringSourceReader(pascalProgram)).ToList());
_testOutputHelper.WriteLine(ex.ToString());
Assert.Equal(LexemeErrorType.UnclosedComment, ex.ErrorType);
Assert.Equal((uint)4, ex.Line);
Assert.Equal((uint)26, ex.CharPosition);
}

[Fact]
public void ClosedCommentFirst()
{
string pascalProgram = """
program exFunction;
var
a, b, ret : integer;

begin
a := 100;
b := 200;
(* calling a function to get max value
*)
ret := a - b;



end.
""";
IEnumerable<SemanticToken> tokensEnumerable = _lexer.Tokenize(new StringSourceReader(pascalProgram));
List<SemanticToken> tokens = tokensEnumerable.ToList();
Assert.NotNull(tokens);
}

[Fact]
public void ClosedCommentSecond()
{
string pascalProgram = """
program exFunction;
var
a, b, ret : integer;

begin
a := 100;
b := 200;
(* calling a function to get max valued *)
ret := a - b;



end.
""";
IEnumerable<SemanticToken> tokensEnumerable = _lexer.Tokenize(new StringSourceReader(pascalProgram));
List<SemanticToken> tokens = tokensEnumerable.ToList();
Assert.NotNull(tokens);
}


[Fact]
public void ClosedCommentThird()
{
string pascalProgram = """
{
This is a block comment that does closed.
}
program CommentClosed;
""";
IEnumerable<SemanticToken> tokensEnumerable = _lexer.Tokenize(new StringSourceReader(pascalProgram));
List<SemanticToken> tokens = tokensEnumerable.ToList();
Assert.NotNull(tokens);
}

[Fact]
public void ClosedCommentFourth()
{
string pascalProgram = """
{}
program CommentClosed;
""";
IEnumerable<SemanticToken> tokensEnumerable = _lexer.Tokenize(new StringSourceReader(pascalProgram));
List<SemanticToken> tokens = tokensEnumerable.ToList();
Assert.NotNull(tokens);
}

[Fact]
public void ClosedCommentFifth()
{
string pascalProgram = """
{
}
program CommentClosed;
""";
IEnumerable<SemanticToken> tokensEnumerable = _lexer.Tokenize(new StringSourceReader(pascalProgram));
List<SemanticToken> tokens = tokensEnumerable.ToList();
Assert.NotNull(tokens);
}

[Fact]
public void ClosedCommentSixth()
{
string pascalProgram = """
(**)
""";
IEnumerable<SemanticToken> tokensEnumerable = _lexer.Tokenize(new StringSourceReader(pascalProgram));
List<SemanticToken> tokens = tokensEnumerable.ToList();
Assert.NotNull(tokens);
}
}

@@ -2,12 +2,14 @@
using Canon.Core.LexicalParser;
using Canon.Core.Exceptions;
using Xunit.Abstractions;

using Canon.Tests.Utils;
using Canon.Core.Abstractions;
namespace Canon.Tests.LexicalParserTests
{

public class NumberTests
{
private readonly ILexer _lexer = new Lexer();
private readonly ITestOutputHelper _testOutputHelper;
public NumberTests(ITestOutputHelper testOutputHelper)
{
@@ -31,8 +33,8 @@ namespace Canon.Tests.LexicalParserTests
[InlineData("$123", "0x123", NumberType.Hex)]
public void TestParseNumber(string input, string expected, NumberType expectedNumberType)
{
Lexer lexer = new(input);
List<SemanticToken> tokens = lexer.Tokenize();
IEnumerable<SemanticToken> tokensEnumerable = _lexer.Tokenize(new StringSourceReader(input));
List<SemanticToken> tokens = tokensEnumerable.ToList();
SemanticToken token = tokens[0];
Assert.Equal(SemanticTokenType.Number, token.TokenType);
NumberSemanticToken numberSemanticToken = (NumberSemanticToken)token;
@@ -41,14 +43,13 @@ namespace Canon.Tests.LexicalParserTests
}

[Theory]
[InlineData("1E", 1, 3, LexemeErrorType.IllegalNumberFormat)]
[InlineData("1E", 1, 2, LexemeErrorType.IllegalNumberFormat)]
[InlineData("123abc", 1, 4, LexemeErrorType.IllegalNumberFormat)]
[InlineData("123.45.67", 1, 7, LexemeErrorType.IllegalNumberFormat)]
[InlineData("123identifier", 1, 4, LexemeErrorType.IllegalNumberFormat)]
public void TestParseNumberError(string input, uint expectedLine, uint expectedCharPosition, LexemeErrorType expectedErrorType)
{
Lexer lexer = new(input);
var ex = Assert.Throws<LexemeException>(() => lexer.Tokenize());
var ex = Assert.Throws<LexemeException>(() => _lexer.Tokenize(new StringSourceReader(input)).ToList());
_testOutputHelper.WriteLine(ex.ToString());
Assert.Equal(expectedErrorType, ex.ErrorType);
Assert.Equal(expectedLine, ex.Line);

@@ -1,10 +1,13 @@
using Canon.Core.Enums;
using Canon.Core.LexicalParser;

using Canon.Tests.Utils;
using Canon.Core.Abstractions;
namespace Canon.Tests.LexicalParserTests;

public class OperatorTypeTests
{
private readonly ILexer _lexer = new Lexer();

[Theory]
[InlineData("+ 123", OperatorType.Plus, true)]
[InlineData("+123", OperatorType.Plus, true)]
@@ -22,8 +25,8 @@ public class OperatorTypeTests
[InlineData("m +123", OperatorType.Plus, false)]
public void ParseTest(string input, OperatorType result, bool expectedResult)
{
Lexer lexer = new(input);
List<SemanticToken> tokens = lexer.Tokenize();
IEnumerable<SemanticToken> tokensEnumerable = _lexer.Tokenize(new StringSourceReader(input));
List<SemanticToken> tokens = tokensEnumerable.ToList();

SemanticToken token = tokens[0];
if (!expectedResult)
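Taken together, the change swaps per-input lexer construction for one reusable ILexer instance driven by a source reader. For reference, the migrated call pattern as a standalone snippet (namespaces and member shapes taken from the diff above; everything else is assumed, not verified against Canon.Core):

// Top-level-statements style; also needs System.Collections.Generic and System.Linq.
using Canon.Core.Abstractions;      // ILexer, as used by the tests
using Canon.Core.LexicalParser;     // Lexer, SemanticToken
using Canon.Tests.Utils;            // StringSourceReader

ILexer lexer = new Lexer();         // reusable: no source text bound at construction
List<SemanticToken> tokens = lexer
    .Tokenize(new StringSourceReader("program main; begin end."))
    .ToList();                      // enumerating the sequence is what actually runs the lexer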