lexical-parser (#15)
add: 词法分析器剩下数字、标识符的细节处理以及错误处理 Co-authored-by: duqoo <92306417+duqoo@users.noreply.github.com> Reviewed-on: PostGuard/Canon#15 Co-authored-by: Huaps <1183155719@qq.com> Co-committed-by: Huaps <1183155719@qq.com>
This commit is contained in:
54
Canon.Tests/LexicalParserTests/CharacterTypeTests.cs
Normal file
54
Canon.Tests/LexicalParserTests/CharacterTypeTests.cs
Normal file
@@ -0,0 +1,54 @@
|
||||
using Canon.Core.Enums;
|
||||
using Canon.Core.LexicalParser;
|
||||
using Xunit.Abstractions;
|
||||
using Canon.Core.Exceptions;
|
||||
|
||||
namespace Canon.Tests.LexicalParserTests
{
    /// <summary>
    /// Tests for lexing Pascal character/string literals ('...') and for the
    /// lexer's error reporting on malformed character input.
    /// </summary>
    public class CharacterTypeTests
    {
        // xUnit-injected sink used to echo token/exception details into the test log.
        private readonly ITestOutputHelper _testOutputHelper;

        public CharacterTypeTests(ITestOutputHelper testOutputHelper)
        {
            _testOutputHelper = testOutputHelper;
        }

        /// <summary>
        /// A quoted literal should lex to exactly one Character token whose
        /// literal value is the text between the quotes.
        /// </summary>
        /// <param name="input">Source text handed to the lexer.</param>
        /// <param name="expectedResult">
        /// Expected literal value; null marks an input that should throw a
        /// <see cref="LexemeException"/> (no such case is listed yet, which is
        /// why the null branch is currently unexercised).
        /// </param>
        [Theory]
        [InlineData("'a'", "a")]
        [InlineData("'Hello, World!'", "Hello, World!")]

        public void TestCharacterType(string input, string? expectedResult)
        {
            Lexer lexer = new(input);
            if (expectedResult == null)
            {
                Assert.Throws<LexemeException>(() => lexer.Tokenize());
            }
            else
            {
                List<SemanticToken> tokens = lexer.Tokenize();
                _testOutputHelper.WriteLine(tokens[0].LiteralValue);
                Assert.Single(tokens);
                Assert.Equal(SemanticTokenType.Character, tokens[0].TokenType);
                Assert.Equal(expectedResult, tokens[0].LiteralValue);
            }
        }

        /// <summary>
        /// Malformed character/string input should raise a LexemeException that
        /// carries the expected error type and the 1-based line/column where
        /// the lexer detected the problem.
        /// </summary>
        [Theory]
        //[InlineData("'\\x'", 1, 2, LexemeException.LexemeErrorType.InvalidEscapeSequence)]
        [InlineData("'This is an unclosed string literal", 1, 36, LexemeErrorType.UnclosedStringLiteral)]
        [InlineData("'This", 1, 6, LexemeErrorType.UnclosedStringLiteral)]
        [InlineData("x @", 1, 3, LexemeErrorType.UnknownCharacterOrString)]
        //[InlineData("\"x\'", 1, 3, LexemeException.LexemeErrorType.UnclosedStringLiteral)]
        public void TestParseCharacterError(string input, uint expectedLine, uint expectedCharPosition, LexemeErrorType expectedErrorType)
        {
            Lexer lexer = new(input);
            var ex = Assert.Throws<LexemeException>(() => lexer.Tokenize());
            // Log the full exception so a failing position/type is easy to diagnose.
            _testOutputHelper.WriteLine(ex.ToString());
            Assert.Equal(expectedErrorType, ex.ErrorType);
            Assert.Equal(expectedLine, ex.Line);
            Assert.Equal(expectedCharPosition, ex.CharPosition);
        }
    }
}
|
@@ -7,7 +7,7 @@ public class DelimiterTests
|
||||
{
|
||||
[Theory]
|
||||
[InlineData(",123", DelimiterType.Comma)]
|
||||
[InlineData(".123", DelimiterType.Period)]
|
||||
// [InlineData(".123", DelimiterType.Period)]
|
||||
[InlineData(":123", DelimiterType.Colon)]
|
||||
[InlineData(";123", DelimiterType.Semicolon)]
|
||||
[InlineData("(123)", DelimiterType.LeftParenthesis)]
|
||||
|
32
Canon.Tests/LexicalParserTests/ErrorSingleTests.cs
Normal file
32
Canon.Tests/LexicalParserTests/ErrorSingleTests.cs
Normal file
@@ -0,0 +1,32 @@
|
||||
using Canon.Core.LexicalParser;
|
||||
using Canon.Core.Exceptions;
|
||||
using Xunit.Abstractions;
|
||||
using Canon.Core.Enums;
|
||||
|
||||
namespace Canon.Tests.LexicalParserTests
{
    /// <summary>
    /// Single-statement error cases: each malformed program must make the
    /// lexer throw a LexemeException with the expected type and position.
    /// </summary>
    public class ErrorSingleTests
    {
        // xUnit-injected sink for diagnostic output.
        private readonly ITestOutputHelper _output;

        public ErrorSingleTests(ITestOutputHelper testOutputHelper) => _output = testOutputHelper;

        [Theory]
        [InlineData("program main; var a: integer; begin a := 3#; end.", 1, 43, LexemeErrorType.IllegalNumberFormat)]
        [InlineData("char c = 'abc;", 1, 15, LexemeErrorType.UnclosedStringLiteral)]
        [InlineData("x := 10 @;", 1, 9, LexemeErrorType.UnknownCharacterOrString)]
        [InlineData("identifier_with_special_chars@#",1, 30, LexemeErrorType.UnknownCharacterOrString)]
        public void TestUnknownCharacterError(string pascalProgram, uint expectedLine, uint expectedCharPosition, LexemeErrorType expectedErrorType)
        {
            Lexer lexer = new(pascalProgram);

            // Tokenize must fail; capture the exception for the assertions below.
            LexemeException ex = Assert.Throws<LexemeException>(() => lexer.Tokenize());
            _output.WriteLine(ex.ToString());
            Assert.Equal(expectedErrorType, ex.ErrorType);
            Assert.Equal(expectedLine, ex.Line);
            Assert.Equal(expectedCharPosition, ex.CharPosition);
        }
    }
}
|
@@ -1,6 +1,5 @@
|
||||
using Canon.Core.Enums;
|
||||
using Canon.Core.LexicalParser;
|
||||
using Xunit;
|
||||
|
||||
namespace Canon.Tests.LexicalParserTests
|
||||
{
|
||||
@@ -10,20 +9,15 @@ namespace Canon.Tests.LexicalParserTests
|
||||
[InlineData("identifier", true)]
|
||||
[InlineData("_identifier", true)]
|
||||
[InlineData("identifier123", true)]
|
||||
[InlineData("123identifier", false)]
|
||||
[InlineData("identifier_with_underscores", true)]
|
||||
[InlineData("IdentifierWithCamelCase", true)]
|
||||
[InlineData("identifier-with-hyphen", false)]
|
||||
[InlineData("identifier with spaces", false)]
|
||||
[InlineData("identifier_with_special_chars@#", false)]
|
||||
[InlineData("", false)]
|
||||
[InlineData(" ", false)]
|
||||
[InlineData("andand", false)]
|
||||
[InlineData("andand", true)]
|
||||
public void TestParseIdentifier(string input, bool expectedResult)
{
    // Lex the raw input and check whether its first (and only) token is an identifier.
    Lexer lexer = new(input);
    List<SemanticToken> tokens = lexer.Tokenize();

    // NOTE(review): Assert.Single presumes every input — including invalid ones
    // like "" or "identifier with spaces" — still yields exactly one token
    // (e.g. an EOF/placeholder token). Confirm against Lexer.Tokenize's contract.
    Assert.Single(tokens);
    Assert.Equal(expectedResult, tokens.FirstOrDefault()?.TokenType == SemanticTokenType.Identifier);
}
|
||||
}
|
@@ -21,6 +21,7 @@ public class KeywordTypeTests
|
||||
[InlineData("for", KeywordType.For)]
|
||||
[InlineData("to", KeywordType.To)]
|
||||
[InlineData("do", KeywordType.Do)]
|
||||
[InlineData("DO", KeywordType.Do)]
|
||||
public void SmokeTest(string input, KeywordType type)
|
||||
{
|
||||
Lexer lexer = new(input);
|
||||
|
312
Canon.Tests/LexicalParserTests/LexicalFileTests.cs
Normal file
312
Canon.Tests/LexicalParserTests/LexicalFileTests.cs
Normal file
@@ -0,0 +1,312 @@
|
||||
using System.Text.RegularExpressions;
|
||||
using Canon.Core.Enums;
|
||||
using Canon.Core.Exceptions;
|
||||
using Canon.Core.LexicalParser;
|
||||
using Xunit.Abstractions;
|
||||
|
||||
namespace Canon.Tests.LexicalParserTests;
|
||||
|
||||
/// <summary>
/// End-to-end lexer tests: whole Pascal programs are tokenized and compared
/// against an expected token list whose positions are derived from the source
/// text itself (see <see cref="FindNthPosition"/>).
/// </summary>
public class LexicalFileTests
{
    // xUnit-injected sink for expected/actual token dumps.
    private readonly ITestOutputHelper _testOutputHelper;

    public LexicalFileTests(ITestOutputHelper testOutputHelper)
    {
        _testOutputHelper = testOutputHelper;
    }

    // TODO: position lookup is plain substring matching, so one token's text must
    // not be contained in another (e.g. "ab" inside "abc"). Hand-write a test file
    // that contains such overlapping names to expose this limitation.
    //
    // Returns the (1-based line, column) of the (occurrence+1)-th match of
    // `target` in `pascalProgram`. The column reported is columnNumber + target.Length,
    // i.e. the position just past the match's last character — presumably matching
    // how the lexer reports token positions (TODO confirm against SemanticToken).
    private static (int, int) FindNthPosition(string pascalProgram, string target, int occurrence)
    {
        int lineNumber = 0;
        (int, int) nthPosition = (0, 0);
        int foundCount = 0;
        // Callers pass a 0-based "skip count"; convert to a 1-based occurrence.
        occurrence = occurrence + 1;

        using (StringReader sr = new StringReader(pascalProgram))
        {
            string line;
            while ((line = sr.ReadLine()) != null)
            {
                lineNumber++;
                int columnNumber = -1;

                // line = Regex.Replace(line, "'[^']*'", "$");

                // Scan every occurrence of `target` on this line.
                while ((columnNumber = line.IndexOf(target, columnNumber + 1, StringComparison.Ordinal)) != -1)
                {
                    foundCount++;
                    if (foundCount == occurrence)
                    {
                        nthPosition = (lineNumber, columnNumber + target.Length);
                        return nthPosition;
                    }
                }
            }
        }

        // NOTE(review): nthPosition is only ever assigned right before the early
        // return above, so after the loop it is always (0, 0): this throw always
        // fires and the trailing `return` below is dead code.
        if (nthPosition == (0, 0))
        {
            throw new Exception($"'{target}' not found in program.");
        }

        return nthPosition;
    }

    /// <summary>
    /// Builds the expected token list from (literal, type, skipCount) triples —
    /// skipCount selects which occurrence of the literal in the source the token
    /// corresponds to — then tokenizes the program and compares element-wise.
    /// </summary>
    private void TestLexicalAnalysis(string pascalProgram, List<(string, SemanticTokenType, int)> stringLiterals)
    {
        var expectedTokens = new List<SemanticToken>();

        foreach (var (literal, tokenType, skipCount) in stringLiterals)
        {
            var (line, column) = FindNthPosition(pascalProgram, literal, skipCount);
            switch (tokenType)
            {
                case SemanticTokenType.Keyword:
                    expectedTokens.Add(new KeywordSemanticToken
                    {
                        LinePos = (uint)line,
                        CharacterPos = (uint)column,
                        LiteralValue = literal,
                        KeywordType = KeywordSemanticToken.GetKeywordTypeByKeyword(literal)
                    });
                    break;
                case SemanticTokenType.Identifier:
                    expectedTokens.Add(new IdentifierSemanticToken
                    {
                        LinePos = (uint)line, CharacterPos = (uint)column, LiteralValue = literal
                    });
                    break;
                case SemanticTokenType.Delimiter:
                    // Delimiters are built via TryParse on the single character;
                    // a failed parse silently contributes no expected token.
                    if (DelimiterSemanticToken.TryParse((uint)line, (uint)column, new LinkedListNode<char>(literal[0]),
                            out var delimiterToken))
                    {
                        if (delimiterToken != null)
                        {
                            expectedTokens.Add(delimiterToken);
                        }
                    }

                    break;
                case SemanticTokenType.Operator:
                    expectedTokens.Add(new OperatorSemanticToken
                    {
                        LinePos = (uint)line,
                        CharacterPos = (uint)column,
                        LiteralValue = literal,
                        OperatorType = OperatorSemanticToken.GetOperatorTypeByOperator(literal)
                    });
                    break;
                case SemanticTokenType.Character:
                    expectedTokens.Add(new CharacterSemanticToken
                    {
                        LinePos = (uint)line, CharacterPos = (uint)column, LiteralValue = literal
                    });
                    break;
                case SemanticTokenType.Number:
                    // NOTE(review): all expected number tokens are forced to
                    // NumberType.Integer; real-number programs would need this extended.
                    expectedTokens.Add(new NumberSemanticToken
                    {
                        LinePos = (uint)line,
                        CharacterPos = (uint)column,
                        LiteralValue = literal,
                        NumberType = NumberType.Integer
                    });
                    break;
            }
        }

        // Token order must match the lexer's output order: by line, then column.
        expectedTokens = expectedTokens.OrderBy(token => token.LinePos).ThenBy(token => token.CharacterPos).ToList();
        // Special-case hack: the 'hello, world!' character literal's position is
        // shifted one column right — presumably to account for the closing quote
        // that FindNthPosition does not see (TODO confirm against lexer output).
        expectedTokens = expectedTokens.Select(token =>
            token is CharacterSemanticToken characterToken && characterToken.LiteralValue == "hello, world!"
                ? new CharacterSemanticToken
                {
                    LinePos = characterToken.LinePos,
                    CharacterPos = characterToken.CharacterPos + 1,
                    LiteralValue = characterToken.LiteralValue
                }
                : token).ToList();

        var lexer = new Lexer(pascalProgram);
        var actualTokens = lexer.Tokenize();
        // Element-wise comparison first, with logging, so the first divergence is visible.
        for (int i = 0; i < expectedTokens.Count; i++)
        {
            _testOutputHelper.WriteLine($"Expect: {expectedTokens[i]}");
            _testOutputHelper.WriteLine($"Actual: {actualTokens[i]}");
            _testOutputHelper.WriteLine("----");
            Assert.Equal(expectedTokens[i], actualTokens[i]);
        }

        // Whole-list comparison also catches extra trailing actual tokens.
        Assert.Equal(expectedTokens, actualTokens);
    }

    // Hello-world program: keywords, identifiers, delimiters, one string literal.
    [Fact]
    public void TestLexicalAnalysisFirst()
    {
        string pascalProgram = """
            program HelloWorld;
            var
            message: string;
            begin
            message := 'hello, world!';
            writeln(message);
            end.
            """;

        var stringLiterals = new List<(string, SemanticTokenType, int)>
        {
            ("program", SemanticTokenType.Keyword, 0),
            ("HelloWorld", SemanticTokenType.Identifier, 0),
            (";", SemanticTokenType.Delimiter, 0),
            ("var", SemanticTokenType.Keyword, 0),
            ("message", SemanticTokenType.Identifier, 0),
            (":", SemanticTokenType.Delimiter, 0),
            ("string", SemanticTokenType.Identifier, 0),
            (";", SemanticTokenType.Delimiter, 1),
            ("begin", SemanticTokenType.Keyword, 0),
            ("message", SemanticTokenType.Identifier, 1),
            (":=", SemanticTokenType.Operator, 0),
            ("hello, world!", SemanticTokenType.Character, 0),
            (";", SemanticTokenType.Delimiter, 2),
            ("writeln", SemanticTokenType.Identifier, 0),
            ("(", SemanticTokenType.Delimiter, 0),
            ("message", SemanticTokenType.Identifier, 2),
            (")", SemanticTokenType.Delimiter, 0),
            (";", SemanticTokenType.Delimiter, 3),
            ("end", SemanticTokenType.Keyword, 0),
            (".", SemanticTokenType.Delimiter, 0)
        };
        TestLexicalAnalysis(pascalProgram, stringLiterals);
    }

    // Minimal integer assignment program.
    [Fact]
    public void TestLexicalAnalysisSecond()
    {
        string pascalProgram = """
            program main;
            var
            ab: integer;
            begin
            ab := 3;
            write(ab);
            end.
            """;

        var stringLiterals = new List<(string, SemanticTokenType, int)>
        {
            ("program", SemanticTokenType.Keyword, 0),
            ("main", SemanticTokenType.Identifier, 0),
            (";", SemanticTokenType.Delimiter, 0),
            ("var", SemanticTokenType.Keyword, 0),
            ("ab", SemanticTokenType.Identifier, 0),
            (":", SemanticTokenType.Delimiter, 0),
            ("integer", SemanticTokenType.Keyword, 0),
            (";", SemanticTokenType.Delimiter, 1),
            ("begin", SemanticTokenType.Keyword, 0),
            ("ab", SemanticTokenType.Identifier, 1),
            (":=", SemanticTokenType.Operator, 0),
            ("3", SemanticTokenType.Number, 0),
            (";", SemanticTokenType.Delimiter, 2),
            ("write", SemanticTokenType.Identifier, 0),
            ("(", SemanticTokenType.Delimiter, 0),
            ("ab", SemanticTokenType.Identifier, 2),
            (")", SemanticTokenType.Delimiter, 0),
            (";", SemanticTokenType.Delimiter, 3),
            ("end", SemanticTokenType.Keyword, 0),
            (".", SemanticTokenType.Delimiter, 0)
        };
        TestLexicalAnalysis(pascalProgram, stringLiterals);
    }

    // Program with a {comment}: the comment must produce no tokens.
    [Fact]
    public void TestLexicalAnalysisThird()
    {
        string pascalProgram = """
            {test}
            program main;
            var
            ab, ba: integer;
            begin
            ab := 3;
            ba := 5;
            ab := 5;
            write(ab + ba);
            end.
            """;

        var stringLiterals = new List<(string, SemanticTokenType, int)>
        {
            ("program", SemanticTokenType.Keyword, 0),
            ("main", SemanticTokenType.Identifier, 0),
            (";", SemanticTokenType.Delimiter, 0),
            ("var", SemanticTokenType.Keyword, 0),
            ("ab", SemanticTokenType.Identifier, 0),
            (",", SemanticTokenType.Delimiter, 0),
            ("ba", SemanticTokenType.Identifier, 0),
            (":", SemanticTokenType.Delimiter, 0),
            ("integer", SemanticTokenType.Keyword, 0),
            (";", SemanticTokenType.Delimiter, 1),
            ("begin", SemanticTokenType.Keyword, 0),
            ("ab", SemanticTokenType.Identifier, 1),
            (":=", SemanticTokenType.Operator, 0),
            ("3", SemanticTokenType.Number, 0),
            (";", SemanticTokenType.Delimiter, 2),
            ("ba", SemanticTokenType.Identifier, 1),
            (":=", SemanticTokenType.Operator, 1),
            ("5", SemanticTokenType.Number, 0),
            (";", SemanticTokenType.Delimiter, 3),
            ("ab", SemanticTokenType.Identifier, 2),
            (":=", SemanticTokenType.Operator, 2),
            ("5", SemanticTokenType.Number, 1),
            (";", SemanticTokenType.Delimiter, 4),
            ("write", SemanticTokenType.Identifier, 0),
            ("(", SemanticTokenType.Delimiter, 0),
            ("ab", SemanticTokenType.Identifier, 3),
            ("+", SemanticTokenType.Operator, 0),
            ("ba", SemanticTokenType.Identifier, 2),
            (")", SemanticTokenType.Delimiter, 0),
            (";", SemanticTokenType.Delimiter, 5),
            ("end", SemanticTokenType.Keyword, 0),
            (".", SemanticTokenType.Delimiter, 0)
        };
        TestLexicalAnalysis(pascalProgram, stringLiterals);
    }

    // An unterminated (* ... comment must be reported at end-of-input.
    [Fact]
    public void UnclosedCommentFirst()
    {
        string pascalProgram = """
            (* This is an example of an unclosed comment
            program CommentError;
            var
            x: integer;
            begin
            x := 42;
            end.
            """;
        var lexer = new Lexer(pascalProgram);
        var ex = Assert.Throws<LexemeException>(() => lexer.Tokenize());
        // Print the exception details for diagnosis.
        _testOutputHelper.WriteLine(ex.ToString());
        Assert.Equal(LexemeErrorType.UnclosedComment, ex.ErrorType);
        // Position just past the last character of the final line ("end.").
        Assert.Equal((uint)7, ex.Line);
        Assert.Equal((uint)5, ex.CharPosition);
    }

    // An unterminated { ... comment must likewise be reported at end-of-input.
    [Fact]
    public void UnclosedCommentSecond()
    {
        string pascalProgram = """
            {
            This is a block comment that does not close.

            program CommentNotClosed;
            """;
        var lexer = new Lexer(pascalProgram);
        var ex = Assert.Throws<LexemeException>(() => lexer.Tokenize());
        _testOutputHelper.WriteLine(ex.ToString());
        Assert.Equal(LexemeErrorType.UnclosedComment, ex.ErrorType);
        Assert.Equal((uint)4, ex.Line);
        Assert.Equal((uint)26, ex.CharPosition);
    }
}
|
@@ -1,46 +1,58 @@
|
||||
using Canon.Core.Enums;
|
||||
using Canon.Core.LexicalParser;
|
||||
using Canon.Core.Exceptions;
|
||||
using Xunit.Abstractions;
|
||||
|
||||
namespace Canon.Tests.LexicalParserTests
{

    /// <summary>
    /// Tests for lexing numeric literals (integer, real with exponent,
    /// leading-dot real, and $-prefixed hex) and for number-format errors.
    /// </summary>
    public class NumberTests
    {
        // xUnit-injected sink for diagnostic output.
        private readonly ITestOutputHelper _testOutputHelper;
        public NumberTests(ITestOutputHelper testOutputHelper)
        {
            _testOutputHelper = testOutputHelper;
        }

        /// <summary>
        /// Valid numeric literals: a single Number token with the expected
        /// NumberType and string value. Note "$123" (Pascal hex) is expected
        /// as "0x123" — the lexer presumably normalizes hex to 0x form
        /// (TODO confirm against the Lexer implementation).
        /// </summary>
        [Theory]
        [InlineData("123", "123", NumberType.Integer)]
        [InlineData("0", "0", NumberType.Integer)]
        [InlineData("1.23", "1.23", NumberType.Real)]
        [InlineData("0.0", "0.0", NumberType.Real)]
        [InlineData("1e7", "1e7", NumberType.Real)]
        [InlineData("1E7", "1E7", NumberType.Real)]
        [InlineData("1.23e-7", "1.23e-7", NumberType.Real)]
        [InlineData("1.23E-7", "1.23E-7", NumberType.Real)]
        [InlineData("1234567890", "1234567890", NumberType.Integer)]
        [InlineData("1234567890.1234567890", "1234567890.1234567890", NumberType.Real)]
        [InlineData("1e-7", "1e-7", NumberType.Real)]
        [InlineData("1E-7", "1E-7", NumberType.Real)]
        [InlineData(".67",".67", NumberType.Real)]
        [InlineData("$123", "0x123", NumberType.Hex)]
        public void TestParseNumber(string input, string expected, NumberType expectedNumberType)
        {
            Lexer lexer = new(input);
            List<SemanticToken> tokens = lexer.Tokenize();

            SemanticToken token = tokens[0];
            Assert.Equal(SemanticTokenType.Number, token.TokenType);
            NumberSemanticToken numberSemanticToken = (NumberSemanticToken)token;
            Assert.Equal(expectedNumberType, numberSemanticToken.NumberType);
            // NOTE(review): asserting the same `expected` against both Value and
            // LiteralValue implies the two are identical even for "$123" -> "0x123";
            // one of these lines may be diff residue — verify against NumberSemanticToken.
            Assert.Equal(expected, numberSemanticToken.Value);
            Assert.Equal(expected, numberSemanticToken.LiteralValue);
        }

        /// <summary>
        /// Malformed numbers must raise a LexemeException with
        /// IllegalNumberFormat and the 1-based position of the offending character.
        /// </summary>
        [Theory]
        [InlineData("1E", 1, 3, LexemeErrorType.IllegalNumberFormat)]
        [InlineData("123abc", 1, 4, LexemeErrorType.IllegalNumberFormat)]
        [InlineData("123.45.67", 1, 7, LexemeErrorType.IllegalNumberFormat)]
        [InlineData("123identifier", 1, 4, LexemeErrorType.IllegalNumberFormat)]
        public void TestParseNumberError(string input, uint expectedLine, uint expectedCharPosition, LexemeErrorType expectedErrorType)
        {
            Lexer lexer = new(input);
            var ex = Assert.Throws<LexemeException>(() => lexer.Tokenize());
            _testOutputHelper.WriteLine(ex.ToString());
            Assert.Equal(expectedErrorType, ex.ErrorType);
            Assert.Equal(expectedLine, ex.Line);
            Assert.Equal(expectedCharPosition, ex.CharPosition);
        }
    }
}
|
||||
|
@@ -6,38 +6,33 @@ namespace Canon.Tests.LexicalParserTests;
|
||||
/// <summary>
/// Tests that operator lexemes at the start of the input produce the expected
/// OperatorType, and that inputs beginning with a non-operator token do not.
/// </summary>
public class OperatorTypeTests
{
    // expectedResult == true: the FIRST token must be an Operator of `result`'s type.
    // expectedResult == false: the first token must NOT be an Operator (the
    // operator appears later in the input, after a number or identifier).
    [Theory]
    [InlineData("+ 123", OperatorType.Plus, true)]
    [InlineData("+123", OperatorType.Plus, true)]
    [InlineData("-123", OperatorType.Minus, true)]
    [InlineData("*123", OperatorType.Multiply, true)]
    [InlineData("/123", OperatorType.Divide, true)]
    [InlineData("=123", OperatorType.Equal, true)]
    [InlineData("<123", OperatorType.Less, true)]
    [InlineData(">123", OperatorType.Greater, true)]
    [InlineData("<=123", OperatorType.LessEqual, true)]
    [InlineData(">=123", OperatorType.GreaterEqual, true)]
    [InlineData("<>123", OperatorType.NotEqual, true)]
    [InlineData(":=123", OperatorType.Assign, true)]
    [InlineData("1 + 123", OperatorType.Plus, false)]
    [InlineData("m +123", OperatorType.Plus, false)]
    public void ParseTest(string input, OperatorType result, bool expectedResult)
    {
        Lexer lexer = new(input);
        List<SemanticToken> tokens = lexer.Tokenize();

        SemanticToken token = tokens[0];
        if (!expectedResult)
        {
            // Negative case: only assert the first token is not an operator.
            Assert.NotEqual(SemanticTokenType.Operator, token.TokenType);
            return;
        }
        Assert.Equal(SemanticTokenType.Operator, token.TokenType);
        OperatorSemanticToken operatorSemanticToken = (OperatorSemanticToken)token;
        Assert.Equal(result, operatorSemanticToken.OperatorType);
    }
}
|
||||
|
Reference in New Issue
Block a user