misc: remove redundant code (#41)

Reviewed-on: PostGuard/Canon#41
jackfiled 2024-04-20 11:48:05 +08:00
parent dbbab1c761
commit 0fdfef8854
5 changed files with 198 additions and 367 deletions


@@ -13,5 +13,7 @@ jobs:
path: ~/.nuget/packages
key: ${{ runner.os }}-nuget
save-always: true
- name: Build code
run: dotnet build
- name: Run test code
run: dotnet test


@@ -4,60 +4,57 @@ namespace Canon.Core.LexicalParser;
public static class LexemeFactory
{
public static SemanticToken MakeToken(SemanticTokenType tokenType,string literal,uint _line,uint _chPos)
public static SemanticToken MakeToken(SemanticTokenType tokenType,string literal,uint line,uint chPos)
{
SemanticToken? token;
switch (tokenType)
{
case SemanticTokenType.Character:
CharacterSemanticToken characterSemanticToken = new CharacterSemanticToken()
CharacterSemanticToken characterSemanticToken = new()
{
LinePos = _line, CharacterPos = _chPos, LiteralValue = literal,
LinePos = line, CharacterPos = chPos, LiteralValue = literal,
};
token = characterSemanticToken;
break;
case SemanticTokenType.Identifier:
IdentifierSemanticToken identifierSemanticToken = new IdentifierSemanticToken()
IdentifierSemanticToken identifierSemanticToken = new()
{
LinePos = _line, CharacterPos = _chPos, LiteralValue = literal,
LinePos = line, CharacterPos = chPos, LiteralValue = literal,
};
token = identifierSemanticToken;
break;
default:
throw new ArgumentOutOfRangeException(nameof(tokenType), tokenType, null);
throw new InvalidOperationException("Can only create Character or Identifier SemanticToken.");
}
return token;
}
public static KeywordSemanticToken MakeToken(KeywordType keywordType,string literal,uint _line,uint _chPos)
public static KeywordSemanticToken MakeToken(KeywordType keywordType,string literal,uint line,uint chPos)
{
KeywordSemanticToken keywordSemanticToken = new KeywordSemanticToken
KeywordSemanticToken keywordSemanticToken = new()
{
LinePos = _line,
CharacterPos = _chPos,
LinePos = line,
CharacterPos = chPos,
LiteralValue = literal,
KeywordType = keywordType
};
return keywordSemanticToken;
}
public static DelimiterSemanticToken MakeToken(DelimiterType delimiterType,string literal,uint _line,uint _chPos)
public static DelimiterSemanticToken MakeToken(DelimiterType delimiterType,string literal,uint line,uint chPos)
{
DelimiterSemanticToken delimiterSemanticToken = new DelimiterSemanticToken()
DelimiterSemanticToken delimiterSemanticToken = new()
{
LinePos = _line,
CharacterPos = _chPos,
LinePos = line,
CharacterPos = chPos,
LiteralValue = literal,
DelimiterType = delimiterType
};
return delimiterSemanticToken;
}
public static NumberSemanticToken MakeToken(NumberType numberType,string literal,uint _line,uint _chPos)
public static NumberSemanticToken MakeToken(NumberType numberType,string literal,uint line,uint chPos)
{
string temp = literal;
string result;
@@ -70,10 +67,10 @@ public static class LexemeFactory
result = temp;
}
NumberSemanticToken numberSemanticToken = new NumberSemanticToken()
NumberSemanticToken numberSemanticToken = new()
{
LinePos = _line,
CharacterPos = _chPos,
LinePos = line,
CharacterPos = chPos,
LiteralValue = result,
NumberType = numberType
};
@@ -81,12 +78,12 @@ public static class LexemeFactory
}
public static OperatorSemanticToken MakeToken(OperatorType operatorType,string literal,uint _line,uint _chPos)
public static OperatorSemanticToken MakeToken(OperatorType operatorType,string literal,uint line,uint chPos)
{
OperatorSemanticToken operatorSemanticToken = new OperatorSemanticToken()
OperatorSemanticToken operatorSemanticToken = new()
{
LinePos = _line,
CharacterPos = _chPos,
LinePos = line,
CharacterPos = chPos,
LiteralValue = literal,
OperatorType = operatorType
};


@@ -22,21 +22,10 @@ public class Lexer : ILexer
private uint _line = 1;
private uint _chPos;
// Token statistics
private readonly Dictionary<SemanticTokenType, int> _tokenCount = new()
{
{ SemanticTokenType.Keyword, 0 },
{ SemanticTokenType.Number, 0 },
{ SemanticTokenType.Operator, 0 },
{ SemanticTokenType.Delimiter, 0 },
{ SemanticTokenType.Identifier, 0 },
{ SemanticTokenType.Character, 0 },
{ SemanticTokenType.End, 0 }
};
public IEnumerable<SemanticToken> Tokenize(ISourceReader reader)
{
_reader = reader;
_state = StateType.Start;
while (_state != StateType.Done)
{
@@ -526,7 +515,11 @@ public class Lexer : ILexer
break;
}
AddToTokens(_semanticToken);
if (_semanticToken is null)
{
throw new InvalidOperationException();
}
_tokens.Add(_semanticToken);
_state = StateType.Start;
}
@@ -614,9 +607,6 @@ public class Lexer : ILexer
private void AddToTokens(SemanticToken semanticToken)
{
_tokens.Add(semanticToken);
_tokenCount[semanticToken.TokenType]++;
Console.WriteLine($"<{semanticToken.TokenType}>");
Console.WriteLine(semanticToken.LiteralValue);
}
private void Cat()


@@ -54,12 +54,13 @@ public abstract class SemanticToken : IEquatable<SemanticToken>
/// </summary>
public static EndSemanticToken End => new()
{
LinePos = 0, CharacterPos = 0, LiteralValue = string.Empty
LinePos = uint.MaxValue, CharacterPos = uint.MaxValue, LiteralValue = string.Empty
};
public override string ToString()
{
return $"LinePos: {LinePos}, CharacterPos: {CharacterPos}, LiteralValue: {LiteralValue}, TokenType: {TokenType}";
return
$"LinePos: {LinePos}, CharacterPos: {CharacterPos}, LiteralValue: {LiteralValue}, TokenType: {TokenType}";
}
public bool Equals(SemanticToken? other)
@@ -93,13 +94,6 @@ public abstract class SemanticToken : IEquatable<SemanticToken>
public class CharacterSemanticToken : SemanticToken
{
public override SemanticTokenType TokenType => SemanticTokenType.Character;
public static bool TryParse(uint linePos, uint characterPos, LinkedListNode<char> now,
out CharacterSemanticToken? token)
{
token = null;
return false;
}
}
/// <summary>
@@ -146,7 +140,7 @@ public class DelimiterSemanticToken : SemanticToken
public override int GetHashCode()
{
return base.GetHashCode() ^ this.DelimiterType.GetHashCode();
return base.GetHashCode() ^ DelimiterType.GetHashCode();
}
}
@@ -159,33 +153,34 @@ public class KeywordSemanticToken : SemanticToken
public required KeywordType KeywordType { get; init; }
public static readonly Dictionary<string, KeywordType> KeywordTypes = new Dictionary<string, KeywordType>(StringComparer.OrdinalIgnoreCase)
{
{ "program", KeywordType.Program },
{ "const", KeywordType.Const },
{ "var", KeywordType.Var },
{ "procedure", KeywordType.Procedure },
{ "function", KeywordType.Function },
{ "begin", KeywordType.Begin },
{ "end", KeywordType.End },
{ "array", KeywordType.Array },
{ "of", KeywordType.Of },
{ "if", KeywordType.If },
{ "then", KeywordType.Then },
{ "else", KeywordType.Else },
{ "for", KeywordType.For },
{ "to", KeywordType.To },
{ "do", KeywordType.Do },
{ "integer", KeywordType.Integer },
{ "real", KeywordType.Real },
{ "boolean", KeywordType.Boolean },
{ "character", KeywordType.Character },
{ "div", KeywordType.Divide }, // 注意: Pascal 使用 'div' 而不是 '/'
{ "not", KeywordType.Not },
{ "mod", KeywordType.Mod },
{ "and", KeywordType.And },
{ "or", KeywordType.Or }
};
public static readonly Dictionary<string, KeywordType> KeywordTypes =
new Dictionary<string, KeywordType>(StringComparer.OrdinalIgnoreCase)
{
{ "program", KeywordType.Program },
{ "const", KeywordType.Const },
{ "var", KeywordType.Var },
{ "procedure", KeywordType.Procedure },
{ "function", KeywordType.Function },
{ "begin", KeywordType.Begin },
{ "end", KeywordType.End },
{ "array", KeywordType.Array },
{ "of", KeywordType.Of },
{ "if", KeywordType.If },
{ "then", KeywordType.Then },
{ "else", KeywordType.Else },
{ "for", KeywordType.For },
{ "to", KeywordType.To },
{ "do", KeywordType.Do },
{ "integer", KeywordType.Integer },
{ "real", KeywordType.Real },
{ "boolean", KeywordType.Boolean },
{ "character", KeywordType.Character },
{ "div", KeywordType.Divide }, // 注意: Pascal 使用 'div' 而不是 '/'
{ "not", KeywordType.Not },
{ "mod", KeywordType.Mod },
{ "and", KeywordType.And },
{ "or", KeywordType.Or }
};
public static KeywordType GetKeywordTypeByKeyword(string keyword)
{
@@ -199,56 +194,6 @@ public class KeywordSemanticToken : SemanticToken
}
}
public static bool TryParse(uint linePos, uint characterPos, LinkedListNode<char> now,
out KeywordSemanticToken? token)
{
string buffer = new([now.Value]);
if (now.Next is null)
{
// No keyword is shorter than two characters
token = null;
return false;
}
now = now.Next;
buffer += now.Value;
switch (buffer)
{
case "do":
token = new KeywordSemanticToken
{
LinePos = linePos,
CharacterPos = characterPos,
LiteralValue = "do",
KeywordType = KeywordType.Do
};
return true;
case "Of":
token = new KeywordSemanticToken
{
LinePos = linePos,
CharacterPos = characterPos,
LiteralValue = "of",
KeywordType = KeywordType.Of
};
return true;
case "If":
token = new KeywordSemanticToken
{
LinePos = linePos,
CharacterPos = characterPos,
LiteralValue = "if",
KeywordType = KeywordType.If
};
return true;
}
token = null;
return false;
}
public override int GetHashCode()
{
return base.GetHashCode() ^ this.KeywordType.GetHashCode();
@@ -291,16 +236,9 @@ public class OperatorSemanticToken : SemanticToken
}
}
public static bool TryParse(uint linePos, uint characterPos, LinkedListNode<char> now,
out OperatorSemanticToken? token)
{
token = null;
return false;
}
public override int GetHashCode()
{
return base.GetHashCode() ^ this.OperatorType.GetHashCode();
return base.GetHashCode() ^ OperatorType.GetHashCode();
}
}
@@ -315,7 +253,7 @@ public class NumberSemanticToken : SemanticToken
public override int GetHashCode()
{
return base.GetHashCode() ^ this.NumberType.GetHashCode();
return base.GetHashCode() ^ NumberType.GetHashCode();
}
}
@@ -326,17 +264,13 @@ public class IdentifierSemanticToken : SemanticToken
{
public override SemanticTokenType TokenType => SemanticTokenType.Identifier;
public static bool TryParse(uint linePos, uint characterPos, LinkedListNode<char> now,
out IdentifierSemanticToken? token)
{
token = null;
return false;
}
/// <summary>
/// Identifier name
/// </summary>
public string IdentifierName => LiteralValue.ToLower();
}
public class EndSemanticToken : SemanticToken
{
public override SemanticTokenType TokenType => SemanticTokenType.End;
}


@@ -1,149 +1,16 @@
using System.Text.RegularExpressions;
using Canon.Core.Enums;
using Canon.Core.Enums;
using Canon.Core.Exceptions;
using Canon.Core.LexicalParser;
using Xunit.Abstractions;
using Canon.Tests.Utils;
using Canon.Core.Abstractions;
using Xunit.Abstractions;
namespace Canon.Tests.LexicalParserTests;
public class LexicalFileTests
public class LexicalFileTests(ITestOutputHelper testOutputHelper)
{
private readonly ITestOutputHelper _testOutputHelper;
private readonly ILexer _lexer = new Lexer();
public LexicalFileTests(ITestOutputHelper testOutputHelper)
{
_testOutputHelper = testOutputHelper;
}
//TODO: This uses plain substring matching, so one variable name must not be contained in another token. Hand-write a test file that includes such a containment case.
private static (int, int) FindNthPosition(string pascalProgram, string target, int occurrence)
{
int lineNumber = 0;
(int, int) nthPosition = (0, 0);
int foundCount = 0;
occurrence = occurrence + 1;
using (StringReader sr = new StringReader(pascalProgram))
{
string line;
while ((line = sr.ReadLine()) != null)
{
lineNumber++;
int columnNumber = -1;
// line = Regex.Replace(line, "'[^']*'", "$");
while ((columnNumber = line.IndexOf(target, columnNumber + 1, StringComparison.Ordinal)) != -1)
{
foundCount++;
if (foundCount == occurrence)
{
nthPosition = (lineNumber, columnNumber + target.Length);
return nthPosition;
}
}
}
}
if (nthPosition == (0, 0))
{
throw new Exception($"'{target}' not found in program.");
}
return nthPosition;
}
private void TestLexicalAnalysis(string pascalProgram, List<(string, SemanticTokenType, int)> stringLiterals)
{
var expectedTokens = new List<SemanticToken>();
foreach (var (literal, tokenType, skipCount) in stringLiterals)
{
var (line, column) = FindNthPosition(pascalProgram, literal, skipCount);
switch (tokenType)
{
case SemanticTokenType.Keyword:
expectedTokens.Add(new KeywordSemanticToken
{
LinePos = (uint)line,
CharacterPos = (uint)column,
LiteralValue = literal,
KeywordType = KeywordSemanticToken.GetKeywordTypeByKeyword(literal)
});
break;
case SemanticTokenType.Identifier:
expectedTokens.Add(new IdentifierSemanticToken
{
LinePos = (uint)line, CharacterPos = (uint)column, LiteralValue = literal
});
break;
case SemanticTokenType.Delimiter:
if (DelimiterSemanticToken.TryParse((uint)line, (uint)column, new LinkedListNode<char>(literal[0]),
out var delimiterToken))
{
if (delimiterToken != null)
{
expectedTokens.Add(delimiterToken);
}
}
break;
case SemanticTokenType.Operator:
expectedTokens.Add(new OperatorSemanticToken
{
LinePos = (uint)line,
CharacterPos = (uint)column,
LiteralValue = literal,
OperatorType = OperatorSemanticToken.GetOperatorTypeByOperator(literal)
});
break;
case SemanticTokenType.Character:
expectedTokens.Add(new CharacterSemanticToken
{
LinePos = (uint)line, CharacterPos = (uint)column, LiteralValue = literal
});
break;
case SemanticTokenType.Number:
expectedTokens.Add(new NumberSemanticToken
{
LinePos = (uint)line,
CharacterPos = (uint)column,
LiteralValue = literal,
NumberType = NumberType.Integer
});
break;
}
}
expectedTokens = expectedTokens.OrderBy(token => token.LinePos).ThenBy(token => token.CharacterPos).ToList();
expectedTokens = expectedTokens.Select(token =>
token is CharacterSemanticToken characterToken && characterToken.LiteralValue == "hello, world!"
? new CharacterSemanticToken
{
LinePos = characterToken.LinePos,
CharacterPos = characterToken.CharacterPos + 1,
LiteralValue = characterToken.LiteralValue
}
: token).ToList();
IEnumerable<SemanticToken> tokensEnumerable = _lexer.Tokenize(new StringSourceReader(pascalProgram));
List<SemanticToken> tokens = tokensEnumerable.ToList();
var actualTokens = tokens;
for (int i = 0; i < expectedTokens.Count; i++)
{
_testOutputHelper.WriteLine($"Expect: {expectedTokens[i]}");
_testOutputHelper.WriteLine($"Actual: {actualTokens[i]}");
_testOutputHelper.WriteLine("----");
// Assert.Equal(expectedTokens[i], actualTokens[i]);
}
Assert.Equal(expectedTokens, actualTokens);
}
[Fact]
public void TestLexicalAnalysisFirst()
{
@@ -157,30 +24,30 @@ public class LexicalFileTests
end.
""";
var stringLiterals = new List<(string, SemanticTokenType, int)>
{
("program", SemanticTokenType.Keyword, 0),
("HelloWorld", SemanticTokenType.Identifier, 0),
(";", SemanticTokenType.Delimiter, 0),
("var", SemanticTokenType.Keyword, 0),
("message", SemanticTokenType.Identifier, 0),
(":", SemanticTokenType.Delimiter, 0),
("string", SemanticTokenType.Identifier, 0),
(";", SemanticTokenType.Delimiter, 1),
("begin", SemanticTokenType.Keyword, 0),
("message", SemanticTokenType.Identifier, 1),
(":=", SemanticTokenType.Operator, 0),
("hello, world!", SemanticTokenType.Character, 0),
(";", SemanticTokenType.Delimiter, 2),
("writeln", SemanticTokenType.Identifier, 0),
("(", SemanticTokenType.Delimiter, 0),
("message", SemanticTokenType.Identifier, 2),
(")", SemanticTokenType.Delimiter, 0),
(";", SemanticTokenType.Delimiter, 3),
("end", SemanticTokenType.Keyword, 0),
(".", SemanticTokenType.Delimiter, 0)
};
TestLexicalAnalysis(pascalProgram, stringLiterals);
IEnumerable<SemanticToken> tokens = _lexer.Tokenize(new StringSourceReader(pascalProgram));
ValidateSemanticTokens(tokens, [
SemanticTokenType.Keyword,
SemanticTokenType.Identifier,
SemanticTokenType.Delimiter,
SemanticTokenType.Keyword,
SemanticTokenType.Identifier,
SemanticTokenType.Delimiter,
SemanticTokenType.Identifier,
SemanticTokenType.Delimiter,
SemanticTokenType.Keyword,
SemanticTokenType.Identifier,
SemanticTokenType.Operator,
SemanticTokenType.Character,
SemanticTokenType.Delimiter,
SemanticTokenType.Identifier,
SemanticTokenType.Delimiter,
SemanticTokenType.Identifier,
SemanticTokenType.Delimiter,
SemanticTokenType.Delimiter,
SemanticTokenType.Keyword,
SemanticTokenType.Delimiter,
SemanticTokenType.End
]);
}
[Fact]
@@ -196,30 +63,30 @@ public class LexicalFileTests
end.
""";
var stringLiterals = new List<(string, SemanticTokenType, int)>
{
("program", SemanticTokenType.Keyword, 0),
("main", SemanticTokenType.Identifier, 0),
(";", SemanticTokenType.Delimiter, 0),
("var", SemanticTokenType.Keyword, 0),
("ab", SemanticTokenType.Identifier, 0),
(":", SemanticTokenType.Delimiter, 0),
("integer", SemanticTokenType.Keyword, 0),
(";", SemanticTokenType.Delimiter, 1),
("begin", SemanticTokenType.Keyword, 0),
("ab", SemanticTokenType.Identifier, 1),
(":=", SemanticTokenType.Operator, 0),
("3", SemanticTokenType.Number, 0),
(";", SemanticTokenType.Delimiter, 2),
("write", SemanticTokenType.Identifier, 0),
("(", SemanticTokenType.Delimiter, 0),
("ab", SemanticTokenType.Identifier, 2),
(")", SemanticTokenType.Delimiter, 0),
(";", SemanticTokenType.Delimiter, 3),
("end", SemanticTokenType.Keyword, 0),
(".", SemanticTokenType.Delimiter, 0)
};
TestLexicalAnalysis(pascalProgram, stringLiterals);
IEnumerable<SemanticToken> tokens = _lexer.Tokenize(new StringSourceReader(pascalProgram));
ValidateSemanticTokens(tokens, [
SemanticTokenType.Keyword,
SemanticTokenType.Identifier,
SemanticTokenType.Delimiter,
SemanticTokenType.Keyword,
SemanticTokenType.Identifier,
SemanticTokenType.Delimiter,
SemanticTokenType.Keyword,
SemanticTokenType.Delimiter,
SemanticTokenType.Keyword,
SemanticTokenType.Identifier,
SemanticTokenType.Operator,
SemanticTokenType.Number,
SemanticTokenType.Delimiter,
SemanticTokenType.Identifier,
SemanticTokenType.Delimiter,
SemanticTokenType.Identifier,
SemanticTokenType.Delimiter,
SemanticTokenType.Delimiter,
SemanticTokenType.Keyword,
SemanticTokenType.Delimiter
]);
}
// Test with comments
@@ -239,42 +106,74 @@ public class LexicalFileTests
end.
""";
var stringLiterals = new List<(string, SemanticTokenType, int)>
{
("program", SemanticTokenType.Keyword, 0),
("main", SemanticTokenType.Identifier, 0),
(";", SemanticTokenType.Delimiter, 0),
("var", SemanticTokenType.Keyword, 0),
("ab", SemanticTokenType.Identifier, 0),
(",", SemanticTokenType.Delimiter, 0),
("ba", SemanticTokenType.Identifier, 0),
(":", SemanticTokenType.Delimiter, 0),
("integer", SemanticTokenType.Keyword, 0),
(";", SemanticTokenType.Delimiter, 1),
("begin", SemanticTokenType.Keyword, 0),
("ab", SemanticTokenType.Identifier, 1),
(":=", SemanticTokenType.Operator, 0),
("3", SemanticTokenType.Number, 0),
(";", SemanticTokenType.Delimiter, 2),
("ba", SemanticTokenType.Identifier, 1),
(":=", SemanticTokenType.Operator, 1),
("5", SemanticTokenType.Number, 0),
(";", SemanticTokenType.Delimiter, 3),
("ab", SemanticTokenType.Identifier, 2),
(":=", SemanticTokenType.Operator, 2),
("5", SemanticTokenType.Number, 1),
(";", SemanticTokenType.Delimiter, 4),
("write", SemanticTokenType.Identifier, 0),
("(", SemanticTokenType.Delimiter, 0),
("ab", SemanticTokenType.Identifier, 3),
("+", SemanticTokenType.Operator, 0),
("ba", SemanticTokenType.Identifier, 2),
(")", SemanticTokenType.Delimiter, 0),
(";", SemanticTokenType.Delimiter, 5),
("end", SemanticTokenType.Keyword, 0),
(".", SemanticTokenType.Delimiter, 0)
};
TestLexicalAnalysis(pascalProgram, stringLiterals);
IEnumerable<SemanticToken> tokens = _lexer.Tokenize(new StringSourceReader(pascalProgram));
ValidateSemanticTokens(tokens, [
SemanticTokenType.Keyword,
SemanticTokenType.Identifier,
SemanticTokenType.Delimiter,
SemanticTokenType.Keyword,
SemanticTokenType.Identifier,
SemanticTokenType.Delimiter,
SemanticTokenType.Identifier,
SemanticTokenType.Delimiter,
SemanticTokenType.Keyword,
SemanticTokenType.Delimiter,
SemanticTokenType.Keyword,
SemanticTokenType.Identifier,
SemanticTokenType.Operator,
SemanticTokenType.Number,
SemanticTokenType.Delimiter,
SemanticTokenType.Identifier,
SemanticTokenType.Operator,
SemanticTokenType.Number,
SemanticTokenType.Delimiter,
SemanticTokenType.Identifier,
SemanticTokenType.Operator,
SemanticTokenType.Number,
SemanticTokenType.Delimiter,
SemanticTokenType.Identifier,
SemanticTokenType.Delimiter,
SemanticTokenType.Identifier,
SemanticTokenType.Operator,
SemanticTokenType.Identifier,
SemanticTokenType.Delimiter,
SemanticTokenType.Delimiter,
SemanticTokenType.Keyword,
SemanticTokenType.Delimiter,
SemanticTokenType.End
]);
}
[Fact]
public void ReuseTest()
{
const string program1 = """
program main;
begin
end.
""";
IEnumerable<SemanticToken> tokens = _lexer.Tokenize(new StringSourceReader(program1));
ValidateSemanticTokens(tokens, [
SemanticTokenType.Keyword,
SemanticTokenType.Identifier,
SemanticTokenType.Delimiter,
SemanticTokenType.Keyword,
SemanticTokenType.Keyword,
SemanticTokenType.Delimiter
]);
const string test = "program begin end.";
tokens = _lexer.Tokenize(new StringSourceReader(test));
ValidateSemanticTokens(tokens, [
SemanticTokenType.Keyword,
SemanticTokenType.Keyword,
SemanticTokenType.Keyword,
SemanticTokenType.Delimiter
]);
}
[Fact]
@@ -291,7 +190,7 @@ public class LexicalFileTests
""";
var ex = Assert.Throws<LexemeException>(() => _lexer.Tokenize(new StringSourceReader(pascalProgram)).ToList());
// Print the exception details
_testOutputHelper.WriteLine(ex.ToString());
testOutputHelper.WriteLine(ex.ToString());
Assert.Equal(LexemeErrorType.UnclosedComment, ex.ErrorType);
Assert.Equal((uint)7, ex.Line);
Assert.Equal((uint)5, ex.CharPosition);
@@ -307,7 +206,7 @@ public class LexicalFileTests
program CommentNotClosed;
""";
var ex = Assert.Throws<LexemeException>(() => _lexer.Tokenize(new StringSourceReader(pascalProgram)).ToList());
_testOutputHelper.WriteLine(ex.ToString());
testOutputHelper.WriteLine(ex.ToString());
Assert.Equal(LexemeErrorType.UnclosedComment, ex.ErrorType);
Assert.Equal((uint)4, ex.Line);
Assert.Equal((uint)26, ex.CharPosition);
@@ -410,4 +309,13 @@ public class LexicalFileTests
List<SemanticToken> tokens = tokensEnumerable.ToList();
Assert.NotNull(tokens);
}
private static void ValidateSemanticTokens(IEnumerable<SemanticToken> actualTokens,
IEnumerable<SemanticTokenType> expectedTypes)
{
foreach ((SemanticTokenType type, SemanticToken token) in expectedTypes.Zip(actualTokens))
{
Assert.Equal(type, token.TokenType);
}
}
}