// CanonSharp/CanonSharp.Common/LexicalAnalyzer/LexicalScannerBuilder.cs

using CanonSharp.Common.Abstractions;

namespace CanonSharp.Common.LexicalAnalyzer;
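
/// <summary>
/// Builds a <see cref="LexicalScanner"/>: each registered token contributes an
/// NFA, the NFAs are merged into a single automaton, and the result is
/// converted to a DFA that drives the scanner.
/// </summary>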
public class LexicalScannerBuilder
{
    private readonly Dictionary<NondeterministicState, LexicalToken> _finalStateMap = [];
    private readonly List<NondeterministicFiniteAutomation> _nondeterministicFiniteAutomations = [];
    private readonly HashSet<LexicalToken> _skippedTokens = [];

    internal LexicalScannerBuilder()
    {
    }
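
    /// <summary>
    /// Registers a token: converts its regular expression to an NFA and records
    /// which NFA final states recognize the token.
    /// </summary>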
    public void DefineToken(LexicalToken token)
    {
        NondeterministicFiniteAutomation automation = token.Expression.Convert2Nfa();
        _nondeterministicFiniteAutomations.Add(automation);
        foreach (NondeterministicState state in automation.FinalStates)
        {
            _finalStateMap.Add(state, token);
        }
    }
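
    /// <summary>
    /// Marks a token as skipped: the scanner still recognizes it but does not
    /// emit it to callers.
    /// </summary>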
    public void AddSkippedToken(LexicalToken token) => _skippedTokens.Add(token);
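
    /// <summary>
    /// Combines the registered NFAs, converts the result to a DFA, and resolves
    /// each DFA final state to the highest-priority token it can produce.
    /// </summary>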
    public LexicalScanner Build(ISourceReader reader)
    {
        NondeterministicFiniteAutomation finalAutomation = Combine();
        DeterministicFiniteAutomation deterministicFiniteAutomation =
            DeterministicFiniteAutomation.Create(finalAutomation);

        Dictionary<DeterministicState, LexicalToken> finalTokenMap = [];
        foreach (DeterministicState state in deterministicFiniteAutomation.FinalStates)
        {
            // A DFA final state may cover NFA final states of several tokens;
            // pick the token with the highest priority.
            finalTokenMap.Add(state, state.Closure
                .Where(s => _finalStateMap.ContainsKey(s))
                .Select(s => _finalStateMap[s])
                .OrderByDescending(t => t.Priority)
                .First());
        }

        return new LexicalScanner(deterministicFiniteAutomation.Start, finalTokenMap, _skippedTokens, reader);
    }
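
    /// <summary>
    /// Merges all registered NFAs into one by introducing a fresh start state
    /// with an ε-transition into each NFA's start state; the combined automaton's
    /// final states are the union of the individual final states.
    /// </summary>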
    private NondeterministicFiniteAutomation Combine()
    {
        NondeterministicState head = new();
        NondeterministicFiniteAutomation result = new(head, []);
        foreach (NondeterministicFiniteAutomation automation in _nondeterministicFiniteAutomations)
        {
            head.AddTransaction(EmptyChar.Empty, automation.Start);
            result.FinalStates.UnionWith(automation.FinalStates);
        }

        return result;
    }
}
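
// Minimal usage sketch. The constructor is internal, so the builder is presumably
// obtained through a factory elsewhere in CanonSharp; the token and reader
// variables below are hypothetical placeholders:
//
//     LexicalScannerBuilder builder = ...;           // obtained from the library's factory
//     builder.DefineToken(identifierToken);          // hypothetical LexicalToken
//     builder.DefineToken(whitespaceToken);          // hypothetical LexicalToken
//     builder.AddSkippedToken(whitespaceToken);      // recognized but not emitted
//     LexicalScanner scanner = builder.Build(sourceReader);  // sourceReader : ISourceReader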