From f273f5d74343d9929059f6fccfb4baaa6b787508 Mon Sep 17 00:00:00 2001
From: Nikolaus Moll
Date: Sat, 23 Dec 2023 03:06:58 +0100
Subject: [PATCH] Bigger Refactoring, splitting TokenType from TokenBase, added Unit Test for comment after values

---
 .../Exceptions/NoTokenRecognizedException.cs  |  24 ++
 .../NoneTokenRecognizedException.cs           |  24 --
 .../src/Models/Tokens/CategoryClosingToken.cs |  11 -
 .../src/Models/Tokens/CategoryOpeningToken.cs |   8 -
 InfHelper/src/Models/Tokens/EqualityToken.cs  |   8 -
 .../src/Models/Tokens/InlineCommentToken.cs   |   8 -
 InfHelper/src/Models/Tokens/LetterToken.cs    |  31 --
 .../Models/Tokens/LineConcatenatorToken.cs    |   8 -
 InfHelper/src/Models/Tokens/NewLineToken.cs   |   8 -
 InfHelper/src/Models/Tokens/SpaceToken.cs     |   8 -
 InfHelper/src/Models/Tokens/Token.cs          |  13 +
 InfHelper/src/Models/Tokens/TokenBase.cs      |  15 -
 InfHelper/src/Models/Tokens/TokenType.cs      | 139 +++++++--
 .../src/Models/Tokens/ValueMarkerToken.cs     |   8 -
 .../src/Models/Tokens/ValueSeparatorToken.cs  |   8 -
 .../src/Models/Tokens/WhiteSpaceToken.cs      |  12 -
 InfHelper/src/Parsers/BasicTokenParser.cs     | 134 ++++----
 InfHelper/src/Parsers/ContentParser.cs        | 286 +++++++++---------
 InfHelper/src/Parsers/ITokenParser.cs         |  10 +-
 InfHelperTests/src/InfHelperTests.cs          |  50 +--
 .../src/Parsers/BasicTokenParserTests.cs      | 127 ++++----
 21 files changed, 464 insertions(+), 476 deletions(-)
 create mode 100644 InfHelper/src/Exceptions/NoTokenRecognizedException.cs
 delete mode 100644 InfHelper/src/Exceptions/NoneTokenRecognizedException.cs
 delete mode 100644 InfHelper/src/Models/Tokens/CategoryClosingToken.cs
 delete mode 100644 InfHelper/src/Models/Tokens/CategoryOpeningToken.cs
 delete mode 100644 InfHelper/src/Models/Tokens/EqualityToken.cs
 delete mode 100644 InfHelper/src/Models/Tokens/InlineCommentToken.cs
 delete mode 100644 InfHelper/src/Models/Tokens/LetterToken.cs
 delete mode 100644 InfHelper/src/Models/Tokens/LineConcatenatorToken.cs
 delete mode 100644 InfHelper/src/Models/Tokens/NewLineToken.cs
 delete mode 100644 InfHelper/src/Models/Tokens/SpaceToken.cs
 create mode 100644 InfHelper/src/Models/Tokens/Token.cs
 delete mode 100644 InfHelper/src/Models/Tokens/TokenBase.cs
 delete mode 100644 InfHelper/src/Models/Tokens/ValueMarkerToken.cs
 delete mode 100644 InfHelper/src/Models/Tokens/ValueSeparatorToken.cs
 delete mode 100644 InfHelper/src/Models/Tokens/WhiteSpaceToken.cs

diff --git a/InfHelper/src/Exceptions/NoTokenRecognizedException.cs b/InfHelper/src/Exceptions/NoTokenRecognizedException.cs
new file mode 100644
index 0000000..23bedcc
--- /dev/null
+++ b/InfHelper/src/Exceptions/NoTokenRecognizedException.cs
@@ -0,0 +1,24 @@
+using System;
+using System.Runtime.Serialization;
+
+namespace InfHelper.Exceptions
+{
+    public class NoTokenRecognizedException : InfParserException
+    {
+        public NoTokenRecognizedException()
+        {
+        }
+
+        public NoTokenRecognizedException(string message) : base(message)
+        {
+        }
+
+        public NoTokenRecognizedException(string message, Exception innerException) : base(message, innerException)
+        {
+        }
+
+        protected NoTokenRecognizedException(SerializationInfo info, StreamingContext context) : base(info, context)
+        {
+        }
+    }
+}
\ No newline at end of file
diff --git a/InfHelper/src/Exceptions/NoneTokenRecognizedException.cs b/InfHelper/src/Exceptions/NoneTokenRecognizedException.cs
deleted file mode 100644
index df2084b..0000000
--- a/InfHelper/src/Exceptions/NoneTokenRecognizedException.cs
+++ /dev/null
@@ -1,24 +0,0 @@
-using System;
-using System.Runtime.Serialization;
-
-namespace InfHelper.Exceptions
-{
-    public
class NoneTokenRecognizedException : InfParserException - { - public NoneTokenRecognizedException() - { - } - - public NoneTokenRecognizedException(string message) : base(message) - { - } - - public NoneTokenRecognizedException(string message, Exception innerException) : base(message, innerException) - { - } - - protected NoneTokenRecognizedException(SerializationInfo info, StreamingContext context) : base(info, context) - { - } - } -} \ No newline at end of file diff --git a/InfHelper/src/Models/Tokens/CategoryClosingToken.cs b/InfHelper/src/Models/Tokens/CategoryClosingToken.cs deleted file mode 100644 index 2e0d543..0000000 --- a/InfHelper/src/Models/Tokens/CategoryClosingToken.cs +++ /dev/null @@ -1,11 +0,0 @@ -using System.Linq; - -namespace InfHelper.Models.Tokens -{ - public class CategoryClosingToken : TokenBase - { - public override char[] Symbols { get; } = { ']' }; - - public override TokenType Type { get; } = TokenType.CategoryClosing; - } -} \ No newline at end of file diff --git a/InfHelper/src/Models/Tokens/CategoryOpeningToken.cs b/InfHelper/src/Models/Tokens/CategoryOpeningToken.cs deleted file mode 100644 index 45a3792..0000000 --- a/InfHelper/src/Models/Tokens/CategoryOpeningToken.cs +++ /dev/null @@ -1,8 +0,0 @@ -namespace InfHelper.Models.Tokens -{ - public class CategoryOpeningToken : TokenBase - { - public override char[] Symbols { get; } = {'['}; - public override TokenType Type { get; } =TokenType.CategoryOpening; - } -} \ No newline at end of file diff --git a/InfHelper/src/Models/Tokens/EqualityToken.cs b/InfHelper/src/Models/Tokens/EqualityToken.cs deleted file mode 100644 index 701ec7a..0000000 --- a/InfHelper/src/Models/Tokens/EqualityToken.cs +++ /dev/null @@ -1,8 +0,0 @@ -namespace InfHelper.Models.Tokens -{ - public class EqualityToken : TokenBase - { - public override char[] Symbols { get; } = {'='}; - public override TokenType Type { get; } = TokenType.EQ; - } -} \ No newline at end of file diff --git a/InfHelper/src/Models/Tokens/InlineCommentToken.cs b/InfHelper/src/Models/Tokens/InlineCommentToken.cs deleted file mode 100644 index 4fc1cdd..0000000 --- a/InfHelper/src/Models/Tokens/InlineCommentToken.cs +++ /dev/null @@ -1,8 +0,0 @@ -namespace InfHelper.Models.Tokens -{ - public class InlineCommentToken : TokenBase - { - public override char[] Symbols { get; } = { ';' }; - public override TokenType Type { get; } = TokenType.InlineComment; - } -} \ No newline at end of file diff --git a/InfHelper/src/Models/Tokens/LetterToken.cs b/InfHelper/src/Models/Tokens/LetterToken.cs deleted file mode 100644 index 6b83c82..0000000 --- a/InfHelper/src/Models/Tokens/LetterToken.cs +++ /dev/null @@ -1,31 +0,0 @@ -using System; -using System.Collections.Generic; -using System.Linq; - -namespace InfHelper.Models.Tokens -{ - public class LetterToken : TokenBase - { - public override char[] Symbols - { - get - { - var symbols = new HashSet(); - for (int i = char.MinValue; i <= char.MaxValue; i++) - { - char c = Convert.ToChar(i); - if (!char.IsControl(c) && !char.IsWhiteSpace(c)) - symbols.Add(c); - } - return symbols.ToArray(); - } - } - - public override TokenType Type { get; } = TokenType.Letter; - - public override bool IsToken(char c) - { - return !char.IsControl(c) && !char.IsWhiteSpace(c); - } - } -} \ No newline at end of file diff --git a/InfHelper/src/Models/Tokens/LineConcatenatorToken.cs b/InfHelper/src/Models/Tokens/LineConcatenatorToken.cs deleted file mode 100644 index 5c53c5d..0000000 --- a/InfHelper/src/Models/Tokens/LineConcatenatorToken.cs +++ /dev/null 
@@ -1,8 +0,0 @@
-namespace InfHelper.Models.Tokens
-{
-    public class LineConcatenatorToken : TokenBase
-    {
-        public override char[] Symbols { get; } = {'\\'};
-        public override TokenType Type { get; } = TokenType.LineConcatenator;
-    }
-}
\ No newline at end of file
diff --git a/InfHelper/src/Models/Tokens/NewLineToken.cs b/InfHelper/src/Models/Tokens/NewLineToken.cs
deleted file mode 100644
index 2bf303b..0000000
--- a/InfHelper/src/Models/Tokens/NewLineToken.cs
+++ /dev/null
@@ -1,8 +0,0 @@
-namespace InfHelper.Models.Tokens
-{
-    public class NewLineToken : TokenBase
-    {
-        public override char[] Symbols { get; } = { '\n','\r' };
-        public override TokenType Type { get; } = TokenType.NewLine;
-    }
-}
\ No newline at end of file
diff --git a/InfHelper/src/Models/Tokens/SpaceToken.cs b/InfHelper/src/Models/Tokens/SpaceToken.cs
deleted file mode 100644
index 2d036ff..0000000
--- a/InfHelper/src/Models/Tokens/SpaceToken.cs
+++ /dev/null
@@ -1,8 +0,0 @@
-namespace InfHelper.Models.Tokens
-{
-    public class SpaceToken : TokenBase
-    {
-        public override char[] Symbols { get; } = { ' ' };
-        public override TokenType Type { get; } = TokenType.Space;
-    }
-}
\ No newline at end of file
diff --git a/InfHelper/src/Models/Tokens/Token.cs b/InfHelper/src/Models/Tokens/Token.cs
new file mode 100644
index 0000000..5916505
--- /dev/null
+++ b/InfHelper/src/Models/Tokens/Token.cs
@@ -0,0 +1,13 @@
+namespace InfHelper.Models.Tokens
+{
+    public class Token
+    {
+        public Token(TokenType tokenType)
+        {
+            Type = tokenType;
+        }
+
+        public char Symbol { get; set; }
+        public TokenType Type { get; }
+    }
+}
\ No newline at end of file
diff --git a/InfHelper/src/Models/Tokens/TokenBase.cs b/InfHelper/src/Models/Tokens/TokenBase.cs
deleted file mode 100644
index f8d84de..0000000
--- a/InfHelper/src/Models/Tokens/TokenBase.cs
+++ /dev/null
@@ -1,15 +0,0 @@
-using System.Linq;
-
-namespace InfHelper.Models.Tokens
-{
-    public abstract class TokenBase
-    {
-        public abstract char[] Symbols { get; }
-        public virtual bool IsToken(char c)
-        {
-            return Symbols.Contains(c);
-        }
-        public char Symbol { get; set; }
-        public abstract TokenType Type { get; }
-    }
-}
\ No newline at end of file
diff --git a/InfHelper/src/Models/Tokens/TokenType.cs b/InfHelper/src/Models/Tokens/TokenType.cs
index 51b556b..9f72429 100644
--- a/InfHelper/src/Models/Tokens/TokenType.cs
+++ b/InfHelper/src/Models/Tokens/TokenType.cs
@@ -1,29 +1,122 @@
-namespace InfHelper.Models.Tokens
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Reflection;
+
+namespace InfHelper.Models.Tokens
 {
+
+    internal abstract class PredicateBasedTokenTypeAttribute: Attribute
+    {
+        protected PredicateBasedTokenTypeAttribute(Predicate<char> predicate)
+        {
+            Predicate = predicate;
+        }
+
+        internal readonly Predicate<char> Predicate;
+
+    }
+
+    internal class SymbolBasedTokenTypeAttribute: Attribute
+    {
+        internal SymbolBasedTokenTypeAttribute(string symbols)
+        {
+            Symbols = symbols.ToCharArray();
+        }
+
+        internal SymbolBasedTokenTypeAttribute(char symbol)
+        {
+            Symbols = new []{symbol};
+        }
+
+        public char[] Symbols { get; }
+    }
+
+    internal class LetterTokenTypeAttribute: PredicateBasedTokenTypeAttribute
+    {
+        public LetterTokenTypeAttribute() : base(c => !char.IsControl(c) && !char.IsWhiteSpace(c))
+        {
+        }
+    }
+
+    internal class WhiteSpaceTokenTypeAttribute: PredicateBasedTokenTypeAttribute
+    {
+        public WhiteSpaceTokenTypeAttribute() : base(char.IsWhiteSpace)
+        {
+        }
+
+    }
+
     public enum TokenType
     {
-        // a-z A-Z
-        Letter,
-        // =
-        EQ,
-        // [
-        CategoryOpening,
-        // ]
-        CategoryClosing,
-        // \r space etc
-        WhiteSpace,
-        // \n
-        NewLine,
-        // \
-        LineConcatenator,
-        // ,
-        ValueSeparator,
-        // "
-        ValueMarker,
-        // ;
-        InlineComment,
+        [LetterTokenType] Letter,
+        [SymbolBasedTokenType('=')] Equality,
+        [SymbolBasedTokenType('[')] CategoryOpening,
+        [SymbolBasedTokenType(']')] CategoryClosing,
+        [WhiteSpaceTokenType] WhiteSpace,
+        [SymbolBasedTokenType("\n\r")] NewLine,
+        [SymbolBasedTokenType('\\')] LineConcatenator,
+        [SymbolBasedTokenType(',')] ValueSeparator,
+        [SymbolBasedTokenType('"')] ValueMarker,
+        [SymbolBasedTokenType(';')] InlineComment,
+        [SymbolBasedTokenType(' ')] Space
+    }
+
+    public static class TokenTypes
+    {
+
+        private static readonly Dictionary<TokenType, Predicate<char>> Predicates = Initialize();
+
+        private static Dictionary<TokenType, Predicate<char>> Initialize()
+        {
+            var predicates = new Dictionary<TokenType, Predicate<char>>();
+            foreach (var tokenType in Enum.GetValues(typeof(TokenType)).Cast<TokenType>())
+            {
+                switch (GetAttr(tokenType))
+                {
+                    case PredicateBasedTokenTypeAttribute attribute:
+                        predicates.Add(tokenType, attribute.Predicate);
+                        break;
+                    case SymbolBasedTokenTypeAttribute attribute:
+                        predicates.Add(tokenType, c => attribute.Symbols.Contains(c));
+                        break;
+                    default:
+                        throw new Exception("Misconfigured TokenType: " + tokenType);
+                }
+            }
+            return predicates;
+        }
+
+        public static bool IsToken(TokenType tokenType, char c)
+        {
+            return Predicates[tokenType].Invoke(c);
+        }
 
-        // just space (not a tab or newline)
-        Space
+        public static Token CreateToken(TokenType tokenType, char c)
+        {
+            return new Token(tokenType)
+            {
+                Symbol = c
+            };
+        }
+
+        private static Attribute GetAttr(TokenType tokenType)
+        {
+            var memberInfo = ForValue(tokenType);
+            if (memberInfo == null)
+            {
+                throw new Exception("Misconfigured TokenType: " + tokenType);
+            }
+
+            return Attribute.GetCustomAttribute(memberInfo, typeof(Attribute));
+        }
+
+        private static MemberInfo ForValue(TokenType tokenType)
+        {
+            var name = Enum.GetName(typeof(TokenType), tokenType);
+            return name != null ? typeof(TokenType).GetField(name) : null;
+        }
+    }
+
 }
\ No newline at end of file
diff --git a/InfHelper/src/Models/Tokens/ValueMarkerToken.cs b/InfHelper/src/Models/Tokens/ValueMarkerToken.cs
deleted file mode 100644
index a50ecea..0000000
--- a/InfHelper/src/Models/Tokens/ValueMarkerToken.cs
+++ /dev/null
@@ -1,8 +0,0 @@
-namespace InfHelper.Models.Tokens
-{
-    public class ValueMarkerToken : TokenBase
-    {
-        public override char[] Symbols { get; } = { '"' };
-        public override TokenType Type { get; } = TokenType.ValueMarker;
-    }
-}
\ No newline at end of file
diff --git a/InfHelper/src/Models/Tokens/ValueSeparatorToken.cs b/InfHelper/src/Models/Tokens/ValueSeparatorToken.cs
deleted file mode 100644
index ab1179c..0000000
--- a/InfHelper/src/Models/Tokens/ValueSeparatorToken.cs
+++ /dev/null
@@ -1,8 +0,0 @@
-namespace InfHelper.Models.Tokens
-{
-    public class ValueSeparatorToken : TokenBase
-    {
-        public override char[] Symbols { get; } = { ',' };
-        public override TokenType Type { get; } = TokenType.ValueSeparator;
-    }
-}
\ No newline at end of file
diff --git a/InfHelper/src/Models/Tokens/WhiteSpaceToken.cs b/InfHelper/src/Models/Tokens/WhiteSpaceToken.cs
deleted file mode 100644
index 18166ee..0000000
--- a/InfHelper/src/Models/Tokens/WhiteSpaceToken.cs
+++ /dev/null
@@ -1,12 +0,0 @@
-namespace InfHelper.Models.Tokens
-{
-    public class WhiteSpaceToken : TokenBase
-    {
-        public override char[] Symbols { get; } = { ' ', '\t' ,' '};
-        public override TokenType Type { get; } = TokenType.WhiteSpace;
-        public override bool IsToken(char c)
-        {
-            return char.IsWhiteSpace(c);
-        }
-    }
-}
\ No newline at end of file
diff --git a/InfHelper/src/Parsers/BasicTokenParser.cs b/InfHelper/src/Parsers/BasicTokenParser.cs
index e1165d8..a1ddbcf 100644
--- a/InfHelper/src/Parsers/BasicTokenParser.cs
+++ b/InfHelper/src/Parsers/BasicTokenParser.cs
@@ -8,60 +8,60 @@ namespace InfHelper.Parsers
 {
     public class BasicTokenParser : ITokenParser
     {
-        private ISet<TokenBase> allTokens;
+        private ISet<TokenType> _allTokenTypes;
 
-        public ISet<TokenBase> AllTokens
+        public ISet<TokenType> AllTokenTypes
         {
-            get => allTokens;
-            private set
-            {
-                //Sort by priority - some tokens share symbols e.g. line concentrator and letter
-                allTokens = new HashSet<TokenBase>(value.OrderByDescending(x => (int)x.Type));
-            }
+            get => _allTokenTypes;
+            set => _allTokenTypes = PrioritizeTokenTypes(value);
+        }
+
+        private ISet<TokenType> PrioritizeTokenTypes(IEnumerable<TokenType> value)
+        {
+            //Sort by priority - some tokens share symbols e.g. line concatenator and letter
+            return new HashSet<TokenType>(value.OrderByDescending(x => (int)x));
         }
 
         public uint Length { get; private set; }
         public uint Position { get; private set; }
 
-        public ISet<TokenBase> AllowedTokens { get; set; }
-        public ISet<TokenBase> IgnoredTokens { get; set; }
-
-        public event EventHandler<TokenBase> InvalidTokenFound;
-        public event EventHandler<TokenBase> ValidTokenFound;
+        private ISet<TokenType> _allowedTokenTypes;
+        public ISet<TokenType> AllowedTokenTypes
+        {
+            get => _allowedTokenTypes;
+            set => _allowedTokenTypes = value ?? new HashSet<TokenType>();
+        }
 
-        public BasicTokenParser() : this(new HashSet<TokenBase>(), new HashSet<TokenBase>())
+        private ISet<TokenType> _ignoredTokenTypes;
+        public ISet<TokenType> IgnoredTokenTypes
         {
-            AllTokens = AllAvailableTokens;
+            get => _ignoredTokenTypes;
+            set => _ignoredTokenTypes = value ?? new HashSet<TokenType>();
-        }
-        public BasicTokenParser(ISet<TokenBase> allowedTokens, ISet<TokenBase> ignoredTokens)
+        public event EventHandler<Token> InvalidTokenFound;
+        public event EventHandler<Token> ValidTokenFound;
+
+        public BasicTokenParser() : this(new HashSet<TokenType>(), new HashSet<TokenType>())
         {
-            AllTokens = AllAvailableTokens;
-            AllowedTokens = allowedTokens;
-            IgnoredTokens = ignoredTokens;
+            AllTokenTypes = AllAvailableTokenTypes;
         }
 
-        public BasicTokenParser(ISet<TokenBase> allTokens, ISet<TokenBase> allowedTokens, ISet<TokenBase> ignoredTokens)
+        public BasicTokenParser(ISet<TokenType> allowedTokenTypes, ISet<TokenType> ignoredTokenTypes)
         {
-            AllTokens = allTokens;
-            AllowedTokens = allowedTokens;
-            IgnoredTokens = ignoredTokens;
+            AllTokenTypes = AllAvailableTokenTypes;
+            AllowedTokenTypes = allowedTokenTypes;
+            IgnoredTokenTypes = ignoredTokenTypes;
         }
 
-        public static ISet<TokenBase> AllAvailableTokens => new HashSet<TokenBase>
+        public BasicTokenParser(ISet<TokenType> allTokenTypes, ISet<TokenType> allowedTokenTypes, ISet<TokenType> ignoredTokenTypes)
         {
-            new CategoryClosingToken(),
-            new CategoryOpeningToken(),
-            new EqualityToken(),
-            new InlineCommentToken(),
-            new NewLineToken(),
-            new SpaceToken(),
-            new WhiteSpaceToken(),
-            new LineConcatenatorToken(),
-            new LetterToken(),
-            new ValueSeparatorToken(),
-            new ValueMarkerToken()
-        };
+            AllTokenTypes = allTokenTypes;
+            AllowedTokenTypes = allowedTokenTypes;
+            IgnoredTokenTypes = ignoredTokenTypes;
+        }
+
+        public static ISet<TokenType> AllAvailableTokenTypes => new HashSet<TokenType>(Enum.GetValues(typeof(TokenType)).Cast<TokenType>());
 
         public virtual void Parse(string formula)
         {
@@ -73,7 +73,6 @@ public virtual void Parse(string formula)
 
             foreach (var c in formula)
             {
-                bool found = false;
                 Position += 1;
 
                 if (c == '\n')
@@ -84,39 +83,40 @@ public virtual void Parse(string formula)
                 }
                 col++;
                 line += c;
+                HandleToken(c, row, col, line);
+            }
+        }
 
-                //examine all known tokens
-                foreach (var token in AllTokens)
-                {
-
-                    if (!token.IsToken(c)) continue;
-
-                    //tokenBase found
-                    token.Symbol = c;
-                    found = true;
-
-                    //ignored tokenBase detected
-                    if (IgnoredTokens != null && IgnoredTokens.Any(x => x.Type == token.Type))
-                        continue;
+        private void HandleToken(char c, int row, int col, string line)
+        {
+            // examine matching TokenTypes but skip ignored
+            foreach (var tokenType in AllTokenTypes
+                .Where(IsMatchingTokenType(c))
+                .Where(tokenType => !IgnoredTokenTypes.Contains(tokenType)))
+            {
+                GetTokenHandler(tokenType)?.Invoke(this, TokenTypes.CreateToken(tokenType, c));
+                return;
+            }
 
-                    //not allowed tokenBase detected
-                    if (AllowedTokens == null || AllowedTokens.All(x => x.Type != token.Type))
-                    {
-                        InvalidTokenFound?.Invoke(this, token);
-                        continue;
-                    }
+            // no unignored match, so let's see if we recognize the character at all
+            if (!AllTokenTypes.Any(IsMatchingTokenType(c)))
+            {
+                // TokenType not recognized
+                throw new NoTokenRecognizedException($"TokenType not recognized in row:{row} col:{col}" + Environment.NewLine +
+                                                     "Examined symbol: " + c + Environment.NewLine +
+                                                     "Symbol number: " + Convert.ToInt16(c) + Environment.NewLine +
+                                                     "Examined line: " + line);
+            }
+        }
 
-                    //allowed tokenBase detected
-                    ValidTokenFound?.Invoke(this, token);
-                    break;
-                }
+        private static Func<TokenType, bool> IsMatchingTokenType(char c)
+        {
+            return tokenType => TokenTypes.IsToken(tokenType, c);
+        }
 
-                //tokenBase not recognized
-                if (!found)
-                    throw new NoneTokenRecognizedException($"None tokenBase recognized in row:{row} col:{col}" + Environment.NewLine + "Examined symbol: " + c
-                        + "\nSymbol number: " + Convert.ToInt16(c)
-                        + "\nExamined line: " + line);
-            }
+        private EventHandler<Token> GetTokenHandler(TokenType tokenType)
+        {
+            return AllowedTokenTypes.Contains(tokenType) ?
ValidTokenFound : InvalidTokenFound; } } -} \ No newline at end of file +} diff --git a/InfHelper/src/Parsers/ContentParser.cs b/InfHelper/src/Parsers/ContentParser.cs index 830c932..03dad5f 100644 --- a/InfHelper/src/Parsers/ContentParser.cs +++ b/InfHelper/src/Parsers/ContentParser.cs @@ -10,12 +10,12 @@ namespace InfHelper.Parsers { public class ContentParser { - private Category currentCategory; - private Key currentKey; - private readonly ITokenParser parser; - private string keyTmpValue; - private Action previousParsing; - private string parsingType = ""; + private Category _currentCategory; + private Key _currentKey; + private readonly ITokenParser _parser; + private string _keyTmpValue; + private Action _previousParsing; + private string _parsingType = ""; public ContentParser() : this(new BasicTokenParser()) { @@ -24,14 +24,14 @@ public ContentParser() : this(new BasicTokenParser()) public ContentParser(ITokenParser parser) { - this.parser = parser; - parser.InvalidTokenFound += InvalidTokenFound; + _parser = parser; + _parser.InvalidTokenFound += InvalidTokenFound; } public void Parse(string content) { InitMainParsing(); - parser.Parse(content); + _parser.Parse(content); ValueParsingComplete(); KeyParsingComplete(); CategoryParsingComplete(); @@ -47,20 +47,20 @@ public void Parse(string content) /// protected void InitMainParsing() { - parsingType = "main parsing"; + _parsingType = "main parsing"; ClearAllMyCallbacks(); - parser.ValidTokenFound += ValidTokenFoundDuringMainParsing; + _parser.ValidTokenFound += ValidTokenFoundDuringMainParsing; - parser.AllowedTokens = new HashSet + _parser.AllowedTokenTypes = new HashSet { - new InlineCommentToken(), - new CategoryOpeningToken(), + TokenType.InlineComment, + TokenType.CategoryOpening, }; - parser.IgnoredTokens = new HashSet + _parser.IgnoredTokenTypes = new HashSet { - new WhiteSpaceToken(), - new NewLineToken() + TokenType.WhiteSpace, + TokenType.NewLine }; } @@ -69,17 +69,17 @@ protected void InitMainParsing() /// protected void InitCategoryParsing() { - parsingType = "category parsing"; - currentCategory = new Category(); + _parsingType = "category parsing"; + _currentCategory = new Category(); ClearAllMyCallbacks(); - parser.ValidTokenFound += ValidTokenFoundDuringCategoryParsing; - parser.IgnoredTokens?.Clear(); - parser.AllowedTokens = new HashSet + _parser.ValidTokenFound += ValidTokenFoundDuringCategoryParsing; + _parser.IgnoredTokenTypes?.Clear(); + _parser.AllowedTokenTypes = new HashSet { - new SpaceToken(), - new CategoryClosingToken(), - new LetterToken(), - new LineConcatenatorToken() + TokenType.Space, + TokenType.CategoryClosing, + TokenType.Letter, + TokenType.LineConcatenator }; } @@ -88,27 +88,27 @@ protected void InitCategoryParsing() /// protected void InitKeyIdParsing() { - parsingType = "key id parsing"; - currentKey = new Key(); + _parsingType = "key id parsing"; + _currentKey = new Key(); ClearAllMyCallbacks(); - parser.ValidTokenFound += ValidTokenFoundDuringKeyIdParsing; + _parser.ValidTokenFound += ValidTokenFoundDuringKeyIdParsing; - parser.AllowedTokens = new HashSet + _parser.AllowedTokenTypes = new HashSet { - new InlineCommentToken(), - new LetterToken(), - new EqualityToken(), - new SpaceToken(), - new WhiteSpaceToken(), - new CategoryOpeningToken(), - new NewLineToken(), - new ValueSeparatorToken(), - new ValueMarkerToken(), + TokenType.InlineComment, + TokenType.Letter, + TokenType.Equality, + TokenType.Space, + TokenType.WhiteSpace, + TokenType.CategoryOpening, + TokenType.NewLine, + 
TokenType.ValueSeparator, + TokenType.ValueMarker, }; - parser.IgnoredTokens = new HashSet + _parser.IgnoredTokenTypes = new HashSet { - new LineConcatenatorToken(), + TokenType.LineConcatenator, }; } @@ -117,50 +117,50 @@ protected void InitKeyIdParsing() /// protected void InitKeyValueParsing() { - parsingType = "key value parsing"; + _parsingType = "key value parsing"; ClearAllMyCallbacks(); - parser.ValidTokenFound += ValidTokenFoundDuringKeyValueParsing; + _parser.ValidTokenFound += ValidTokenFoundDuringKeyValueParsing; - parser.AllowedTokens = new HashSet + _parser.AllowedTokenTypes = new HashSet { - new ValueSeparatorToken(), - new LetterToken(), - new NewLineToken(), - new SpaceToken(), - new WhiteSpaceToken(), - new InlineCommentToken(), - new ValueMarkerToken() + TokenType.ValueSeparator, + TokenType.Letter, + TokenType.NewLine, + TokenType.Space, + TokenType.WhiteSpace, + TokenType.InlineComment, + TokenType.ValueMarker }; - parser.IgnoredTokens = new HashSet() + _parser.IgnoredTokenTypes = new HashSet() { - new LineConcatenatorToken(), - new EqualityToken() + TokenType.LineConcatenator, + TokenType.Equality }; } protected void InitPureValueParsing() { - parsingType = "pure value parsing"; + _parsingType = "pure value parsing"; ClearAllMyCallbacks(); - parser.ValidTokenFound += ValidTokenFoundDuringPureValueParsing; + _parser.ValidTokenFound += ValidTokenFoundDuringPureValueParsing; - parser.AllowedTokens = new HashSet + _parser.AllowedTokenTypes = new HashSet { - new LetterToken(), - new ValueMarkerToken(), - new SpaceToken(), - new WhiteSpaceToken(), - new ValueSeparatorToken() + TokenType.Letter, + TokenType.ValueMarker, + TokenType.Space, + TokenType.WhiteSpace, + TokenType.ValueSeparator }; - parser.IgnoredTokens = new HashSet() + _parser.IgnoredTokenTypes = new HashSet() { - new InlineCommentToken(), - new LineConcatenatorToken(), - new EqualityToken(), - new CategoryOpeningToken(), - new CategoryClosingToken() + TokenType.InlineComment, + TokenType.LineConcatenator, + TokenType.Equality, + TokenType.CategoryOpening, + TokenType.CategoryClosing }; } @@ -169,110 +169,110 @@ protected void InitPureValueParsing() /// protected void InitCommentParsing(Action previous) { - parsingType = "comment parsing"; - previousParsing = previous; + _parsingType = "comment parsing"; + _previousParsing = previous; ClearAllMyCallbacks(); - parser.ValidTokenFound += ValidTokenFoundDuringCommentParsing; + _parser.ValidTokenFound += ValidTokenFoundDuringCommentParsing; - parser.AllowedTokens = new HashSet + _parser.AllowedTokenTypes = new HashSet { - new NewLineToken(), + TokenType.NewLine, }; - parser.IgnoredTokens = new HashSet() + _parser.IgnoredTokenTypes = new HashSet() { - new LetterToken(), - new SpaceToken(), - new WhiteSpaceToken(), - new ValueMarkerToken(), - new ValueSeparatorToken(), - new LineConcatenatorToken(), - new EqualityToken(), - new InlineCommentToken(), - new CategoryOpeningToken(), - new CategoryClosingToken(), + TokenType.Letter, + TokenType.Space, + TokenType.WhiteSpace, + TokenType.ValueMarker, + TokenType.ValueSeparator, + TokenType.LineConcatenator, + TokenType.Equality, + TokenType.InlineComment, + TokenType.CategoryOpening, + TokenType.CategoryClosing, }; } // Parsing value inside "" - private void ValidTokenFoundDuringPureValueParsing(object sender, TokenBase tokenBase) + private void ValidTokenFoundDuringPureValueParsing(object sender, Token token) { - switch (tokenBase.Type) + switch (token.Type) { case TokenType.Letter: case TokenType.ValueSeparator: case 
TokenType.WhiteSpace: case TokenType.Space: - keyTmpValue += tokenBase.Symbol; + _keyTmpValue += token.Symbol; break; case TokenType.ValueMarker: ValueParsingComplete(true); InitKeyValueParsing(); break; default: - throw new InvalidTokenException("Invalid tokenBase found during comment parsing: " + tokenBase.Symbol); + throw new InvalidTokenException("Invalid tokenBase found during comment parsing: " + token.Symbol); } } //Parsing inline comment - private void ValidTokenFoundDuringCommentParsing(object sender, TokenBase tokenBase) + private void ValidTokenFoundDuringCommentParsing(object sender, Token token) { - switch (tokenBase.Type) + switch (token.Type) { case TokenType.NewLine: - previousParsing(); + _previousParsing(); break; default: - throw new InvalidTokenException("Invalid tokenBase found during comment parsing: " + tokenBase.Symbol); + throw new InvalidTokenException("Invalid tokenBase found during comment parsing: " + token.Symbol); } } //Parsing top layer - protected void ValidTokenFoundDuringMainParsing(object sender, TokenBase tokenBase) + protected void ValidTokenFoundDuringMainParsing(object sender, Token token) { - switch (tokenBase.Type) + switch (token.Type) { case TokenType.InlineComment: //go to next line, init comment parsing InitCommentParsing(InitMainParsing); break; case TokenType.CategoryOpening: - currentCategory = new Category(); + _currentCategory = new Category(); InitCategoryParsing(); break; default: - throw new InvalidTokenException("Invalid tokenBase found during parsing of the file: " + tokenBase.Symbol); + throw new InvalidTokenException("Invalid tokenBase found during parsing of the file: " + token.Symbol); } } //when parsing a category - protected void ValidTokenFoundDuringCategoryParsing(object sender, TokenBase tokenBase) + protected void ValidTokenFoundDuringCategoryParsing(object sender, Token token) { - switch (tokenBase.Type) + switch (token.Type) { case TokenType.CategoryClosing: InitKeyIdParsing(); break; case TokenType.LineConcatenator: - if (this.parser.Position == (this.parser.Length - 1)) + if (this._parser.Position == (this._parser.Length - 1)) { throw new InvalidTokenException(@"'\' are not allowed as the last token in a Category"); } - currentCategory.Name += tokenBase.Symbol; + _currentCategory.Name += token.Symbol; break; case TokenType.Letter: case TokenType.Space: - currentCategory.Name += tokenBase.Symbol; + _currentCategory.Name += token.Symbol; break; default: - throw new InvalidTokenException("Invalid tokenBase found during parsing of the file: " + tokenBase.Symbol); + throw new InvalidTokenException("Invalid tokenBase found during parsing of the file: " + token.Symbol); } } //when parsing a tokenBase id - protected void ValidTokenFoundDuringKeyIdParsing(object sender, TokenBase tokenBase) + protected void ValidTokenFoundDuringKeyIdParsing(object sender, Token token) { - switch (tokenBase.Type) + switch (token.Type) { case TokenType.ValueMarker: InitPureValueParsing(); @@ -281,12 +281,12 @@ protected void ValidTokenFoundDuringKeyIdParsing(object sender, TokenBase tokenB SerializeCurrentTmpValueAsAnonymousKey(); break; case TokenType.NewLine: - if (!string.IsNullOrEmpty(keyTmpValue)) + if (!string.IsNullOrEmpty(_keyTmpValue)) SerializeCurrentTmpValueAsAnonymousKey(); break; - case TokenType.EQ: + case TokenType.Equality: // multiple EQ tokens in formula - if (!string.IsNullOrEmpty(currentKey.Id)) + if (!string.IsNullOrEmpty(_currentKey.Id)) { throw new InvalidTokenException("Equality tokenBase detected, but not expected."); } @@ 
-296,13 +296,13 @@ protected void ValidTokenFoundDuringKeyIdParsing(object sender, TokenBase tokenB case TokenType.WhiteSpace: case TokenType.Space: //ignore spaces at the begining - if (!string.IsNullOrEmpty(keyTmpValue)) + if (!string.IsNullOrEmpty(_keyTmpValue)) { - keyTmpValue += tokenBase.Symbol; + _keyTmpValue += token.Symbol; } break; case TokenType.Letter: - keyTmpValue += tokenBase.Symbol; + _keyTmpValue += token.Symbol; break; case TokenType.CategoryOpening: KeyParsingComplete(); @@ -313,20 +313,20 @@ protected void ValidTokenFoundDuringKeyIdParsing(object sender, TokenBase tokenB InitCommentParsing(InitKeyIdParsing); break; default: - throw new InvalidTokenException("Invalid tokenBase found during parsing of the file: " + tokenBase.Symbol); + throw new InvalidTokenException("Invalid tokenBase found during parsing of the file: " + token.Symbol); } } //When parsing value - protected void ValidTokenFoundDuringKeyValueParsing(object sender, TokenBase tokenBase) + protected void ValidTokenFoundDuringKeyValueParsing(object sender, Token token) { - switch (tokenBase.Type) + switch (token.Type) { case TokenType.ValueSeparator: ValueParsingComplete(); break; case TokenType.Letter: - keyTmpValue += tokenBase.Symbol; + _keyTmpValue += token.Symbol; break; case TokenType.NewLine: ValueParsingComplete(); @@ -334,7 +334,7 @@ protected void ValidTokenFoundDuringKeyValueParsing(object sender, TokenBase tok InitKeyIdParsing(); break; case TokenType.WhiteSpace: - if (string.IsNullOrEmpty(keyTmpValue)) + if (string.IsNullOrEmpty(_keyTmpValue)) { ValueParsingComplete(); } @@ -350,56 +350,56 @@ protected void ValidTokenFoundDuringKeyValueParsing(object sender, TokenBase tok } } - protected void InvalidTokenFound(object sender, TokenBase tokenBase) + protected void InvalidTokenFound(object sender, Token token) { var builder = new StringBuilder(); - builder.AppendLine($"Invalid tokenBase found during {parsingType} parsing: "); - builder.AppendLine($"Symbol: {tokenBase.Symbol}"); - builder.AppendLine($"Token type: {tokenBase.Type}"); - builder.AppendLine($"Allowed tokens: {string.Join(", ", parser.AllowedTokens.Select(t => t.Type.ToString()))}"); - builder.AppendLine($"Ignored tokens: {string.Join(", ", parser.IgnoredTokens.Select(t => t.Type.ToString()))}"); + builder.AppendLine($"Invalid tokenBase found during {_parsingType} parsing: "); + builder.AppendLine($"Symbol: {token.Symbol}"); + builder.AppendLine($"Token type: {token.Type}"); + builder.AppendLine($"Allowed tokens: {string.Join(", ", _parser.AllowedTokenTypes.Select(t => t.ToString()))}"); + builder.AppendLine($"Ignored tokens: {string.Join(", ", _parser.IgnoredTokenTypes.Select(t => t.ToString()))}"); throw new InvalidTokenException(builder.ToString()); } protected void KeyParsingComplete() { - if (currentKey != null && currentKey.KeyValues.Any()) + if (_currentKey != null && _currentKey.KeyValues.Any()) { - currentCategory.Keys.Add(currentKey); - currentKey = null; + _currentCategory.Keys.Add(_currentKey); + _currentKey = null; } } protected void ValueParsingComplete(bool pure = false) { - if (!string.IsNullOrEmpty(keyTmpValue)) + if (!string.IsNullOrEmpty(_keyTmpValue)) { KeyValue keyValue; if (!pure) { keyValue = new KeyValue { - Value = keyTmpValue + Value = _keyTmpValue }; } else { keyValue = new PureValue { - Value = keyTmpValue + Value = _keyTmpValue }; } - currentKey.KeyValues.Add(keyValue); - keyTmpValue = null; + _currentKey.KeyValues.Add(keyValue); + _keyTmpValue = null; } } protected void CategoryParsingComplete() { - if 
(currentCategory != null) + if (_currentCategory != null) { - CategoryDiscovered?.Invoke(this, currentCategory); - currentCategory = null; + CategoryDiscovered?.Invoke(this, _currentCategory); + _currentCategory = null; } } @@ -408,27 +408,27 @@ protected void SerializeCurrentTmpValueAsAnonymousKey() //TODO Implement this var keyValue = new KeyValue { - Value = keyTmpValue + Value = _keyTmpValue }; - currentKey.KeyValues.Add(keyValue); - keyTmpValue = null; + _currentKey.KeyValues.Add(keyValue); + _keyTmpValue = null; } protected void KeyIdParsingCompleted() { // trim any leading or trailing whitespace - currentKey.Id = keyTmpValue.Trim(); - keyTmpValue = null; + _currentKey.Id = _keyTmpValue.Trim(); + _keyTmpValue = null; } private void ClearAllMyCallbacks() { - parser.ValidTokenFound -= ValidTokenFoundDuringMainParsing; - parser.ValidTokenFound -= ValidTokenFoundDuringCategoryParsing; - parser.ValidTokenFound -= ValidTokenFoundDuringKeyIdParsing; - parser.ValidTokenFound -= ValidTokenFoundDuringKeyValueParsing; - parser.ValidTokenFound -= ValidTokenFoundDuringCommentParsing; - parser.ValidTokenFound -= ValidTokenFoundDuringPureValueParsing; + _parser.ValidTokenFound -= ValidTokenFoundDuringMainParsing; + _parser.ValidTokenFound -= ValidTokenFoundDuringCategoryParsing; + _parser.ValidTokenFound -= ValidTokenFoundDuringKeyIdParsing; + _parser.ValidTokenFound -= ValidTokenFoundDuringKeyValueParsing; + _parser.ValidTokenFound -= ValidTokenFoundDuringCommentParsing; + _parser.ValidTokenFound -= ValidTokenFoundDuringPureValueParsing; } } } \ No newline at end of file diff --git a/InfHelper/src/Parsers/ITokenParser.cs b/InfHelper/src/Parsers/ITokenParser.cs index a51cb8a..c3def69 100644 --- a/InfHelper/src/Parsers/ITokenParser.cs +++ b/InfHelper/src/Parsers/ITokenParser.cs @@ -8,11 +8,11 @@ public interface ITokenParser { uint Length { get; } uint Position { get; } - ISet AllowedTokens { get; set; } - ISet AllTokens { get; } - ISet IgnoredTokens { get; set; } - event EventHandler InvalidTokenFound; - event EventHandler ValidTokenFound; + ISet AllowedTokenTypes { get; set; } + ISet AllTokenTypes { get; } + ISet IgnoredTokenTypes { get; set; } + event EventHandler InvalidTokenFound; + event EventHandler ValidTokenFound; void Parse(string formula); } diff --git a/InfHelperTests/src/InfHelperTests.cs b/InfHelperTests/src/InfHelperTests.cs index f5b70cb..a7160a0 100644 --- a/InfHelperTests/src/InfHelperTests.cs +++ b/InfHelperTests/src/InfHelperTests.cs @@ -12,11 +12,12 @@ namespace InfHelperTests [TestClass()] public class InfHelperTests { - private const string testFolder = "infs"; + private const string TestFolder = "infs"; + [TestMethod()] public void ParseTest() { - var content = File.ReadAllText(Path.Combine(testFolder, "oem100.inf")); + var content = File.ReadAllText(Path.Combine(TestFolder, "oem100.inf")); var helper = new InfUtil(); var data = helper.Parse(content); @@ -33,7 +34,7 @@ public void ParseTest() [TestMethod()] public void ParseCaseInsensitiveTest() { - var content = File.ReadAllText(Path.Combine(testFolder, "oem100.inf")); + var content = File.ReadAllText(Path.Combine(TestFolder, "oem100.inf")); var helper = new InfUtil(); var data = helper.Parse(content); @@ -52,7 +53,7 @@ public void FileParserEndpointTest() { var sw = new Stopwatch(); var helper = new InfUtil(); - var files = Directory.GetFiles(testFolder); + var files = Directory.GetFiles(TestFolder); foreach (var file in files) { sw.Reset(); @@ -72,20 +73,27 @@ public void PureValueParsingTest() "Razer_CoInstaller_CopyFiles 
= 11\r\n" + "Razer_Installer_CopyFiles = 16422,\"Razer\\RzWizardPkg\"\r\n" + "Razer_Installer_CopyFilesWOW64 = 16426,\"Razer\\RzWizardPkg\"\r\n" + - "Razer_Installer_CopyFilesWithBrackets = 16428,\"Razer\\RzWizardPkg ; [Brackets=X]\""; + "commentAfterPureValue = 16427,\"value\" ; 27\"\r\n" + + "commentAfterValue = 16428,value ; comment\"\r\n" + + "valueWithBrackets = 16429,\"value ; [Brackets=X]\""; var helper = new InfUtil(); var data = helper.Parse(formula); Assert.AreEqual("11", data["DestinationDirs"]["Razer_CoInstaller_CopyFiles"].PrimitiveValue); Assert.AreEqual("16422, \"Razer\\RzWizardPkg\"", data["DestinationDirs"]["Razer_Installer_CopyFiles"].PrimitiveValue); Assert.AreEqual("16426, \"Razer\\RzWizardPkg\"", data["DestinationDirs"]["Razer_Installer_CopyFilesWOW64"].PrimitiveValue); - Assert.AreEqual("16428, \"Razer\\RzWizardPkg ; [Brackets=X]\"", data["DestinationDirs"]["Razer_Installer_CopyFilesWithBrackets"].PrimitiveValue); + Assert.AreEqual("16427, \"value\"", data["DestinationDirs"]["commentAfterPureValue"].PrimitiveValue); + Assert.AreEqual("16428, value", data["DestinationDirs"]["commentAfterValue"].PrimitiveValue); + Assert.AreEqual("16429, \"value ; [Brackets=X]\"", data["DestinationDirs"]["valueWithBrackets"].PrimitiveValue); } [TestMethod()] - public void SearchMethdTest() + public void SearchMethodTest() { - string formula = - "[DestinationDirs]\r\nRazer_CoInstaller_CopyFiles = 11 ; Comment\r\nRazer_Installer_CopyFiles = 16422,\"Razer\\RzWizardPkg\"\r\nRazer_Installer_CopyFilesWOW64 = 16426,\"Razer\\RzWizardPkg\""; + string formula = + "[DestinationDirs]\r\n" + + "Razer_CoInstaller_CopyFiles = 11 ; Comment\r\n" + + "Razer_Installer_CopyFiles = 16422,\"Razer\\RzWizardPkg\"\r\n" + + "Razer_Installer_CopyFilesWOW64 = 16426,\"Razer\\RzWizardPkg\""; var helper = new InfUtil(); var data = helper.Parse(formula); Assert.AreEqual("11", data.FindKeyById("Razer_CoInstaller_CopyFiles").First().PrimitiveValue); @@ -95,35 +103,35 @@ public void SearchMethdTest() public void CustomSerializationTest() { var helper = new InfUtil(); - var serilized = helper.SerializeFileInto(Path.Combine(testFolder, "oem100.inf"), out InfData data); - Assert.AreEqual("HIDClass",serilized.Class); - Assert.AreEqual("%Razer%",serilized.Provider); - Assert.AreEqual("\"Razer Installer\"", serilized.DiskId1); + var serialized = helper.SerializeFileInto(Path.Combine(TestFolder, "oem100.inf"), out InfData data); + Assert.AreEqual("HIDClass",serialized.Class); + Assert.AreEqual("%Razer%",serialized.Provider); + Assert.AreEqual("\"Razer Installer\"", serialized.DiskId1); } [TestMethod()] public void CustomSerializationTest2() { var helper = new InfUtil(); - var serilized = helper.SerializeFileInto(Path.Combine(testFolder, "oem147.inf"), out InfData data); - Assert.AreEqual("net", serilized.Class); - Assert.AreEqual("%PROVIDER_NAME%", serilized.Provider); + var serialized = helper.SerializeFileInto(Path.Combine(TestFolder, "oem147.inf"), out InfData data); + Assert.AreEqual("net", serialized.Class); + Assert.AreEqual("%PROVIDER_NAME%", serialized.Provider); } [TestMethod()] public void CustomSerializationDereferenceTest() { var helper = new InfUtil(); - var serilized = helper.SerializeFileInto(Path.Combine(testFolder, "oem100.inf"), out InfData data); - Assert.AreEqual("HIDClass", serilized.Class); - Assert.AreEqual("Razer Inc", serilized.Provider); + var serialized = helper.SerializeFileInto(Path.Combine(TestFolder, "oem100.inf"), out InfData data); + Assert.AreEqual("HIDClass", serialized.Class); + 
Assert.AreEqual("Razer Inc", serialized.Provider); } [TestMethod()] public void CustomSerializationHugeDereferenceTest() { var helper = new InfUtil(); - foreach (var file in Directory.GetFiles(testFolder)) + foreach (var file in Directory.GetFiles(TestFolder)) { var serilized = helper.SerializeFileInto(file, out InfData data); Assert.IsNotNull(serilized.Provider); @@ -134,7 +142,7 @@ public void CustomSerializationHugeDereferenceTest() public void CanParseSpacesInCategoryName() { var helper = new InfUtil(); - var info = helper.ParseFile(Path.Combine(testFolder, "spaces.inf")); + var info = helper.ParseFile(Path.Combine(TestFolder, "spaces.inf")); // info.Categories should contain [OEM URLS] Assert.IsTrue(info.Categories.Count(x => x.Name == "OEM URLS") == 1); diff --git a/InfHelperTests/src/Parsers/BasicTokenParserTests.cs b/InfHelperTests/src/Parsers/BasicTokenParserTests.cs index 6132628..77368b4 100644 --- a/InfHelperTests/src/Parsers/BasicTokenParserTests.cs +++ b/InfHelperTests/src/Parsers/BasicTokenParserTests.cs @@ -1,4 +1,6 @@ -using System.Collections.Generic; +using System; +using System.Collections.Generic; +using System.Linq; using InfHelper.Models.Tokens; using InfHelper.Parsers; using Microsoft.VisualStudio.TestTools.UnitTesting; @@ -12,17 +14,17 @@ public class BasicTokenParserTests public void TokenOrderTest() { string formula = "[TEST]"; - var parser = new BasicTokenParser(new HashSet + var parser = new BasicTokenParser(new HashSet { - new CategoryOpeningToken(), - new LetterToken(), - new CategoryClosingToken(), - }, new HashSet()); - string result = ""; - parser.ValidTokenFound += (sender, token) => result += token.Symbol; + TokenType.CategoryOpening, + TokenType.Letter, + TokenType.CategoryClosing, + }, new HashSet()); + TestTokenHandler validTokenHandler = new TestTokenHandler(); + parser.ValidTokenFound += validTokenHandler.EventHandler; parser.Parse(formula); - Assert.AreEqual(formula, result); + Assert.AreEqual(formula, validTokenHandler.Result); } [TestMethod()] @@ -30,110 +32,107 @@ public void IgnoredTokensTest() { string expression = "[TEST]"; string formula = " \n " + expression + " \n "; - var parser = new BasicTokenParser(new HashSet + var parser = new BasicTokenParser(new HashSet { - new CategoryOpeningToken(), - new LetterToken(), - new CategoryClosingToken(), - }, new HashSet + TokenType.CategoryOpening, + TokenType.Letter, + TokenType.CategoryClosing, + }, new HashSet { - new WhiteSpaceToken(), - new NewLineToken() + TokenType.WhiteSpace, + TokenType.NewLine }); - string result = ""; - parser.ValidTokenFound += (sender, token) => result += token.Symbol; + TestTokenHandler validTokenHandler = new TestTokenHandler(); + parser.ValidTokenFound += validTokenHandler.EventHandler; parser.Parse(formula); - Assert.AreEqual(expression, result); + Assert.AreEqual(expression, validTokenHandler.Result); } [TestMethod()] public void AllowedTokensTest() { string formula = "[TE;ST] \\"; - var parser = new BasicTokenParser(new HashSet + var parser = new BasicTokenParser(new HashSet { - new CategoryOpeningToken(), - new LetterToken(), - new CategoryClosingToken(), - }, new HashSet + TokenType.CategoryOpening, + TokenType.Letter, + TokenType.CategoryClosing, + }, new HashSet { - new WhiteSpaceToken(), - new NewLineToken() + TokenType.WhiteSpace, + TokenType.NewLine }); - string result = ""; - string invalids = ""; - parser.ValidTokenFound += (sender, token) => result += token.Symbol; - parser.InvalidTokenFound += (sender, token) => - { - invalids += token.Symbol; - }; + 
TestTokenHandler invalidTokenHandler = new TestTokenHandler(); + parser.InvalidTokenFound += invalidTokenHandler.EventHandler; + parser.Parse(formula); - Assert.IsTrue(invalids.Contains(";") && invalids.Contains("\\")); + Assert.AreEqual("; \\", invalidTokenHandler.Result); } [TestMethod()] public void TokensWithSameSymbol() { string formula = "Test = test\\\ntest"; - var parser = new BasicTokenParser(new HashSet + var parser = new BasicTokenParser(new HashSet { - new LetterToken(), - new EqualityToken(), - new LineConcatenatorToken() - }, new HashSet + TokenType.Letter, + TokenType.Equality, + TokenType.LineConcatenator + }, new HashSet { - new WhiteSpaceToken(), - new NewLineToken() + TokenType.WhiteSpace, + TokenType.NewLine }); - var tokens = new List(); - parser.ValidTokenFound += (sender, token) => tokens.Add(token.Type); + var missingTokens = new HashSet {TokenType.Equality, TokenType.Letter, TokenType.LineConcatenator}; + parser.ValidTokenFound += (sender, token) => missingTokens.Remove(token.Type); parser.Parse(formula); - Assert.IsTrue(tokens.Contains(TokenType.EQ) && tokens.Contains(TokenType.Letter) && tokens.Contains(TokenType.LineConcatenator)); + Assert.IsFalse(missingTokens.Any(), + "Missing expected Tokens: " + string.Join(", ", missingTokens.Select(a => a.ToString()))); } [TestMethod()] public void TestOfAdaptability() { string formula = "Test = test\\\ntest"; - var parser = new BasicTokenParser(new HashSet + var parser = new BasicTokenParser(new HashSet { - new LetterToken(), - new EqualityToken(), - }, new HashSet + TokenType.Letter, + TokenType.Equality, + }, new HashSet { - new WhiteSpaceToken(), + TokenType.WhiteSpace, }); string id = ""; string key = ""; - void keyParsing(object sender, TokenBase token) + void KeyParsing(object sender, Token token) { switch (token.Type) { case TokenType.Letter: id += token.Symbol; break; - case TokenType.EQ: - parser.ValidTokenFound -= keyParsing; - parser.AllowedTokens = new HashSet() + case TokenType.Equality: + parser.ValidTokenFound -= KeyParsing; + parser.AllowedTokenTypes = new HashSet() { - new LetterToken(), - new NewLineToken(), - new LineConcatenatorToken(), + TokenType.Letter, + TokenType.NewLine, + TokenType.LineConcatenator, }; - parser.ValidTokenFound += valueParsing; + parser.ValidTokenFound += ValueParsing; break; } } - void valueParsing(object sender, TokenBase token) + void ValueParsing(object sender, Token token) { switch (token.Type) { @@ -141,16 +140,24 @@ void valueParsing(object sender, TokenBase token) key += token.Symbol; break; case TokenType.LineConcatenator: - parser.IgnoredTokens.Add(new NewLineToken()); + parser.IgnoredTokenTypes.Add(TokenType.NewLine); break; } } - parser.ValidTokenFound += keyParsing; + parser.ValidTokenFound += KeyParsing; parser.Parse(formula); Assert.AreEqual(id,"Test"); Assert.AreEqual(key,"testtest"); } } + + class TestTokenHandler + { + public EventHandler EventHandler => (sender, token) => Result += token.Symbol; + + public string Result { get; private set; } + } + } \ No newline at end of file
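
The attribute-driven TokenType above replaces the old TokenBase class hierarchy: every enum member carries either a fixed symbol set or a character predicate, and the TokenTypes helper builds a single predicate table from those attributes. A minimal sketch of how a consumer of the library might classify characters with it (the demo class and the sample characters are illustrative, not part of this patch):

    using System;
    using InfHelper.Models.Tokens;

    internal static class TokenTypeDemo
    {
        private static void Main()
        {
            // Symbol-based members match exactly the characters named in their attribute.
            Console.WriteLine(TokenTypes.IsToken(TokenType.CategoryOpening, '[')); // True
            Console.WriteLine(TokenTypes.IsToken(TokenType.InlineComment, ';'));   // True

            // Predicate-based members delegate to their lambda; Letter excludes whitespace.
            Console.WriteLine(TokenTypes.IsToken(TokenType.Letter, ' '));          // False

            // CreateToken pairs the classified character with its TokenType.
            Token token = TokenTypes.CreateToken(TokenType.Equality, '=');
            Console.WriteLine($"{token.Type}: {token.Symbol}");                    // Equality: =
        }
    }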
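
The reworked BasicTokenParser keeps the event-driven flow of the old implementation, but it is now configured with plain TokenType sets instead of token instances, and unrecognized characters raise NoTokenRecognizedException. A short usage sketch in the spirit of BasicTokenParserTests above (the INF fragment and the handler bodies are illustrative, not part of this patch):

    using System;
    using System.Collections.Generic;
    using InfHelper.Models.Tokens;
    using InfHelper.Parsers;

    internal static class BasicTokenParserDemo
    {
        private static void Main()
        {
            // Accept category brackets and letters; silently skip spaces, whitespace and newlines.
            var parser = new BasicTokenParser(
                new HashSet<TokenType> { TokenType.CategoryOpening, TokenType.Letter, TokenType.CategoryClosing },
                new HashSet<TokenType> { TokenType.Space, TokenType.WhiteSpace, TokenType.NewLine });

            var category = "";
            parser.ValidTokenFound += (sender, token) => category += token.Symbol;
            parser.InvalidTokenFound += (sender, token) =>
                Console.WriteLine($"Unexpected {token.Type} '{token.Symbol}'");

            parser.Parse(" [Version] \n");
            Console.WriteLine(category); // [Version]
        }
    }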