using System;
using System.Collections.Generic;
using System.IO;
using System.Reflection;

using NUnit.Framework;

using ln.parse.tokenizer;

namespace ln.parse.tests
{
    /// <summary>
    /// Tests for the default <see cref="Tokenizer"/>: single-token primitive sources
    /// (integers, floats, strings, brackets, whitespace, identifiers) and a complex
    /// multi-token source read from <c>complex.txt</c>.
    /// </summary>
    public class TokenizerTests
    {
        // Console.Out is managed (and flushed) by the runtime; the original wrapped
        // Console.OpenStandardOutput() in a StreamWriter that was never disposed.
        private readonly TextWriter output = Console.Out;

        private readonly Tokenizer tokenizer = Tokenizer.CreateDefaultTokenizer();

        // FIX: the original constructed the non-generic `KeyValuePair`, which has no
        // (string, Type) constructor in the BCL. Use KeyValuePair<string, Type> so
        // Key (token source text) and Value (expected token type) are strongly typed,
        // matching how Test_10_Primitives consumes them.
        private readonly KeyValuePair<string, Type>[] primitiveTests = new KeyValuePair<string, Type>[]
        {
            new KeyValuePair<string, Type>("0", typeof(Token.IntegerToken)),
            new KeyValuePair<string, Type>("1", typeof(Token.IntegerToken)),
            new KeyValuePair<string, Type>("2", typeof(Token.IntegerToken)),
            new KeyValuePair<string, Type>("3", typeof(Token.IntegerToken)),
            new KeyValuePair<string, Type>("4", typeof(Token.IntegerToken)),
            new KeyValuePair<string, Type>("5", typeof(Token.IntegerToken)),
            new KeyValuePair<string, Type>("6", typeof(Token.IntegerToken)),
            new KeyValuePair<string, Type>("7", typeof(Token.IntegerToken)),
            new KeyValuePair<string, Type>("8", typeof(Token.IntegerToken)),
            new KeyValuePair<string, Type>("9", typeof(Token.IntegerToken)),
            new KeyValuePair<string, Type>("10", typeof(Token.IntegerToken)),
            new KeyValuePair<string, Type>("100", typeof(Token.IntegerToken)),
            new KeyValuePair<string, Type>("453", typeof(Token.IntegerToken)),
            new KeyValuePair<string, Type>("75239475", typeof(Token.IntegerToken)),
            new KeyValuePair<string, Type>("99999999", typeof(Token.IntegerToken)),
            new KeyValuePair<string, Type>("-15362", typeof(Token.IntegerToken)),
            new KeyValuePair<string, Type>("-1", typeof(Token.IntegerToken)),
            new KeyValuePair<string, Type>("-2", typeof(Token.IntegerToken)),
            new KeyValuePair<string, Type>("-3", typeof(Token.IntegerToken)),
            new KeyValuePair<string, Type>("-4", typeof(Token.IntegerToken)),
            new KeyValuePair<string, Type>("-5", typeof(Token.IntegerToken)),
            new KeyValuePair<string, Type>("0.0", typeof(Token.FloatToken)),
            new KeyValuePair<string, Type>("-123.456", typeof(Token.FloatToken)),
            new KeyValuePair<string, Type>("123.456", typeof(Token.FloatToken)),
            new KeyValuePair<string, Type>("987463.234636", typeof(Token.FloatToken)),
            new KeyValuePair<string, Type>("-352594.2373782", typeof(Token.FloatToken)),
            new KeyValuePair<string, Type>("\"Hallo Welt, ich bin ein \\\"String\\\"!\"", typeof(Token.StringToken)),
            new KeyValuePair<string, Type>("\"a simple string\"", typeof(Token.StringToken)),
            new KeyValuePair<string, Type>("\"that's it, I can string\"", typeof(Token.StringToken)),
            new KeyValuePair<string, Type>("(", typeof(Token.BracketToken)),
            new KeyValuePair<string, Type>(")", typeof(Token.BracketToken)),
            new KeyValuePair<string, Type>("[", typeof(Token.BracketToken)),
            new KeyValuePair<string, Type>("]", typeof(Token.BracketToken)),
            new KeyValuePair<string, Type>("{", typeof(Token.BracketToken)),
            new KeyValuePair<string, Type>("}", typeof(Token.BracketToken)),
            new KeyValuePair<string, Type>("\t", typeof(Token.WhiteSpaceToken)),
            new KeyValuePair<string, Type>("Ich", typeof(Token.IdentifierToken)),
            new KeyValuePair<string, Type>("IchBinEinIdentifier", typeof(Token.IdentifierToken)),
            new KeyValuePair<string, Type>(" ", typeof(Token.WhiteSpaceToken))
        };

        /// <summary>Smoke test: fixture construction (tokenizer creation) succeeded.</summary>
        [Test]
        public void Test_00_Token()
        {
            Assert.Pass();
        }

        /// <summary>
        /// Each primitive source must parse to exactly one token of the expected type,
        /// whose TokenSource round-trips the input text.
        /// </summary>
        [Test]
        public void Test_10_Primitives()
        {
            foreach (KeyValuePair<string, Type> primTest in primitiveTests)
            {
                output.WriteLine("Primitive Test: {0} => {1}", primTest.Key, primTest.Value);
                output.Flush();

                Token[] token = tokenizer.Parse(primTest.Key);

                // FIX: assert the count before indexing token[0] — the original logged
                // token[0] first, so an empty parse result threw IndexOutOfRangeException
                // instead of producing a readable assertion failure.
                Assert.AreEqual(1, token.Length);

                output.WriteLine("Token Source: {0}", token[0].TokenSource);
                output.WriteLine("Token Value: {0}", token[0].Value);
                output.Flush();

                Assert.AreEqual(primTest.Value, token[0].GetType());
                Assert.AreEqual(primTest.Key, token[0].TokenSource);
            }
            // No trailing Assert.Pass() needed — NUnit passes when no assertion fails.
        }

        /// <summary>
        /// Tokenizes a complex source file, once unfiltered and once with whitespace
        /// tokens filtered out, logging every token (no value assertions — output-only).
        /// </summary>
        [Test]
        public void Test_20_Complex()
        {
            // NOTE(review): "complex.txt" is resolved against the process working
            // directory; the file must be copied next to the runner for this to pass.
            string complexSource = File.ReadAllText("complex.txt");

            output.WriteLine("--- complex test (no filter) ---");
            output.Flush();

            Token[] tokens = tokenizer.Parse(complexSource);
            foreach (Token token in tokens)
            {
                output.WriteLine("Token: {0,-48}: {1}", token.GetType(), token.Value);
            }
            output.Flush();

            output.WriteLine("--- complex filter test ---");
            output.Flush();

            // Same source, but drop whitespace tokens via the filter predicate overload.
            tokens = tokenizer.Parse(complexSource, (token) => !(token is Token.WhiteSpaceToken));
            foreach (Token token in tokens)
            {
                output.WriteLine("Token: {0,-48}: {1}", token.GetType(), token.Value);
            }
            output.Flush();
        }
    }
}