using System;
using System.Collections.Generic;
using System.IO;
using System.Reflection;
using ln.parse.tokenizer;
using NUnit.Framework;
namespace ln.parse.tests
{
    /// <summary>
    /// NUnit tests for <see cref="Tokenizer"/>: single-token ("primitive") inputs
    /// and a multi-token source read from <c>complex.txt</c>.
    /// </summary>
    public class TokenizerTests
    {
        // Writer onto the runner's stdout so diagnostic output appears immediately
        // (flushed explicitly after each batch of writes).
        StreamWriter output = new StreamWriter(Console.OpenStandardOutput());

        // Tokenizer under test, using the library's default token set.
        Tokenizer tokenizer = Tokenizer.CreateDefaultTokenizer();

        // (input, expected token type) pairs: each input must tokenize to exactly
        // one token of the given type.
        KeyValuePair<string,Type>[] primitiveTests = new KeyValuePair<string, Type>[]{
            new KeyValuePair<string,Type>("0",typeof(Token.IntegerToken)),
            new KeyValuePair<string,Type>("1",typeof(Token.IntegerToken)),
            new KeyValuePair<string,Type>("2",typeof(Token.IntegerToken)),
            new KeyValuePair<string,Type>("3",typeof(Token.IntegerToken)),
            new KeyValuePair<string,Type>("4",typeof(Token.IntegerToken)),
            new KeyValuePair<string,Type>("5",typeof(Token.IntegerToken)),
            new KeyValuePair<string,Type>("6",typeof(Token.IntegerToken)),
            new KeyValuePair<string,Type>("7",typeof(Token.IntegerToken)),
            new KeyValuePair<string,Type>("8",typeof(Token.IntegerToken)),
            new KeyValuePair<string,Type>("9",typeof(Token.IntegerToken)),
            new KeyValuePair<string,Type>("10",typeof(Token.IntegerToken)),
            new KeyValuePair<string,Type>("100",typeof(Token.IntegerToken)),
            new KeyValuePair<string,Type>("453",typeof(Token.IntegerToken)),
            new KeyValuePair<string,Type>("75239475",typeof(Token.IntegerToken)),
            new KeyValuePair<string,Type>("99999999",typeof(Token.IntegerToken)),
            new KeyValuePair<string,Type>("-15362",typeof(Token.IntegerToken)),
            new KeyValuePair<string,Type>("-1",typeof(Token.IntegerToken)),
            new KeyValuePair<string,Type>("-2",typeof(Token.IntegerToken)),
            new KeyValuePair<string,Type>("-3",typeof(Token.IntegerToken)),
            new KeyValuePair<string,Type>("-4",typeof(Token.IntegerToken)),
            new KeyValuePair<string,Type>("-5",typeof(Token.IntegerToken)),
            new KeyValuePair<string,Type>("0.0",typeof(Token.FloatToken)),
            new KeyValuePair<string,Type>("-123.456",typeof(Token.FloatToken)),
            new KeyValuePair<string,Type>("123.456",typeof(Token.FloatToken)),
            new KeyValuePair<string,Type>("987463.234636",typeof(Token.FloatToken)),
            new KeyValuePair<string,Type>("-352594.2373782",typeof(Token.FloatToken)),
            new KeyValuePair<string,Type>("\"Hallo Welt, ich bin ein \\\"String\\\"!\"",typeof(Token.StringToken)),
            new KeyValuePair<string,Type>("\"a simple string\"",typeof(Token.StringToken)),
            new KeyValuePair<string,Type>("\"that's it, I can string\"",typeof(Token.StringToken)),
            new KeyValuePair<string,Type>("(",typeof(Token.BracketToken)),
            new KeyValuePair<string,Type>(")",typeof(Token.BracketToken)),
            new KeyValuePair<string,Type>("[",typeof(Token.BracketToken)),
            new KeyValuePair<string,Type>("]",typeof(Token.BracketToken)),
            new KeyValuePair<string,Type>("{",typeof(Token.BracketToken)),
            new KeyValuePair<string,Type>("}",typeof(Token.BracketToken)),
            new KeyValuePair<string,Type>("\t",typeof(Token.WhiteSpaceToken)),
            new KeyValuePair<string,Type>("Ich",typeof(Token.IdentifierToken)),
            new KeyValuePair<string,Type>("IchBinEinIdentifier",typeof(Token.IdentifierToken)),
            new KeyValuePair<string,Type>(" ",typeof(Token.WhiteSpaceToken))
        };

        /// <summary>Smoke test: the fixture itself loads and runs.</summary>
        [Test]
        public void Test_00_Token()
        {
            Assert.Pass();
        }

        /// <summary>
        /// Each primitive input must parse to exactly one token of the expected
        /// type, whose TokenSource round-trips the original input text.
        /// </summary>
        [Test]
        public void Test_10_Primitives()
        {
            foreach (KeyValuePair<string,Type> primTest in primitiveTests)
            {
                output.WriteLine("Primitive Test: {0} => {1}", primTest.Key, primTest.Value);
                output.Flush();

                Token[] token = tokenizer.Parse(primTest.Key);

                // Assert the count BEFORE indexing token[0]: with zero tokens the
                // original code threw IndexOutOfRangeException instead of failing
                // with a readable assertion message.
                Assert.AreEqual(1, token.Length);

                output.WriteLine("Token Source: {0}", token[0].TokenSource);
                output.WriteLine("Token Value: {0}", token[0].Value);
                output.Flush();

                Assert.AreEqual(primTest.Value, token[0].GetType());
                Assert.AreEqual(primTest.Key, token[0].TokenSource);
            }
            // Note: a trailing Assert.Pass() was removed — it throws
            // SuccessException and adds nothing after the real assertions.
        }

        // Source text for the complex test; loaded from complex.txt on each run.
        string complexSource = null;

        /// <summary>
        /// Tokenizes complex.txt twice — unfiltered, then with whitespace tokens
        /// filtered out — and dumps the resulting token streams. Currently a
        /// smoke/inspection test: it asserts nothing beyond "Parse does not throw".
        /// </summary>
        [Test]
        public void Test_20_Complex()
        {
            // File.ReadAllText replaces the hand-rolled StreamReader block;
            // it opens, reads and disposes in one call.
            complexSource = File.ReadAllText("complex.txt");

            output.WriteLine("--- complex test (no filter) ---");
            output.Flush();

            Token[] tokens = tokenizer.Parse(complexSource);

            foreach (Token token in tokens)
            {
                output.WriteLine("Token: {0,-48}: {1}",token.GetType(),token.Value);
            }
            output.Flush();

            output.WriteLine("--- complex filter test ---");
            output.Flush();

            // Second pass: the predicate drops whitespace tokens from the result.
            tokens = tokenizer.Parse(complexSource, (token) => !(token is Token.WhiteSpaceToken));

            foreach (Token token in tokens)
            {
                output.WriteLine("Token: {0,-48}: {1}",token.GetType(),token.Value);
            }
            output.Flush();
        }
    }
}