本文整理汇总了C#中Tokenizer.GetNextToken方法的典型用法代码示例。如果您正苦于以下问题:C# Tokenizer.GetNextToken方法的具体用法?C# Tokenizer.GetNextToken怎么用?C# Tokenizer.GetNextToken使用的例子?那么恭喜您,这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类Tokenizer的用法示例。
在下文中一共展示了Tokenizer.GetNextToken方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: Test1
static void Test1()
{
    // Smoke test: tokenize a PHP 'echo' statement whose string literals
    // contain multi-byte UTF-8 characters, and drain the token stream
    // until EOF. Exercises the tokenizer's byte/char position handling
    // across multi-byte input (the original motivation for this scratch test).
    Tokenizer tokenizer = new Tokenizer(TextReader.Null);

    string s = "echo 'asdě' . 'řčřžý' . 'موقع للأخبا' . 'האתר' . 'as';";

    // Feed the source through a UTF-8 byte stream so the reader has to
    // decode multi-byte sequences, as a real file read would.
    byte[] b = Encoding.UTF8.GetBytes(s);
    using (Stream stream = new MemoryStream(b))
    using (StreamReader reader = new StreamReader(stream))
    {
        tokenizer.Initialize(reader, PHP.Core.Parsers.Lexer.LexicalStates.ST_IN_SCRIPTING, true);

        // Consume every token; loop ends when the lexer reports EOF.
        Tokens token;
        do
        {
            token = tokenizer.GetNextToken();
        } while (token != Tokens.EOF);
    }
}
示例2: TestTokenizingStringWithUnexpectedCharacters
public void TestTokenizingStringWithUnexpectedCharacters()
{
    // '@' is not a valid token-start character, so the first call to
    // GetNextToken must throw UnexpectedCharEncounteredException.
    const string data = "@";
    MemoryStream stream = null;
    try
    {
        stream = new MemoryStream(Encoding.ASCII.GetBytes(data));
        Scanner scanner = new Scanner(stream);
        stream = null; // scanner takes ownership; don't double-dispose
        Tokenizer tokenizer = new Tokenizer(scanner);

        Assert.That(() => { Token<TokenType> unused = tokenizer.GetNextToken(); },
            Throws.TypeOf(typeof(UnexpectedCharEncounteredException)));
    }
    finally
    {
        if (stream != null)
            stream.Dispose();
    }
}
示例3: TestTokenizingEmptyStream
public void TestTokenizingEmptyStream()
{
    // An empty input stream must produce no tokens at all:
    // the very first GetNextToken() returns null.
    const string data = "";
    MemoryStream stream = null;
    try
    {
        stream = new MemoryStream(Encoding.ASCII.GetBytes(data));
        Scanner scanner = new Scanner(stream);
        stream = null; // scanner takes ownership; don't double-dispose
        Tokenizer tokenizer = new Tokenizer(scanner);

        Assert.That(tokenizer.GetNextToken(), Is.Null);
    }
    finally
    {
        if (stream != null)
            stream.Dispose();
    }
}
示例4: TestTokenizingSymbolsNotIgnoringWhitespace
public void TestTokenizingSymbolsNotIgnoringWhitespace()
{
    // With whitespace skipping disabled (GetNextToken(false)), runs of
    // whitespace come back verbatim as WHITESPACE tokens interleaved
    // with the SYMBOL tokens, with correct line/column positions.
    const string data = "\t\t\t symbol1 \t \t \t \nsymbol2 ";
    MemoryStream stream = null;
    try
    {
        stream = new MemoryStream(Encoding.ASCII.GetBytes(data));
        Scanner scanner = new Scanner(stream);
        stream = null; // scanner takes ownership; don't double-dispose
        Tokenizer tokenizer = new Tokenizer(scanner);

        // Expected token stream, in order: value, type, line, column.
        var expected = new[]
        {
            new { Value = "\t\t\t ",       Type = TokenType.WHITESPACE, Line = 1, Col = 1 },
            new { Value = "symbol1",      Type = TokenType.SYMBOL,     Line = 1, Col = 14 },
            new { Value = " \t \t \t \n", Type = TokenType.WHITESPACE, Line = 1, Col = 21 },
            new { Value = "symbol2",      Type = TokenType.SYMBOL,     Line = 2, Col = 1 },
            new { Value = " ",            Type = TokenType.WHITESPACE, Line = 2, Col = 8 },
        };

        foreach (var exp in expected)
        {
            Token<TokenType> token = tokenizer.GetNextToken(false);
            Assert.That(token.Value, Is.EqualTo(exp.Value));
            Assert.That(token.Type, Is.EqualTo(exp.Type));
            Assert.That(token.LineNumber, Is.EqualTo(exp.Line));
            Assert.That(token.PositionInLine, Is.EqualTo(exp.Col));
        }

        // Stream exhausted: default call returns null.
        Assert.That(tokenizer.GetNextToken(), Is.Null);
    }
    finally
    {
        if (stream != null)
            stream.Dispose();
    }
}
示例5: ParseSectorHeader
// Parses one sector header from the level token stream into a new Sector.
// The token order is fixed: NAME, AMBIENT, FLOOR TEXTURE, FLOOR ALTITUDE,
// CEILING TEXTURE, CEILING ALTITUDE, SECOND ALTITUDE, FLAGS, LAYER.
// NOTE(review): EnsureNextToken presumably throws when the expected
// keyword is missing — confirm against the Tokenizer implementation.
private Sector ParseSectorHeader(Tokenizer levelTokens)
{
Sector sector = new Sector();
levelTokens.EnsureNextToken("NAME");
string sectorName = levelTokens.GetNextToken();
// NAME may be omitted; if the next token is already AMBIENT the sector
// is unnamed and the token must be pushed back for the check below.
if (sectorName != "AMBIENT") {
sector.Name = sectorName;
} else { // unnamed sector
levelTokens.UngetToken();
}
levelTokens.EnsureNextToken("AMBIENT");
sector.Ambient = levelTokens.RequireNextInt();
levelTokens.EnsureNextToken("FLOOR");
levelTokens.EnsureNextToken("TEXTURE");
sector.FloorTex = levelTokens.RequireNextInt();
sector.FloorShiftX = levelTokens.RequireNextFloat();
sector.FloorShiftZ = levelTokens.RequireNextFloat();
// Trailing int after the floor-texture shifts is consumed but unused.
levelTokens.RequireNextInt();
levelTokens.EnsureNextToken("FLOOR");
levelTokens.EnsureNextToken("ALTITUDE");
// Altitudes are negated on load — presumably the file uses the opposite
// vertical sign convention from the engine; confirm.
sector.FloorAlt = -levelTokens.RequireNextFloat();
levelTokens.EnsureNextToken("CEILING");
levelTokens.EnsureNextToken("TEXTURE");
sector.CeilTex = levelTokens.RequireNextInt();
sector.CeilShiftX = levelTokens.RequireNextFloat();
sector.CeilShiftZ = levelTokens.RequireNextFloat();
// Trailing int after the ceiling-texture shifts is consumed but unused.
levelTokens.RequireNextInt();
levelTokens.EnsureNextToken("CEILING");
levelTokens.EnsureNextToken("ALTITUDE");
sector.CeilAlt = -levelTokens.RequireNextFloat();
levelTokens.EnsureNextToken("SECOND");
levelTokens.EnsureNextToken("ALTITUDE");
sector.SecondAlt = -levelTokens.RequireNextFloat();
// FLAGS carries three ints; only the first maps to a typed enum.
levelTokens.EnsureNextToken("FLAGS");
sector.Flags0 = (Sector.EFlags0)levelTokens.RequireNextInt();
sector.Flags1 = levelTokens.RequireNextInt();
sector.Flags2 = levelTokens.RequireNextInt();
levelTokens.EnsureNextToken("LAYER");
sector.Layer = levelTokens.RequireNextInt();
return sector;
}
示例6: TestTokenizingSymbols
public void TestTokenizingSymbols()
{
    // Two whitespace-separated symbols; whitespace is skipped by default,
    // so only the SYMBOL tokens are returned, then null at end of input.
    const string data = "symbol1 symbol2";
    MemoryStream stream = null;
    try
    {
        stream = new MemoryStream(Encoding.ASCII.GetBytes(data));
        Scanner scanner = new Scanner(stream);
        stream = null; // scanner takes ownership; don't double-dispose
        Tokenizer tokenizer = new Tokenizer(scanner);

        // Expected token stream, in order: value and 1-based column.
        var expected = new[]
        {
            new { Value = "symbol1", Col = 1 },
            new { Value = "symbol2", Col = 9 },
        };

        foreach (var exp in expected)
        {
            Token<TokenType> token = tokenizer.GetNextToken();
            Assert.That(token.Value, Is.EqualTo(exp.Value));
            Assert.That(token.Type, Is.EqualTo(TokenType.SYMBOL));
            Assert.That(token.LineNumber, Is.EqualTo(1));
            Assert.That(token.PositionInLine, Is.EqualTo(exp.Col));
        }

        Assert.That(tokenizer.GetNextToken(), Is.Null);
    }
    finally
    {
        if (stream != null)
            stream.Dispose();
    }
}
示例7: TestTokenizingStringLiteralWithEscapeCharacter
public void TestTokenizingStringLiteralWithEscapeCharacter()
{
    // Escaped quotes (\") and escaped backslashes (\\) must remain inside
    // a single STRING_LITERAL token instead of terminating it early.
    const string data = @"""str\""test"" ""\\""";
    const char stringLiteralPunctuator = '"';
    MemoryStream stream = null;
    try
    {
        stream = new MemoryStream(Encoding.ASCII.GetBytes(data));
        Scanner scanner = new Scanner(stream);
        stream = null; // scanner takes ownership; don't double-dispose
        Tokenizer tokenizer = new Tokenizer(scanner)
        {
            StringLiteralPunctuator = stringLiteralPunctuator,
            StringEscapeCharacter = '\\'
        };

        // Expected literals (with their delimiters) and 1-based columns.
        var expected = new[]
        {
            new { Value = "\"str\\\"test\"", Col = 1 },
            new { Value = "\"\\\\\"",        Col = 13 },
        };

        foreach (var exp in expected)
        {
            Token<TokenType> token = tokenizer.GetNextToken();
            Assert.That(token.Value, Is.EqualTo(exp.Value));
            Assert.That(token.Type, Is.EqualTo(TokenType.STRING_LITERAL));
            Assert.That(token.LineNumber, Is.EqualTo(1));
            Assert.That(token.PositionInLine, Is.EqualTo(exp.Col));
        }

        Assert.That(tokenizer.GetNextToken(), Is.Null);
    }
    finally
    {
        if (stream != null)
            stream.Dispose();
    }
}
示例8: TestTokenizingCharLiteralWithEscapeCharacter
public void TestTokenizingCharLiteralWithEscapeCharacter()
{
    // An escaped quote inside a char literal ('\'') must not terminate
    // the literal; the whole thing comes back as one CHAR_LITERAL token.
    const string data = @"'\''";
    MemoryStream stream = null;
    try
    {
        stream = new MemoryStream(Encoding.ASCII.GetBytes(data));
        Scanner scanner = new Scanner(stream);
        stream = null; // scanner takes ownership; don't double-dispose
        Tokenizer tokenizer = new Tokenizer(scanner)
        {
            CharLiteralPunctuator = '\'',
            StringEscapeCharacter = '\\'
        };

        Token<TokenType> token = tokenizer.GetNextToken();
        Assert.That(token.Value, Is.EqualTo("'\\''"));
        Assert.That(token.Type, Is.EqualTo(TokenType.CHAR_LITERAL));
        Assert.That(token.LineNumber, Is.EqualTo(1));
        Assert.That(token.PositionInLine, Is.EqualTo(1));

        Assert.That(tokenizer.GetNextToken(), Is.Null);
    }
    finally
    {
        if (stream != null)
            stream.Dispose();
    }
}
示例9: TestFailingTokenizingEndComment
public void TestFailingTokenizingEndComment()
{
    // A close-comment marker with no matching open marker is an error:
    // "test" tokenizes normally, then "*/" must raise.
    const string data = "test */";
    MemoryStream stream = null;
    try
    {
        stream = new MemoryStream(Encoding.ASCII.GetBytes(data));
        Scanner scanner = new Scanner(stream);
        stream = null; // scanner takes ownership; don't double-dispose
        Tokenizer tokenizer = new Tokenizer(scanner)
        {
            CommentDelimiters = new[] { new CommentDelimiters("/*", "*/") }
        };

        Token<TokenType> token = tokenizer.GetNextToken();
        Assert.That(token.Value, Is.EqualTo("test"));
        Assert.That(token.Type, Is.EqualTo(TokenType.SYMBOL));
        Assert.That(token.LineNumber, Is.EqualTo(1));
        Assert.That(token.PositionInLine, Is.EqualTo(1));

        Assert.That(() => { token = tokenizer.GetNextToken(); },
            Throws.TypeOf(typeof(UnexpectedCharEncounteredException)));
    }
    finally
    {
        if (stream != null)
            stream.Dispose();
    }
}
示例10: TestTokenizingNumericLiterals
public void TestTokenizingNumericLiterals()
{
const string data = "1 123 0x123 0 0x0 -9 5.0 0.0 5.1 -3.5 0xABC 4.5e8 -4.5e8 0x0L 0xABCL 0x123L"
+ " 0x0UL 0xABCUL 0x123UL 12e4L 0. .0 0x0ee 0xFFFFFFFFULL 1.20f";
MemoryStream stream = null;
try
{
stream = new MemoryStream(Encoding.ASCII.GetBytes(data));
Scanner scanner = new Scanner(stream);
stream = null;
Tokenizer tokenizer = new Tokenizer(scanner);
Token<TokenType> nextToken = tokenizer.GetNextToken();
Assert.That(nextToken.Value, Is.EqualTo("1"));
Assert.That(nextToken.Type, Is.EqualTo(TokenType.NUMERIC_LITERAL));
Assert.That(nextToken.LineNumber, Is.EqualTo(1));
Assert.That(nextToken.PositionInLine, Is.EqualTo(1));
nextToken = tokenizer.GetNextToken();
Assert.That(nextToken.Value, Is.EqualTo("123"));
Assert.That(nextToken.Type, Is.EqualTo(TokenType.NUMERIC_LITERAL));
Assert.That(nextToken.LineNumber, Is.EqualTo(1));
Assert.That(nextToken.PositionInLine, Is.EqualTo(3));
nextToken = tokenizer.GetNextToken();
Assert.That(nextToken.Value, Is.EqualTo("0x123"));
Assert.That(nextToken.Type, Is.EqualTo(TokenType.NUMERIC_LITERAL));
Assert.That(nextToken.LineNumber, Is.EqualTo(1));
Assert.That(nextToken.PositionInLine, Is.EqualTo(7));
nextToken = tokenizer.GetNextToken();
Assert.That(nextToken.Value, Is.EqualTo("0"));
Assert.That(nextToken.Type, Is.EqualTo(TokenType.NUMERIC_LITERAL));
Assert.That(nextToken.LineNumber, Is.EqualTo(1));
Assert.That(nextToken.PositionInLine, Is.EqualTo(13));
nextToken = tokenizer.GetNextToken();
Assert.That(nextToken.Value, Is.EqualTo("0x0"));
Assert.That(nextToken.Type, Is.EqualTo(TokenType.NUMERIC_LITERAL));
Assert.That(nextToken.LineNumber, Is.EqualTo(1));
Assert.That(nextToken.PositionInLine, Is.EqualTo(15));
nextToken = tokenizer.GetNextToken();
Assert.That(nextToken.Value, Is.EqualTo("-9"));
Assert.That(nextToken.Type, Is.EqualTo(TokenType.NUMERIC_LITERAL));
Assert.That(nextToken.LineNumber, Is.EqualTo(1));
Assert.That(nextToken.PositionInLine, Is.EqualTo(19));
nextToken = tokenizer.GetNextToken();
Assert.That(nextToken.Value, Is.EqualTo("5.0"));
Assert.That(nextToken.Type, Is.EqualTo(TokenType.NUMERIC_LITERAL));
Assert.That(nextToken.LineNumber, Is.EqualTo(1));
Assert.That(nextToken.PositionInLine, Is.EqualTo(22));
nextToken = tokenizer.GetNextToken();
Assert.That(nextToken.Value, Is.EqualTo("0.0"));
Assert.That(nextToken.Type, Is.EqualTo(TokenType.NUMERIC_LITERAL));
Assert.That(nextToken.LineNumber, Is.EqualTo(1));
Assert.That(nextToken.PositionInLine, Is.EqualTo(26));
nextToken = tokenizer.GetNextToken();
Assert.That(nextToken.Value, Is.EqualTo("5.1"));
Assert.That(nextToken.Type, Is.EqualTo(TokenType.NUMERIC_LITERAL));
Assert.That(nextToken.LineNumber, Is.EqualTo(1));
Assert.That(nextToken.PositionInLine, Is.EqualTo(30));
nextToken = tokenizer.GetNextToken();
Assert.That(nextToken.Value, Is.EqualTo("-3.5"));
Assert.That(nextToken.Type, Is.EqualTo(TokenType.NUMERIC_LITERAL));
Assert.That(nextToken.LineNumber, Is.EqualTo(1));
Assert.That(nextToken.PositionInLine, Is.EqualTo(34));
nextToken = tokenizer.GetNextToken();
Assert.That(nextToken.Value, Is.EqualTo("0xABC"));
Assert.That(nextToken.Type, Is.EqualTo(TokenType.NUMERIC_LITERAL));
Assert.That(nextToken.LineNumber, Is.EqualTo(1));
Assert.That(nextToken.PositionInLine, Is.EqualTo(39));
nextToken = tokenizer.GetNextToken();
Assert.That(nextToken.Value, Is.EqualTo("4.5e8"));
Assert.That(nextToken.Type, Is.EqualTo(TokenType.NUMERIC_LITERAL));
Assert.That(nextToken.LineNumber, Is.EqualTo(1));
Assert.That(nextToken.PositionInLine, Is.EqualTo(45));
nextToken = tokenizer.GetNextToken();
Assert.That(nextToken.Value, Is.EqualTo("-4.5e8"));
Assert.That(nextToken.Type, Is.EqualTo(TokenType.NUMERIC_LITERAL));
Assert.That(nextToken.LineNumber, Is.EqualTo(1));
Assert.That(nextToken.PositionInLine, Is.EqualTo(51));
nextToken = tokenizer.GetNextToken();
Assert.That(nextToken.Value, Is.EqualTo("0x0L"));
Assert.That(nextToken.Type, Is.EqualTo(TokenType.NUMERIC_LITERAL));
Assert.That(nextToken.LineNumber, Is.EqualTo(1));
Assert.That(nextToken.PositionInLine, Is.EqualTo(58));
nextToken = tokenizer.GetNextToken();
Assert.That(nextToken.Value, Is.EqualTo("0xABCL"));
//.........这里部分代码省略.........
示例11: TestFailingAtTokenizingSymbolsTerminatedByNonPunctuatorOrWhitespace
public void TestFailingAtTokenizingSymbolsTerminatedByNonPunctuatorOrWhitespace()
{
    // A quote immediately following a symbol (no whitespace or punctuator
    // boundary) is illegal, so GetNextToken must throw.
    const string data = "symbol'";
    MemoryStream stream = null;
    try
    {
        stream = new MemoryStream(Encoding.ASCII.GetBytes(data));
        Scanner scanner = new Scanner(stream);
        stream = null; // scanner takes ownership; don't double-dispose
        Tokenizer tokenizer = new Tokenizer(scanner);

        Assert.That(() => { Token<TokenType> unused = tokenizer.GetNextToken(); },
            Throws.TypeOf(typeof(UnexpectedCharEncounteredException)));
    }
    finally
    {
        if (stream != null)
            stream.Dispose();
    }
}
示例12: TestSkippingToNextLineOfTokensWhenTokenTerminatedByNewLine
public void TestSkippingToNextLineOfTokensWhenTokenTerminatedByNewLine()
{
    // After reading symbol1, SkipToNextLine() must position the tokenizer
    // at the start of the following line so symbol2 is read next.
    const string data = " symbol1\nsymbol2";
    MemoryStream stream = null;
    try
    {
        stream = new MemoryStream(Encoding.ASCII.GetBytes(data));
        Scanner scanner = new Scanner(stream);
        stream = null; // scanner takes ownership; don't double-dispose
        Tokenizer tokenizer = new Tokenizer(scanner);

        Assert.That(tokenizer.GetNextToken().Value, Is.EqualTo("symbol1"));
        tokenizer.SkipToNextLine();
        Assert.That(tokenizer.GetNextToken().Value, Is.EqualTo("symbol2"));
    }
    finally
    {
        if (stream != null)
            stream.Dispose();
    }
}
示例13: TestTokenizingTokensTerminatedByPunctuators
public void TestTokenizingTokensTerminatedByPunctuators()
{
    // Punctuators (comma and parentheses) terminate adjacent tokens and
    // are emitted as PUNCTUATOR tokens themselves; 'str' stays a single
    // STRING_LITERAL because of the quote punctuator.
    const string data = "keyword(symbol1, 123, 'str')";
    string punctuators = ",()";
    const char stringLiteralPunctuator = '\'';
    MemoryStream stream = null;
    try
    {
        stream = new MemoryStream(Encoding.ASCII.GetBytes(data));
        Scanner scanner = new Scanner(stream);
        stream = null; // scanner takes ownership; don't double-dispose
        Tokenizer tokenizer = new Tokenizer(scanner)
        {
            Punctuators = punctuators,
            StringLiteralPunctuator = stringLiteralPunctuator
        };

        // Expected token stream, in order: value, type, 1-based column.
        var expected = new[]
        {
            new { Value = "keyword", Type = TokenType.SYMBOL,          Col = 1 },
            new { Value = "(",       Type = TokenType.PUNCTUATOR,      Col = 8 },
            new { Value = "symbol1", Type = TokenType.SYMBOL,          Col = 9 },
            new { Value = ",",       Type = TokenType.PUNCTUATOR,      Col = 16 },
            new { Value = "123",     Type = TokenType.NUMERIC_LITERAL, Col = 18 },
            new { Value = ",",       Type = TokenType.PUNCTUATOR,      Col = 21 },
            new { Value = "'str'",   Type = TokenType.STRING_LITERAL,  Col = 23 },
            new { Value = ")",       Type = TokenType.PUNCTUATOR,      Col = 28 },
        };

        foreach (var exp in expected)
        {
            Token<TokenType> token = tokenizer.GetNextToken();
            Assert.That(token.Value, Is.EqualTo(exp.Value));
            Assert.That(token.Type, Is.EqualTo(exp.Type));
            Assert.That(token.LineNumber, Is.EqualTo(1));
            Assert.That(token.PositionInLine, Is.EqualTo(exp.Col));
        }

        Assert.That(tokenizer.GetNextToken(), Is.Null);
    }
    finally
    {
        if (stream != null)
            stream.Dispose();
    }
}
示例14: ParseTextures
private void ParseTextures(Tokenizer levelTokens)
{
    // TEXTURES <count>, then one texture name per token, terminated by
    // the NUMSECTORS keyword (whose count is consumed but ignored here).
    levelTokens.EnsureNextToken("TEXTURES");
    levelTokens.RequireNextInt(); // declared texture count; not validated

    for (; ; )
    {
        if (levelTokens.IsNextToken("NUMSECTORS"))
        {
            levelTokens.RequireNextInt(); // eat sector count.
            break;
        }
        _textures.Add(levelTokens.GetNextToken());
    }
}
示例15: ParseSectors
private void ParseSectors(Tokenizer levelTokens)
{
    // Each sector starts with the SECTOR keyword followed by an int, then
    // its header, vertices and walls. Loop until the token stream ends.
    for (string token = levelTokens.GetNextToken(); token != null; token = levelTokens.GetNextToken())
    {
        levelTokens.CheckThrow(token == "SECTOR", "Expected SECTOR!");
        levelTokens.RequireNextInt(); // int after SECTOR is consumed, not used
        Sector sector = ParseSectorHeader(levelTokens);
        ParseSectorVertices(sector, levelTokens);
        ParseSectorWalls(sector, levelTokens);
        Sectors.Add(sector);
    }
}