本文整理汇总了C#中Microsoft.VisualStudio.Package.TokenInfo类的典型用法代码示例。如果您正苦于以下问题:C# TokenInfo类的具体用法?C# TokenInfo怎么用?C# TokenInfo使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
TokenInfo类属于Microsoft.VisualStudio.Package命名空间,在下文中一共展示了TokenInfo类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: WhenAttemptingToGetATokenFromThePegLexer
// Test-fixture setup: creates a mocked BooTokenLexer plus fresh IDE (TokenInfo)
// and colorizer (ColorizerToken) token instances for the assertions in this fixture.
public WhenAttemptingToGetATokenFromThePegLexer()
: base()
{
// Rhino Mocks stub for the lexer under test.
lexer = MockRepository.GenerateMock<BooTokenLexer>();
token = new TokenInfo();
pegToken = new ColorizerToken();
}
示例2: GetDeclarations
// ParseReason.CompleteWord
// ParseReason.DisplayMemberList
// ParseReason.MemberSelect
// ParseReason.MemberSelectAndHilightBraces
/// <summary>
/// Builds the completion / member-list declarations for the caret position.
/// Called for CompleteWord, DisplayMemberList, MemberSelect and
/// MemberSelectAndHighlightBraces parse reasons.
/// </summary>
/// <param name="view">The text view the request originates from.</param>
/// <param name="line">Zero-based caret line.</param>
/// <param name="col">Zero-based caret column.</param>
/// <param name="info">Token information at the caret (unused here).</param>
/// <param name="reason">Why the language service asked for declarations.</param>
/// <returns>The resolved declarations wrapped for the IDE.</returns>
/// <exception cref="ArgumentException">Thrown for any other parse reason.</exception>
public override Microsoft.VisualStudio.Package.Declarations GetDeclarations(IVsTextView view, int line, int col, TokenInfo info, ParseReason reason)
{
    IList<Declaration> declarations;
    switch (reason)
    {
        case ParseReason.CompleteWord:
            // Completing inside a step line offers step members; anywhere
            // else offers keyword completions.
            var tokenInfo = GetTokenInfoOfFirstTokenOnLine(view, line, col);
            if (tokenInfo.Token == (int)GherkinTerm.Step)
                declarations = resolver.FindMembers(StepProvider, Grammar, line, col);
            else
                declarations = resolver.FindCompletions(StepProvider, Grammar, line, col);
            break;
        case ParseReason.DisplayMemberList:
        case ParseReason.MemberSelect:
        case ParseReason.MemberSelectAndHighlightBraces:
            declarations = resolver.FindMembers(StepProvider, Grammar, line, col);
            break;
        default:
            // BUG FIX: the original passed "reason" as the exception *message*;
            // supply a real message and name the parameter via nameof.
            throw new ArgumentException("Unsupported parse reason.", nameof(reason));
    }
    return new Declarations(declarations);
}
示例3: ScanTokenAndProvideInfoAboutIt
/// <summary>
/// Parses the next language token from the current line and returns information about it.
/// </summary>
/// <param name="tokenInfo">The TokenInfo structure to be filled in; may be null, in which
/// case the token is consumed but not reported.</param>
/// <param name="state">The scanner's current state value (not used by this scanner).</param>
/// <returns>True if a token was parsed from the current line and information returned;
/// otherwise false, indicating no more tokens are on the current line.</returns>
public bool ScanTokenAndProvideInfoAboutIt(TokenInfo tokenInfo, ref int state)
{
    // Nothing left on the line to parse.
    if (sourceString.Length == 0)
    {
        return false;
    }

    TokenColor color = TokenColor.Text;
    int charsMatched = 0;

    // Compare the input string with the patterns from the correspondence table;
    // on success charsMatched/color describe the recognized prefix.
    MatchRegEx(sourceString, ref charsMatched, ref color);

    // BUG FIX: if nothing matched we must stop. The original returned true
    // without consuming any characters, which makes the colorizer call this
    // method forever on the same line.
    if (charsMatched == 0)
    {
        return false;
    }

    // Fill in the TokenInfo structure on the basis of the examination.
    if (tokenInfo != null)
    {
        tokenInfo.Color = color;
        tokenInfo.Type = TokenType.Text;
        tokenInfo.StartIndex = currentPos;
        // End index is inclusive; charsMatched >= 1 is guaranteed above.
        tokenInfo.EndIndex = currentPos + charsMatched - 1;
    }

    // Advance past the consumed characters and keep only the unprocessed tail.
    currentPos += charsMatched;
    sourceString = sourceString.Substring(charsMatched);
    return true;
}
示例4: ScanTokenAndProvideInfoAboutIt
/// <summary>
/// Pulls the next token from the underlying lexer and copies its scan state,
/// color, type and trigger information into <paramref name="tokenInfo"/>.
/// </summary>
/// <returns>False when the lexer is missing, yields no info, or produced no
/// token; otherwise true until the end of the line is reached.</returns>
public bool ScanTokenAndProvideInfoAboutIt(TokenInfo tokenInfo, ref int state)
{
    if (_lexer == null)
        return false;

    ScanTokenInfo scanned = _lexer.GetToken((ScanState)state);
    if (scanned == null)
        return false;

    // Remember the lexer's state and colorization context for later calls.
    state = (int)scanned.State;
    _lastColor = (TokenColor)scanned.Color;
    _colorizeEnd = scanned.ColorizeEnd;

    tokenInfo.Color = _lastColor;
    tokenInfo.Type = (TokenType)scanned.Type;
    tokenInfo.Trigger = (TokenTriggers)scanned.Triggers;

    if (scanned.Token == null)
        return false;

    // The -1/-2 offsets appear to convert 1-based token columns into the
    // 0-based, inclusive indices TokenInfo expects — TODO confirm with lexer.
    tokenInfo.StartIndex = scanned.Token.Location.Column - 1;
    tokenInfo.EndIndex = scanned.Token.Location.EndColumn - 2;
    return !scanned.IsEndOfLine;
}
示例5: ScanTokenAndProvideInfoAboutIt
// Reads each token in a source line and performs syntax coloring. The IDE
// keeps calling this for the same line until false is returned.
// Queued tokens (produced by earlier processing) are drained before the
// scanner is asked for new input — do not reorder these two phases.
public bool ScanTokenAndProvideInfoAboutIt(TokenInfo tokenInfo, ref int state)
{
// Drain previously queued tokens first; Returning(...) centralizes the
// bookkeeping done before handing a result back to the IDE.
if (_queuedTokens.Count > 0)
{
UpdateTokenInfoFromQueue(tokenInfo);
return Returning(tokenInfo, state, _previousToken, true);
}
Token token = _parser.Scanner.VsReadToken(ref state);
// Only report real tokens: not EOL/EOF, not lexer errors, not zero-length.
if (token != null && token.Terminal != GherkinGrammar.CurrentGrammar.Eof && token.Category != TokenCategory.Error && token.Length > 0)
{
// TokenInfo end index is inclusive, hence the -1.
tokenInfo.StartIndex = token.Location.Position;
tokenInfo.EndIndex = tokenInfo.StartIndex + token.Length - 1;
// Gherkin keywords carry their term so completion can distinguish steps.
var gherkinKeyTerm = token.KeyTerm as GherkinKeyTerm;
if (gherkinKeyTerm != null)
tokenInfo.Token = (int)gherkinKeyTerm.Term;
SetColorAndType(token, tokenInfo);
SetTrigger(token, tokenInfo);
// May enqueue follow-up tokens consumed by the queue branch above.
ProcessStepIdentifiers(token, tokenInfo);
return Returning(tokenInfo, state, token, true);
}
// EOL/EOF/error: tell the IDE this line is done (token may be null here).
return Returning(tokenInfo, state, token, false);
}
示例6: GetNextToken
/// <summary>
/// Advances the scanner by one token, dispatching on the current parse state
/// (quotes, heredocs, block comments or plain text), and records where the
/// token ends in <paramref name="tokenInfo"/>.
/// </summary>
/// <returns>True when a reportable token was found; false otherwise.</returns>
public bool GetNextToken(TokenInfo tokenInfo, ref ParseState state)
{
    bool matched = false;

    // Stop once the whole source line has been consumed.
    if (_offset >= _source.Length) return false;

    int tokenEnd = -1;
    switch (state)
    {
        case ParseState.InSingleQuotes:
            state = HandleSingleQuotes(out tokenEnd, ref matched);
            break;
        case ParseState.InDoubleQuotes:
            state = HandleDoubleQuotes(out tokenEnd, ref matched);
            break;
        case ParseState.InSingleQuoteHeredoc:
            // Heredoc/block-comment handlers never flag a token themselves.
            state = HandleHeredoc('\'', state, out tokenEnd);
            break;
        case ParseState.InDoubleQuoteHeredoc:
            state = HandleHeredoc('"', state, out tokenEnd);
            break;
        case ParseState.InBlockComment:
            state = HandleBlockComment(out tokenEnd);
            break;
        case ParseState.InText:
            state = HandleToken(ref matched, ref tokenEnd);
            break;
    }

    tokenInfo.EndIndex = tokenEnd;
    // Resume scanning just past the reported end on the next call.
    _offset = tokenEnd + 1;
    return matched;
}
示例7: ResolveBooTokenStartAndEndIndex
/// <summary>
/// Maps an ANTLR token's 1-based column and text length onto the start/end
/// indices the IDE expects in <paramref name="tokenInfo"/>, with special
/// handling for quoted-string token kinds.
/// </summary>
public void ResolveBooTokenStartAndEndIndex(antlr.CommonToken token, TokenInfo tokenInfo)
{
    string text = token.getText();
    int textLength = text == null ? 0 : text.Length;
    int startColumn = token.getColumn() - 1;
    int afterToken = token.getColumn() + textLength;

    if (token.Type == BooLexer.SINGLE_QUOTED_STRING || token.Type == BooLexer.DOUBLE_QUOTED_STRING)
    {
        // Single/double quoted strings: span runs one past the token text,
        // presumably to cover the closing quote — TODO confirm.
        tokenInfo.StartIndex = startColumn;
        tokenInfo.EndIndex = afterToken;
    }
    else if (token.Type == BooLexer.TRIPLE_QUOTED_STRING)
    {
        // Triple-quoted strings: the +5 appears to account for the six
        // delimiter quotes minus the inclusive end index — TODO confirm.
        tokenInfo.StartIndex = startColumn;
        tokenInfo.EndIndex = startColumn + 5 + text.Length;
    }
    else if (token.Type == 1)
    {
        // EOF token: leave the indices untouched.
        return;
    }
    else
    {
        // Ordinary tokens: inclusive end index.
        tokenInfo.StartIndex = startColumn;
        tokenInfo.EndIndex = startColumn + (text.Length - 1);
    }
}
示例8: WhenAttemptingToGetATokenFromThePegLexer
// Test-fixture setup: partial-mocks the PEG lexer and creates fresh IDE
// (TokenInfo) and PEG (PegToken) token instances used by this fixture.
public WhenAttemptingToGetATokenFromThePegLexer()
: base()
{
// Partial mock so un-stubbed PegLexer members keep their real behavior.
lexer = Mocks.PartialMock<PegLexer>();
token = new TokenInfo();
pegToken = new PegToken();
}
示例9: ColorizeLine
/// <summary>
/// Colors one line of text by running the language scanner over it and
/// writing a TokenColor value into <paramref name="attrs"/> for every
/// character a token covers.
/// </summary>
/// <returns>The scanner state after processing the line.</returns>
public override int ColorizeLine(int line, int length, IntPtr ptr, int state, uint[] attrs)
{
    if (attrs == null) return state;

    // Must initialize the colors in all cases, otherwise the editor shows
    // random color junk for characters no token covers.
    for (int i = 0; i < attrs.Length; i++)
        attrs[i] = (uint)TokenColor.Text;

    if (this.Scanner == null)
        return state;

    try
    {
        string text = Marshal.PtrToStringUni(ptr, length);
        this.Scanner.SetSource(text, 0);

        TokenInfo tokenInfo = new TokenInfo();
        tokenInfo.EndIndex = -1;
        while (this.Scanner.ScanTokenAndProvideInfoAboutIt(tokenInfo, ref state))
        {
            // Paint the token's span, clamped to the attribute buffer.
            for (int i = tokenInfo.StartIndex; i <= tokenInfo.EndIndex; i++)
            {
                if (i >= 0 && i < attrs.Length)
                {
                    attrs[i] = (uint)tokenInfo.Color;
                }
            }
        }
    }
    catch (Exception)
    {
        // Deliberately best-effort: a scanner failure must not take down the
        // editor — the line simply keeps its default coloring.
    }

    return state;
}
示例10: ScanTokenAndProvideInfoAboutIt
// Reads the next Boo token from the ANTLR lexer and fills tokenInfo with its
// position, type and color. Returns false at end of line/file, true otherwise.
// State value 13 marks "inside a multi-line comment"; 0 is the neutral state.
public bool ScanTokenAndProvideInfoAboutIt(TokenInfo tokenInfo, ref int state)
{
try
{
_reusableToken = lexer.nextToken() as antlr.CommonToken;
}
catch (antlr.TokenStreamRecognitionException e)
{
// NOTE(review): recognition errors are swallowed and _reusableToken keeps
// its value from the PREVIOUS call (it is a field) — the code below then
// operates on a stale token. Verify this cannot null-ref or miscolor.
}
// Resolve token start and stop positions. Needs to happen before the
// EOF/state handling below.
ResolveBooTokenStartAndEndIndex(_reusableToken, tokenInfo);
// Type 1 is the ANTLR EOF token: we're done with this line.
if (_reusableToken.Type == 1)
{
tokenInfo.Type = TokenType.WhiteSpace;
// Inside an ML_COMMENT zone the whole line must still be painted as a
// comment even though the lexer produced no token for it.
if (state == 13)
{
tokenInfo.StartIndex = 0;
tokenInfo.EndIndex = _currentLine.Length;
tokenInfo.Type = TokenType.Comment;
tokenInfo.Color = TokenColor.Comment;
}
return false;
}
else if (state == 13)
{
// Still inside a multi-line comment: force the token to be a comment.
_reusableToken.Type = BooLexer.ML_COMMENT;
}
if (_reusableToken.Type == BooLexer.ML_COMMENT)
{
state = 13;
// The lexer signals leaving the comment by smuggling a marker string
// through the token's filename field — TODO confirm this contract.
if (_reusableToken.getFilename().Equals("LEAVINGML_COMMENT"))
{
state = 0;
}
// Workaround for an endless loop in the parser when a type-120 token
// sits at or past the end of the line.
// NOTE(review): unreachable as written — Type was forced to ML_COMMENT
// above, so it can only equal 120 if ML_COMMENT == 120; verify.
if (_reusableToken.Type == 120 && _reusableToken.getColumn() >= _currentLine.Length)
{
tokenInfo.Type = TokenType.WhiteSpace;
return false;
}
}
// Set up the token color and type from the (possibly adjusted) token.
ResolveBooTokenTypeAndColor(_reusableToken, tokenInfo);
return true;
}
示例11: ColorizeLine
// Colors one line by scanning it token-by-token, painting the gaps between
// tokens as plain text. Returns the scanner state after the line.
// Note: this TokenInfo variant uses lower-case field names (startIndex,
// endIndex, color), unlike the property-based variant elsewhere in this file.
public virtual int ColorizeLine(int line, int length, IntPtr ptr, int state, uint[] attrs) {
if (this.languageService == null) return 0;
if (this.scanner == null) return 0;
string text = Marshal.PtrToStringUni(ptr, length);
this.scanner.SetSource(text, 0);
TokenInfo tokenInfo = new TokenInfo();
tokenInfo.endIndex = -1;
bool firstTime = true;
int linepos = 0;
while (this.scanner.ScanTokenAndProvideInfoAboutIt(tokenInfo, ref state)){
if (firstTime){
// Default-fill everything before the first token. This loop stops at
// startIndex-2; the catch-up loop below covers the remaining cell.
if (attrs != null && tokenInfo.startIndex > 0) {
for (linepos = 0; linepos < tokenInfo.startIndex-1; linepos++)
attrs[linepos] = (uint)TokenColor.Text;
}
firstTime = false;
}
if (attrs != null){
// Fill any gap since the previous token, then paint this token's span.
for (; linepos < tokenInfo.startIndex; linepos++)
attrs[linepos] = (uint)TokenColor.Text;
for (; linepos <= tokenInfo.endIndex; linepos++)
attrs[linepos] = (uint)tokenInfo.color;
}
}
// Default-fill whatever trails the last token on the line.
if (linepos < length-1 && attrs != null) {
for (; linepos < length; linepos++)
attrs[linepos] = (uint)TokenColor.Text;
}
return state;
}
示例12: ScanTokenAndProvideInfoAboutIt
public bool ScanTokenAndProvideInfoAboutIt(TokenInfo tokenInfo, ref int state)
{
// Reads each token in a source line and performs syntax coloring. It will continue to
// be called for the source until false is returned.
Token token = parser.Scanner.VsReadToken(ref state);
// !EOL and !EOF
if (token != null && token.Terminal != Grammar.CurrentGrammar.Eof && token.Category != TokenCategory.Error)
{
tokenInfo.StartIndex = token.Location.Position;
tokenInfo.EndIndex = tokenInfo.StartIndex + token.Length - 1;
if (token.EditorInfo != null) {
tokenInfo.Color = (Microsoft.VisualStudio.Package.TokenColor)token.EditorInfo.Color;
tokenInfo.Type = (Microsoft.VisualStudio.Package.TokenType)token.EditorInfo.Type;
}
if (token.KeyTerm != null && token.KeyTerm.EditorInfo != null)
{
tokenInfo.Trigger =
(Microsoft.VisualStudio.Package.TokenTriggers)token.KeyTerm.EditorInfo.Triggers;
}
else
{
if (token.EditorInfo != null) {
tokenInfo.Trigger =
(Microsoft.VisualStudio.Package.TokenTriggers)token.EditorInfo.Triggers;
}
}
return true;
}
return false;
}
示例13: WhenTranslatingTokens
// Test-fixture setup: creates the colorizer token, the IDE token it is
// translated into, and the Boo scanner performing the translation.
public WhenTranslatingTokens()
: base()
{
pegToken = new ColorizerToken();
ideToken = new TokenInfo();
scanner = new BooScanner();
}
示例14: BoolTypeReference
/// <summary>
/// Compiling "a as bool" should map the token at (0, 5) onto a type
/// reference whose tooltip reads "struct bool" and spans columns 5-9.
/// </summary>
public void BoolTypeReference()
{
    var results = RunCompiler(
        @"a as bool"
    );
    var mappedToken = results.GetMappedToken(0, 5);

    // Span of the "bool" type reference on the first line.
    var expected = new TextSpan
    {
        iStartLine = 0,
        iEndLine = 0,
        iStartIndex = 5,
        iEndIndex = 9
    };

    Assert.NotNull(mappedToken);
    Assert.AreEqual(2, mappedToken.Nodes.Count);
    Assert.IsInstanceOf(typeof(MappedTypeReference), mappedToken.Nodes[1]);

    TextSpan ts = new TextSpan();
    Assert.AreEqual("struct bool", mappedToken.GetDataTiptext(out ts));
    Assert.AreEqual(ts, expected);

    mappedToken.Goto(out ts);

    // Smoke-test GetDeclarations with default token/reason values.
    var tokenInfo = new TokenInfo();
    var parseReason = new ParseReason();
    mappedToken.GetDeclarations(tokenInfo, parseReason);
}
示例15: ScanTokenAndProvideInfoAboutIt
/// <summary>
/// Reads the next shader token from the lexer and maps its kind onto the
/// IDE color and token type written into <paramref name="tokenInfo"/>.
/// </summary>
/// <returns>False at end of file, true otherwise.</returns>
public bool ScanTokenAndProvideInfoAboutIt(TokenInfo tokenInfo, ref int state)
{
    int start, end;
    ShaderToken token = (ShaderToken)lex.GetNext(ref state, out start, out end);

    // EOF: no more tokens on this line.
    if (token == ShaderToken.EOF)
        return false;

    tokenInfo.StartIndex = start;
    tokenInfo.EndIndex = end;

    // Resolve color/type into locals and assign once at the end.
    TokenColor color;
    TokenType type;
    switch (token)
    {
        case ShaderToken.KEYWORD:
        case ShaderToken.TYPE:
        case ShaderToken.KEYWORD_FX:
            color = TokenColor.Keyword;
            type = TokenType.Keyword;
            break;
        case ShaderToken.COMMENT:
            color = TokenColor.Comment;
            type = TokenType.Comment;
            break;
        case ShaderToken.COMMENT_LINE:
            color = TokenColor.Comment;
            type = TokenType.LineComment;
            break;
        case ShaderToken.NUMBER:
        case ShaderToken.FLOAT:
            color = TokenColor.Number;
            type = TokenType.Literal;
            break;
        case ShaderToken.STRING_LITERAL:
            color = TokenColor.String;
            type = TokenType.Literal;
            break;
        case ShaderToken.INTRINSIC:
            // Ugly: custom palette slot. Original TODO: introduce an
            // NShaderTokenColor to keep track of colors 6-7-8.
            color = (TokenColor)6;
            type = TokenType.Identifier;
            break;
        case ShaderToken.KEYWORD_SPECIAL:
            color = (TokenColor)7;
            type = TokenType.Identifier;
            break;
        case ShaderToken.PREPROCESSOR:
            color = (TokenColor)8;
            type = TokenType.Keyword;
            break;
        default:
            color = TokenColor.Text;
            type = TokenType.Text;
            break;
    }
    tokenInfo.Color = color;
    tokenInfo.Type = type;
    return true;
}