C++ Lexer::LexFromRawLexer Method Code Examples

This article collects typical usage examples of the C++ Lexer::LexFromRawLexer method. If you are asking how Lexer::LexFromRawLexer is used in practice, or what its calls look like in real code, the curated examples below may help. You can also explore further usage examples of the Lexer class, to which this method belongs.


The following presents 7 code examples of the Lexer::LexFromRawLexer method, sorted by popularity by default.
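
Before diving into the examples, note that LexFromRawLexer is driven from a Lexer constructed directly over a file buffer in "raw" mode, without a Preprocessor. The following is a minimal setup sketch (not taken from the examples below; lexFileRaw is a hypothetical name, and the exact Lexer constructor overloads and the getBufferOrFake helper vary between Clang versions). It illustrates the call pattern all of the examples rely on, in particular that LexFromRawLexer returns true once the lexer reaches the end of the buffer.

#include "clang/Basic/LangOptions.h"
#include "clang/Basic/SourceManager.h"
#include "clang/Lex/Lexer.h"

// Sketch: lex an entire file in raw mode and visit every token.
void lexFileRaw(clang::SourceManager &SM, const clang::LangOptions &LangOpts,
                clang::FileID FID) {
  // A Lexer built this way runs in raw mode: no preprocessing is done and
  // identifiers come back as tok::raw_identifier rather than tok::identifier.
  clang::Lexer RawLex(FID, SM.getBufferOrFake(FID), SM, LangOpts);
  clang::Token Tok;
  bool AtEnd = false;
  while (!AtEnd) {
    // LexFromRawLexer returns true when the end of the buffer has been
    // reached; the token produced by that final call may still be meaningful.
    AtEnd = RawLex.LexFromRawLexer(Tok);
    if (Tok.is(clang::tok::eof))
      break;
    // ... inspect Tok.getKind(), Tok.getLocation(), Tok.getLength() ...
  }
}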

Example 1: NextIdentifierName

/// Find the next identifier in the pragma directive specified by \p RawToken.
StringRef InclusionRewriter::NextIdentifierName(Lexer &RawLex,
                                                Token &RawToken) {
  RawLex.LexFromRawLexer(RawToken);
  if (RawToken.is(tok::raw_identifier))
    PP.LookUpIdentifierInfo(RawToken);
  if (RawToken.is(tok::identifier))
    return RawToken.getIdentifierInfo()->getName();
  return StringRef();
}
Developer: Godin, Project: clang, Lines: 10, Source file: InclusionRewriter.cpp

Example 2: checkAndConsumeInclusiveDirective

// Check if a sequence of tokens is like
//    "#include ("header.h" | <header.h>)".
// If it is, \p Tok will be the token after this directive; otherwise, it can be
// any token after the given \p Tok (including \p Tok).
bool checkAndConsumeInclusiveDirective(Lexer &Lex, Token &Tok) {
  auto Matched = [&]() {
    Lex.LexFromRawLexer(Tok);
    return true;
  };
  if (Tok.is(tok::hash) && !Lex.LexFromRawLexer(Tok) &&
      Tok.is(tok::raw_identifier) && Tok.getRawIdentifier() == "include") {
    if (Lex.LexFromRawLexer(Tok))
      return false;
    if (Tok.is(tok::string_literal))
      return Matched();
    if (Tok.is(tok::less)) {
      while (!Lex.LexFromRawLexer(Tok) && Tok.isNot(tok::greater)) {
      }
      if (Tok.is(tok::greater))
        return Matched();
    }
  }
  return false;
}
Developer: Teemperor, Project: clang, Lines: 24, Source file: HeaderIncludes.cpp

Example 3: CommentOutDirective

/// Print characters from \p FromFile starting at \p NextToWrite up until the
/// inclusion directive at \p StartToken, then print out the inclusion
/// directive disabled by a #if directive, updating \p NextToWrite
/// and \p Line to track the number of source lines visited and the progress
/// through the \p FromFile buffer.
void InclusionRewriter::CommentOutDirective(Lexer &DirectiveLex,
                                            const Token &StartToken,
                                            const MemoryBuffer &FromFile,
                                            StringRef EOL,
                                            unsigned &NextToWrite, int &Line) {
  OutputContentUpTo(FromFile, NextToWrite,
    SM.getFileOffset(StartToken.getLocation()), EOL, Line);
  Token DirectiveToken;
  do {
    DirectiveLex.LexFromRawLexer(DirectiveToken);
  } while (!DirectiveToken.is(tok::eod) && DirectiveToken.isNot(tok::eof));
  OS << "#if 0 /* expanded by -frewrite-includes */" << EOL;
  OutputContentUpTo(FromFile, NextToWrite,
    SM.getFileOffset(DirectiveToken.getLocation()) + DirectiveToken.getLength(),
    EOL, Line);
  OS << "#endif /* expanded by -frewrite-includes */" << EOL;
}
Developer: Godin, Project: clang, Lines: 22, Source file: InclusionRewriter.cpp

Example 4: CommentOutDirective

/// Print characters from \p FromFile starting at \p NextToWrite up until the
/// inclusion directive at \p StartToken, then print out the inclusion
/// directive disabled by a #if directive, updating \p NextToWrite
/// and \p Line to track the number of source lines visited and the progress
/// through the \p FromFile buffer.
void InclusionRewriter::CommentOutDirective(Lexer &DirectiveLex,
                                            const Token &StartToken,
                                            const MemoryBuffer &FromFile,
                                            StringRef LocalEOL,
                                            unsigned &NextToWrite, int &Line) {
  OutputContentUpTo(FromFile, NextToWrite,
                    SM.getFileOffset(StartToken.getLocation()), LocalEOL, Line,
                    false);
  Token DirectiveToken;
  do {
    DirectiveLex.LexFromRawLexer(DirectiveToken);
  } while (!DirectiveToken.is(tok::eod) && DirectiveToken.isNot(tok::eof));
  if (&FromFile == PredefinesBuffer) {
    // OutputContentUpTo() would not output anything anyway.
    return;
  }
  OS << "#if 0 /* expanded by -frewrite-includes */" << MainEOL;
  OutputContentUpTo(FromFile, NextToWrite,
                    SM.getFileOffset(DirectiveToken.getLocation()) +
                        DirectiveToken.getLength(),
                    LocalEOL, Line, true);
  OS << "#endif /* expanded by -frewrite-includes */" << MainEOL;
}
Developer: FrozenGene, Project: clang_trunk, Lines: 28, Source file: InclusionRewriter.cpp
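
In both CommentOutDirective variants the net effect on the rewritten output is the same: the original directive is kept in the text but disabled, and the contents of the included file are spliced in after it. As a rough illustration (not output copied from this page, and ignoring the line markers the rewriter also emits), a directive #include "header.h" ends up looking like:

#if 0 /* expanded by -frewrite-includes */
#include "header.h"
#endif /* expanded by -frewrite-includes */
// ... contents of header.h follow here in the rewritten output ...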

Example 5: LexTokens

PTHEntry PTHWriter::LexTokens(Lexer& L) {
  // Pad 0's so that we emit tokens to a 4-byte alignment.
  // This speeds up reading them back in.
  Pad(Out, 4);
  Offset TokenOff = (Offset) Out.tell();

  // Keep track of matching '#if' ... '#endif'.
  typedef std::vector<std::pair<Offset, unsigned> > PPCondTable;
  PPCondTable PPCond;
  std::vector<unsigned> PPStartCond;
  bool ParsingPreprocessorDirective = false;
  Token Tok;

  do {
    L.LexFromRawLexer(Tok);
  NextToken:

    if ((Tok.isAtStartOfLine() || Tok.is(tok::eof)) &&
        ParsingPreprocessorDirective) {
      // Insert an eod token into the token cache.  It has the same
      // position as the next token that is not on the same line as the
      // preprocessor directive.  Observe that we continue processing
      // 'Tok' when we exit this branch.
      Token Tmp = Tok;
      Tmp.setKind(tok::eod);
      Tmp.clearFlag(Token::StartOfLine);
      Tmp.setIdentifierInfo(0);
      EmitToken(Tmp);
      ParsingPreprocessorDirective = false;
    }

    if (Tok.is(tok::raw_identifier)) {
      PP.LookUpIdentifierInfo(Tok);
      EmitToken(Tok);
      continue;
    }

    if (Tok.is(tok::hash) && Tok.isAtStartOfLine()) {
      // Special processing for #include.  Store the '#' token and lex
      // the next token.
      assert(!ParsingPreprocessorDirective);
      Offset HashOff = (Offset) Out.tell();

      // Get the next token.
      Token NextTok;
      L.LexFromRawLexer(NextTok);

      // If we see the start of line, then we had a null directive "#".  In
      // this case, discard both tokens.
      if (NextTok.isAtStartOfLine())
        goto NextToken;

      // The token is the start of a directive.  Emit it.
      EmitToken(Tok);
      Tok = NextTok;

      // Did we see 'include'/'import'/'include_next'?
      if (Tok.isNot(tok::raw_identifier)) {
        EmitToken(Tok);
        continue;
      }

      IdentifierInfo* II = PP.LookUpIdentifierInfo(Tok);
      tok::PPKeywordKind K = II->getPPKeywordID();

      ParsingPreprocessorDirective = true;

      switch (K) {
      case tok::pp_not_keyword:
        // Invalid directives "#foo" can occur in #if 0 blocks etc, just pass
        // them through.
      default:
        break;

      case tok::pp_include:
      case tok::pp_import:
      case tok::pp_include_next: {
        // Save the 'include' token.
        EmitToken(Tok);
        // Lex the next token as an include string.
        L.setParsingPreprocessorDirective(true);
        L.LexIncludeFilename(Tok);
        L.setParsingPreprocessorDirective(false);
        assert(!Tok.isAtStartOfLine());
        if (Tok.is(tok::raw_identifier))
          PP.LookUpIdentifierInfo(Tok);

        break;
      }
      case tok::pp_if:
      case tok::pp_ifdef:
      case tok::pp_ifndef: {
        // Add an entry for '#if' and friends.  We initially set the target
        // index to 0.  This will get backpatched when we hit #endif.
        PPStartCond.push_back(PPCond.size());
        PPCond.push_back(std::make_pair(HashOff, 0U));
        break;
      }
      case tok::pp_endif: {
        // Add an entry for '#endif'.  We set the target table index to itself.
//......... (part of the code omitted here) .........
Developer: liuzhiping, Project: clang-or1k, Lines: 101, Source file: CacheTokens.cpp

Example 6: HandleHasInclude

// Expand __has_include and __has_include_next if possible. If there's no
// definitive answer return false.
bool InclusionRewriter::HandleHasInclude(
    FileID FileId, Lexer &RawLex, const DirectoryLookup *Lookup, Token &Tok,
    bool &FileExists) {
  // Lex the opening paren.
  RawLex.LexFromRawLexer(Tok);
  if (Tok.isNot(tok::l_paren))
    return false;

  RawLex.LexFromRawLexer(Tok);

  SmallString<128> FilenameBuffer;
  StringRef Filename;
  // Since the raw lexer doesn't give us angle_literals we have to parse them
  // ourselves.
  // FIXME: What to do if the file name is a macro?
  if (Tok.is(tok::less)) {
    RawLex.LexFromRawLexer(Tok);

    FilenameBuffer += '<';
    do {
      if (Tok.is(tok::eod)) // Sanity check.
        return false;

      if (Tok.is(tok::raw_identifier))
        PP.LookUpIdentifierInfo(Tok);

      // Get the string piece.
      SmallVector<char, 128> TmpBuffer;
      bool Invalid = false;
      StringRef TmpName = PP.getSpelling(Tok, TmpBuffer, &Invalid);
      if (Invalid)
        return false;

      FilenameBuffer += TmpName;

      RawLex.LexFromRawLexer(Tok);
    } while (Tok.isNot(tok::greater));

    FilenameBuffer += '>';
    Filename = FilenameBuffer;
  } else {
    if (Tok.isNot(tok::string_literal))
      return false;

    bool Invalid = false;
    Filename = PP.getSpelling(Tok, FilenameBuffer, &Invalid);
    if (Invalid)
      return false;
  }

  // Lex the closing paren.
  RawLex.LexFromRawLexer(Tok);
  if (Tok.isNot(tok::r_paren))
    return false;

  // Now ask HeaderInfo if it knows about the header.
  // FIXME: Subframeworks aren't handled here. Do we care?
  bool isAngled = PP.GetIncludeFilenameSpelling(Tok.getLocation(), Filename);
  const DirectoryLookup *CurDir;
  const FileEntry *File = PP.getHeaderSearchInfo().LookupFile(
      Filename, SourceLocation(), isAngled, nullptr, CurDir,
      PP.getSourceManager().getFileEntryForID(FileId), nullptr, nullptr,
      nullptr, false);

  FileExists = File != nullptr;
  return true;
}
Developer: KeeganRen, Project: clang, Lines: 69, Source file: InclusionRewriter.cpp

Example 7: skipComments

void skipComments(Lexer &Lex, Token &Tok) {
  while (Tok.is(tok::comment))
    if (Lex.LexFromRawLexer(Tok))
      return;
}
Developer: Teemperor, Project: clang, Lines: 5, Source file: HeaderIncludes.cpp
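
Examples 2 and 7 come from the same file and are naturally combined: skip any leading comment tokens, then try to consume a single #include directive. A small hypothetical sketch of such a caller follows (consumeLeadingInclude is not part of HeaderIncludes.cpp; it only illustrates how the two helpers compose, and it assumes the Lexer has been configured to retain comments so that tok::comment tokens are actually produced).

// Hypothetical caller combining checkAndConsumeInclusiveDirective (example 2)
// and skipComments (example 7).
bool consumeLeadingInclude(Lexer &Lex, Token &Tok) {
  // Prime the first token; a true return means the buffer is already exhausted.
  if (Lex.LexFromRawLexer(Tok))
    return false;
  // Advance Tok past any comment tokens.
  skipComments(Lex, Tok);
  // Consume '#include "header.h"' or '#include <header.h>' if that is what
  // follows; on success, Tok ends up on the token after the directive.
  return checkAndConsumeInclusiveDirective(Lex, Tok);
}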

