#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/FormatVariadic.h"
#include "llvm/Support/raw_ostream.h"
        return SM.isBeforeInTranslationUnit(T.location(), R.getBegin());

        return !SM.isBeforeInTranslationUnit(R.getEnd(), T.location());
  assert(SM.getSLocEntry(TargetFile).isFile());

  while (First.isMacroID() && Last.isMacroID()) {
    auto DecFirst = SM.getDecomposedLoc(First);
    auto DecLast = SM.getDecomposedLoc(Last);
    auto &ExpFirst = SM.getSLocEntry(DecFirst.first).getExpansion();
    auto &ExpLast = SM.getSLocEntry(DecLast.first).getExpansion();
    // Only unwrap macro-argument expansions; stop at macro bodies.
    if (!ExpFirst.isMacroArgExpansion() || !ExpLast.isMacroArgExpansion())
      break;
    // Locations in the same macro argument share an expansion location; if
    // they differ, the endpoints come from different arguments.
    if (ExpFirst.getExpansionLocStart() != ExpLast.getExpansionLocStart())
      break;
    // Stop once the macro invocation is written in the target file itself.
    auto ExpFileID = SM.getFileID(ExpFirst.getExpansionLocStart());
    if (ExpFileID == TargetFile)
      break;
    // Replace each endpoint with its spelling inside the macro argument.
    First = ExpFirst.getSpellingLoc().getLocWithOffset(DecFirst.second);
    Last = ExpLast.getSpellingLoc().getLocWithOffset(DecLast.second);
  }
  auto DecFirst = SM.getDecomposedExpansionLoc(Candidate.getBegin());
  auto DecLast = SM.getDecomposedExpansionLoc(Candidate.getEnd());
  // Bail out if the candidate range does not lie inside the target file.
  if (Candidate.isInvalid() || DecFirst.first != TargetFile ||
      DecLast.first != TargetFile)
    return SourceRange();
  // The candidate range must not overlap the preceding token...
  if (Prev.isValid()) {
    auto Dec = SM.getDecomposedLoc(SM.getExpansionRange(Prev).getBegin());
    if (Dec.first != DecFirst.first || Dec.second >= DecFirst.second)
      return SourceRange();
  }
  // ...nor the following token.
  if (Next.isValid()) {
    auto Dec = SM.getDecomposedLoc(SM.getExpansionRange(Next).getEnd());
    if (Dec.first != DecLast.first || Dec.second <= DecLast.second)
      return SourceRange();
  }
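// A minimal side sketch, not part of this file: the unwrapping step above is
// essentially SourceManager::getImmediateSpellingLoc restricted to
// macro-argument expansions. The helper name below is hypothetical.
static SourceLocation unwrapOneMacroArgLevel(SourceLocation Loc,
                                             const SourceManager &SM) {
  // Macro-body locations and plain file locations are returned unchanged.
  if (!Loc.isMacroID() || !SM.isMacroArgExpansion(Loc))
    return Loc;
  // For a macro-argument expansion, this yields the location where the
  // argument token was written at the call site.
  return SM.getImmediateSpellingLoc(Loc);
}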
    : Location(Location), Length(Length), Kind(Kind) {

  assert(!T.isAnnotation());

  const char *Start = SM.getCharacterData(location(), &Invalid);
  assert(!Invalid);
  return llvm::StringRef(Start, length());

  assert(location().isFileID() && "must be a spelled token");
  FileID File;
  unsigned StartOffset;
  std::tie(File, StartOffset) = SM.getDecomposedLoc(location());

  assert(F.file() == L.file() && "tokens from different files");
  assert((F == L || F.endOffset() <= L.beginOffset()) &&
         "wrong order of tokens");
  return FileRange(F.file(), F.beginOffset(), L.endOffset());
  return OS << T.str();

  assert(File.isValid());
  assert(BeginOffset <= EndOffset);

  std::tie(File, Begin) = SM.getDecomposedLoc(BeginLoc);
  End = Begin + Length;

  assert(SM.getFileID(BeginLoc) == SM.getFileID(EndLoc));
  assert(SM.getFileOffset(BeginLoc) <= SM.getFileOffset(EndLoc));

  std::tie(File, Begin) = SM.getDecomposedLoc(BeginLoc);
  End = SM.getFileOffset(EndLoc);

  return OS << llvm::formatv("FileRange(file = {0}, offsets = {1}-{2})",
                             R.file().getHashValue(), R.beginOffset(),
                             R.endOffset());
  assert(End <= Text.size());
  if (!ExpandedTokIndex.empty())
    return;
  ExpandedTokIndex.reserve(ExpandedTokens.size());
  // Index the expanded tokens by location for fast lookups.
  for (size_t I = 0, E = ExpandedTokens.size(); I != E; ++I) {
    SourceLocation Loc = ExpandedTokens[I].location();
    if (Loc.isValid())
      ExpandedTokIndex[Loc] = I;
  }
  if (!ExpandedTokIndex.empty()) {
    // Quick lookup if R is a token range; this is the common case, since AST
    // ranges are token ranges from the expanded token stream.
    const auto B = ExpandedTokIndex.find(R.getBegin());
    const auto E = ExpandedTokIndex.find(R.getEnd());
    if (B != ExpandedTokIndex.end() && E != ExpandedTokIndex.end()) {
      const Token *L = ExpandedTokens.data() + B->getSecond();
      // Add 1 to the end index to make a half-open range.
      const Token *R = ExpandedTokens.data() + E->getSecond() + 1;
      if (L > R)
        return {};
      return {L, R};
    }
  }
  // Slow case: binary search using isBeforeInTranslationUnit.
  return getTokensCovering(expandedTokens(), R, *SourceMgr);
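// Usage sketch, not part of this file: mapping an AST node's range to the
// expanded tokens it covers. Assumes a TokenBuffer built for the same
// translation unit as the node (and clang/AST/Stmt.h available).
llvm::ArrayRef<syntax::Token> tokensOfStmt(const clang::Stmt &S,
                                           const syntax::TokenBuffer &Tokens) {
  // AST ranges are token ranges in the expanded stream, so the indexed fast
  // path above typically applies once indexExpandedTokens() has been called.
  return Tokens.expandedTokens(S.getSourceRange());
}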
std::pair<const syntax::Token *, const TokenBuffer::Mapping *>
TokenBuffer::spelledForExpandedToken(const syntax::Token *Expanded) const {
  assert(ExpandedTokens.data() <= Expanded &&
         Expanded < ExpandedTokens.data() + ExpandedTokens.size());

  auto FileIt = Files.find(
      SourceMgr->getFileID(SourceMgr->getExpansionLoc(Expanded->location())));
  assert(FileIt != Files.end() && "no file for an expanded token");

  const MarkedFile &File = FileIt->second;

  unsigned ExpandedIndex = Expanded - ExpandedTokens.data();
  // Find the first mapping that produced tokens after Expanded.
  auto It = llvm::partition_point(File.Mappings, [&](const Mapping &M) {
    return M.BeginExpanded <= ExpandedIndex;
  });
  // Our token could only be produced by the previous mapping.
  if (It == File.Mappings.begin()) {
    // No previous mapping, no need to bound the result of the lookup.
    return {&File.SpelledTokens[ExpandedIndex - File.BeginExpanded],
            /*Mapping=*/nullptr};
  }
  --It; // It now points to the last mapping that starts before our token.

  // Check if the token is part of the mapping.
  if (ExpandedIndex < It->EndExpanded)
    return {&File.SpelledTokens[It->BeginSpelled], /*Mapping=*/&*It};

  // Not part of the mapping; offset from the end of the mapping instead.
  return {
      &File.SpelledTokens[It->EndSpelled + (ExpandedIndex - It->EndExpanded)],
      /*Mapping=*/nullptr};
}
const TokenBuffer::Mapping *
TokenBuffer::mappingStartingBeforeSpelled(const MarkedFile &F,
                                          const syntax::Token *Spelled) {
  assert(F.SpelledTokens.data() <= Spelled);
  unsigned SpelledI = Spelled - F.SpelledTokens.data();
  assert(SpelledI < F.SpelledTokens.size());

  auto It = llvm::partition_point(F.Mappings, [SpelledI](const Mapping &M) {
    return M.BeginSpelled <= SpelledI;
  });
  if (It == F.Mappings.begin())
    return nullptr;
  --It;
  return &*It;
}
  const auto &File = fileForSpelled(Spelled);

  auto *FrontMapping = mappingStartingBeforeSpelled(File, &Spelled.front());
  unsigned SpelledFrontI = &Spelled.front() - File.SpelledTokens.data();
  assert(SpelledFrontI < File.SpelledTokens.size());
  unsigned ExpandedBegin;
  if (!FrontMapping) {
    // No mapping affects the token; the expanded token is at the same offset.
    ExpandedBegin = File.BeginExpanded + SpelledFrontI;
  } else if (SpelledFrontI < FrontMapping->EndSpelled) {
    // The token is inside the mapping; it must be the mapping's first spelled
    // token, otherwise the range does not cover a full expansion.
    if (SpelledFrontI != FrontMapping->BeginSpelled) {
      return {};
    }
    ExpandedBegin = FrontMapping->BeginExpanded;
  } else {
    // The token is after the mapping; offset from the mapping's end.
    ExpandedBegin =
        FrontMapping->EndExpanded + (SpelledFrontI - FrontMapping->EndSpelled);
  }

  auto *BackMapping = mappingStartingBeforeSpelled(File, &Spelled.back());
  unsigned SpelledBackI = &Spelled.back() - File.SpelledTokens.data();
  unsigned ExpandedEnd;
  if (!BackMapping) {
    ExpandedEnd = File.BeginExpanded + SpelledBackI + 1;
  } else if (SpelledBackI < BackMapping->EndSpelled) {
    // The token is inside the mapping; it must be the mapping's last spelled
    // token.
    if (SpelledBackI + 1 != BackMapping->EndSpelled) {
      return {};
    }
    ExpandedEnd = BackMapping->EndExpanded;
  } else {
    ExpandedEnd =
        BackMapping->EndExpanded + (SpelledBackI - BackMapping->EndSpelled) + 1;
  }

  assert(ExpandedBegin < ExpandedTokens.size());
  assert(ExpandedEnd < ExpandedTokens.size());
  // Avoid returning empty ranges.
  if (ExpandedBegin == ExpandedEnd)
    return {};
  return {llvm::ArrayRef(ExpandedTokens.data() + ExpandedBegin,
                         ExpandedTokens.data() + ExpandedEnd)};
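// Usage sketch, not part of this file: listing the expanded tokens produced by
// a subrange of spelled tokens (e.g. a macro invocation as written). Assumes
// Spelled is a subrange of Tokens.spelledTokens(FID) for a tracked file.
void dumpExpansionsOf(llvm::ArrayRef<syntax::Token> Spelled,
                      const syntax::TokenBuffer &Tokens) {
  for (llvm::ArrayRef<syntax::Token> Range :
       Tokens.expandedForSpelled(Spelled)) {
    for (const syntax::Token &T : Range)
      llvm::errs() << T.text(Tokens.sourceManager()) << " ";
    llvm::errs() << "\n";
  }
}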
  auto It = Files.find(FID);
  assert(It != Files.end());
  return It->second.SpelledTokens;

  const auto *Tok = llvm::partition_point(
      spelledTokens(SourceMgr->getFileID(Loc)),
      [&](const syntax::Token &Tok) { return Tok.endLocation() <= Loc; });
  if (!Tok || Loc < Tok->location())
    return nullptr;
  return Tok;
std::string TokenBuffer::Mapping::str() const {
  return std::string(
      llvm::formatv("spelled tokens: [{0},{1}), expanded tokens: [{2},{3})",
                    BeginSpelled, EndSpelled, BeginExpanded, EndExpanded));
}
std::optional<llvm::ArrayRef<syntax::Token>>
TokenBuffer::spelledForExpanded(llvm::ArrayRef<syntax::Token> Expanded) const {
  // In cases of invalid code, AST nodes can have source ranges that include
  // the `eof` token. As there is no spelling for this token, exclude it from
  // the range.
  if (!Expanded.empty() && Expanded.back().kind() == tok::eof) {
    Expanded = Expanded.drop_back();
  }
  // Mapping an empty range is ambiguous in case of empty mappings at either
  // end of the range, bail out in that case.
  if (Expanded.empty())
    return std::nullopt;
  const syntax::Token *First = &Expanded.front();
  const syntax::Token *Last = &Expanded.back();
  auto [FirstSpelled, FirstMapping] = spelledForExpandedToken(First);
  auto [LastSpelled, LastMapping] = spelledForExpandedToken(Last);

  FileID FID = SourceMgr->getFileID(FirstSpelled->location());
  // The endpoints are spelled in different files; we cannot map the range.
  if (FID != SourceMgr->getFileID(LastSpelled->location()))
    return std::nullopt;

  const MarkedFile &File = Files.find(FID)->second;

  // If the range is inside a single macro argument, use the slow path that
  // walks the expansion tree: the result may be a slice of the argument.
  if (FirstMapping && FirstMapping == LastMapping &&
      SourceMgr->isMacroArgExpansion(First->location()) &&
      SourceMgr->isMacroArgExpansion(Last->location())) {
    // The adjacent expanded tokens must not map into the resulting range.
    SourceLocation Prev =
        First == &ExpandedTokens.front() ? SourceLocation()
                                         : (First - 1)->location();
    SourceLocation Next = Last == &ExpandedTokens.back()
                              ? SourceLocation()
                              : (Last + 1)->location();
    SourceRange Range = spelledForExpandedSlow(
        First->location(), Last->location(), Prev, Next, FID, *SourceMgr);
    if (Range.isInvalid())
      return std::nullopt;
    return getTokensCovering(File.SpelledTokens, Range, *SourceMgr);
  }

  // Otherwise the range must start and end at expansion boundaries.
  unsigned FirstExpanded = Expanded.begin() - ExpandedTokens.data();
  unsigned LastExpanded = Expanded.end() - ExpandedTokens.data();
  if (FirstMapping && FirstExpanded != FirstMapping->BeginExpanded)
    return std::nullopt;
  if (LastMapping && LastMapping->EndExpanded != LastExpanded)
    return std::nullopt;
  return llvm::ArrayRef(
      FirstMapping ? File.SpelledTokens.data() + FirstMapping->BeginSpelled
                   : FirstSpelled,
      LastMapping ? File.SpelledTokens.data() + LastMapping->EndSpelled
                  : LastSpelled + 1);
}

TokenBuffer::Expansion TokenBuffer::makeExpansion(const MarkedFile &F,
                                                  const Mapping &M) const {
  Expansion E;
  E.Spelled = llvm::ArrayRef(F.SpelledTokens.data() + M.BeginSpelled,
                             F.SpelledTokens.data() + M.EndSpelled);
  E.Expanded = llvm::ArrayRef(ExpandedTokens.data() + M.BeginExpanded,
                              ExpandedTokens.data() + M.EndExpanded);
  return E;
}
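// Usage sketch, not part of this file: the typical refactoring-client use of
// spelledForExpanded(). Given the expanded tokens of an AST node, find the
// contiguous file range that can be edited, or give up when the node is
// partially produced by a macro body.
std::optional<syntax::FileRange>
editableRange(llvm::ArrayRef<syntax::Token> ExpandedForNode,
              const syntax::TokenBuffer &Tokens) {
  auto Spelled = Tokens.spelledForExpanded(ExpandedForNode);
  if (!Spelled || Spelled->empty())
    return std::nullopt;
  return syntax::Token::range(Tokens.sourceManager(), Spelled->front(),
                              Spelled->back());
}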
const TokenBuffer::MarkedFile &
TokenBuffer::fileForSpelled(llvm::ArrayRef<syntax::Token> Spelled) const {
  assert(!Spelled.empty());
  assert(Spelled.front().location().isFileID() && "not a spelled token");
  auto FileIt = Files.find(SourceMgr->getFileID(Spelled.front().location()));
  assert(FileIt != Files.end() && "file not tracked by token buffer");
  const auto &File = FileIt->second;
  assert(File.SpelledTokens.data() <= Spelled.data() &&
         Spelled.end() <=
             (File.SpelledTokens.data() + File.SpelledTokens.size()) &&
         "Tokens not in spelled range");
  auto T1 = Spelled.back().location();
  auto T2 = File.SpelledTokens.back().location();
  assert(T1 == T2 || sourceManager().isBeforeInTranslationUnit(T1, T2));
  return File;
}
std::optional<TokenBuffer::Expansion>
TokenBuffer::expansionStartingAt(const syntax::Token *Spelled) const {
  assert(Spelled);
  const auto &File = fileForSpelled(*Spelled);

  unsigned SpelledIndex = Spelled - File.SpelledTokens.data();
  auto M = llvm::partition_point(File.Mappings, [&](const Mapping &M) {
    return M.BeginSpelled < SpelledIndex;
  });
  if (M == File.Mappings.end() || M->BeginSpelled != SpelledIndex)
    return std::nullopt;
  return makeExpansion(File, *M);
}
  const auto &File = fileForSpelled(Spelled);

  // Find the first overlapping mapping, then copy mappings until they stop
  // overlapping with the spelled range.
  unsigned SpelledBeginIndex = Spelled.begin() - File.SpelledTokens.data();
  unsigned SpelledEndIndex = Spelled.end() - File.SpelledTokens.data();
  auto M = llvm::partition_point(File.Mappings, [&](const Mapping &M) {
    return M.EndSpelled <= SpelledBeginIndex;
  });
  std::vector<TokenBuffer::Expansion> Expansions;
  for (; M != File.Mappings.end() && M->BeginSpelled < SpelledEndIndex; ++M)
    Expansions.push_back(makeExpansion(File, *M));
  return Expansions;
  auto *Right = llvm::partition_point(
      Tokens, [&](const syntax::Token &Tok) { return Tok.location() < Loc; });
  bool AcceptRight = Right != Tokens.end() && Right->location() <= Loc;
  bool AcceptLeft =
      Right != Tokens.begin() && (Right - 1)->endLocation() >= Loc;
  return llvm::ArrayRef(Right - (AcceptLeft ? 1 : 0),
                        Right + (AcceptRight ? 1 : 0));

  return spelledTokensTouching(
      Loc, Tokens.spelledTokens(Tokens.sourceManager().getFileID(Loc)));

  for (const syntax::Token &Tok : spelledTokensTouching(Loc, Tokens)) {
    if (Tok.kind() == tok::identifier)
      return &Tok;
  }
  return nullptr;

  return spelledIdentifierTouching(
      Loc, Tokens.spelledTokens(Tokens.sourceManager().getFileID(Loc)));
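// Usage sketch, not part of this file: going from a file offset (e.g. an
// editor cursor) to the identifier token under it. The offset-to-location
// conversion below is one common way to obtain Loc; the helper name is
// hypothetical.
const syntax::Token *identifierAtOffset(FileID FID, unsigned Offset,
                                        const syntax::TokenBuffer &Tokens) {
  const SourceManager &SM = Tokens.sourceManager();
  SourceLocation Loc = SM.getLocForStartOfFile(FID).getLocWithOffset(Offset);
  // "Touching" means overlapping the location or starting/ending exactly there.
  return syntax::spelledIdentifierTouching(Loc, Tokens);
}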
std::vector<const syntax::Token *>
TokenBuffer::macroExpansions(FileID FID) const {
  auto FileIt = Files.find(FID);
  assert(FileIt != Files.end() && "file not tracked by token buffer");
  auto &File = FileIt->second;
  std::vector<const syntax::Token *> Expansions;
  auto &Spelled = File.SpelledTokens;
  for (auto Mapping : File.Mappings) {
    const syntax::Token *Token = &Spelled[Mapping.BeginSpelled];
    if (Token->kind() == tok::TokenKind::identifier)
      Expansions.push_back(Token);
  }
  return Expansions;
}
  std::vector<syntax::Token> Tokens;
  IdentifierTable Identifiers(LO);
  auto AddToken = [&](clang::Token T) {
    // Fill the proper token kind instead of the raw_identifier.
    if (T.getKind() == tok::raw_identifier && !T.needsCleaning() &&
        !T.hasUCN()) {
      clang::IdentifierInfo &II = Identifiers.get(T.getRawIdentifier());
      T.setIdentifierInfo(&II);
      T.setKind(II.getTokenID());
    }
    Tokens.push_back(syntax::Token(T));
  };

  auto SrcBuffer = SM.getBufferData(FR.file());
  Lexer L(SM.getLocForStartOfFile(FR.file()), LO, SrcBuffer.data(),
          SrcBuffer.data() + FR.beginOffset(),
          // Lexer requires a null-terminated buffer, so BufEnd points at the
          // end of the whole buffer rather than at FR.endOffset().
          SrcBuffer.data() + SrcBuffer.size());
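// Usage sketch, not part of this file: raw-lexing a whole file into spelled
// tokens without running the preprocessor, via the FileID overload of
// tokenize() declared in Tokens.h.
std::vector<syntax::Token> lexWholeFile(FileID FID, const SourceManager &SM,
                                        const LangOptions &LangOpts) {
  // Keywords receive their proper kinds (see the raw_identifier handling
  // above), but macros are not expanded and directives are not executed.
  return syntax::tokenize(FID, SM, LangOpts);
}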
    const auto &SM = Collector->PP.getSourceManager();

    if (LastExpansionEnd.isValid() &&
        !SM.isBeforeInTranslationUnit(LastExpansionEnd, Range.getEnd()))
      return;

           "Overlapping macros should have same expansion location");
      if (T.isAnnotation())
        return;
      DEBUG_WITH_TYPE("collect-tokens", llvm::dbgs()
                                            << "Token: "
                                            << syntax::Token(T).dumpForTests(
                                                   PP.getSourceManager())
                                            << "\n");
      Expanded.push_back(syntax::Token(T));

  auto CB = std::make_unique<CollectPPExpansions>(*this);
  this->Collector = CB.get();
  PP.addPPCallbacks(std::move(CB));
  Builder(std::vector<syntax::Token> Expanded, PPExpansions CollectedExpansions,
          const SourceManager &SM, const LangOptions &LangOpts)
      : Result(SM), CollectedExpansions(std::move(CollectedExpansions)), SM(SM),
        LangOpts(LangOpts) {
    Result.ExpandedTokens = std::move(Expanded);
  }

  TokenBuffer build() && {
    assert(!Result.ExpandedTokens.empty());
    assert(Result.ExpandedTokens.back().kind() == tok::eof);

    // Tokenize every file that contributed tokens to the expanded stream.
    buildSpelledTokens();
    // Walk over expanded and spelled tokens in parallel, building the
    // mappings between them where macro expansions occurred.
    while (NextExpanded < Result.ExpandedTokens.size() - 1 /*eof*/) {
      // advance() must make progress; if it did not, we would loop forever,
      // so bail out with diagnostics instead.
      unsigned OldPosition = NextExpanded;
      advance();
      if (NextExpanded == OldPosition)
        diagnoseAdvanceFailure();
    }
    // The trailing 'eof' is not handled by the loop above; discard the
    // remaining spelled tokens of every file here.
    for (const auto &File : Result.Files)
      discard(File.first);

    for (auto &pair : Result.Files) {
      auto &mappings = pair.second.Mappings;
      assert(llvm::is_sorted(mappings, [](const TokenBuffer::Mapping &M1,
                                          const TokenBuffer::Mapping &M2) {
        return M1.BeginSpelled < M2.BeginSpelled &&
               M1.EndSpelled < M2.EndSpelled &&
               M1.BeginExpanded < M2.BeginExpanded &&
               M1.EndExpanded < M2.EndExpanded;
      }));
    }

    return std::move(Result);
  }
  // Consumes spelled tokens that did not reach the parser: empty macro
  // expansions, preprocessor directives and, with Drain set, the trailing
  // tokens of a file.
  void discard(std::optional<FileID> Drain = std::nullopt) {
    SourceLocation Target =
        Drain ? SM.getLocForEndOfFile(*Drain)
              : SM.getExpansionLoc(
                    Result.ExpandedTokens[NextExpanded].location());
    FileID File = SM.getFileID(Target);
    const auto &SpelledTokens = Result.Files[File].SpelledTokens;
    auto &NextSpelled = this->NextSpelled[File];

    TokenBuffer::Mapping Mapping;
    Mapping.BeginSpelled = NextSpelled;
    // When draining the trailing tokens of a file, the empty mapping points at
    // the end of the file's expanded-token range.
    Mapping.BeginExpanded = Mapping.EndExpanded =
        Drain ? Result.Files[*Drain].EndExpanded : NextExpanded;
    // Emits the accumulated mapping (if non-empty) and starts a new one.
    auto FlushMapping = [&, this] {
      Mapping.EndSpelled = NextSpelled;
      if (Mapping.BeginSpelled != Mapping.EndSpelled)
        Result.Files[File].Mappings.push_back(Mapping);
      Mapping.BeginSpelled = NextSpelled;
    };

    while (NextSpelled < SpelledTokens.size() &&
           SpelledTokens[NextSpelled].location() < Target) {
      // If the spelled token starts a macro expansion we know about, flush the
      // mapping accumulated so far and consume the whole expansion as its own
      // mapping.
      SourceLocation KnownEnd =
          CollectedExpansions.lookup(SpelledTokens[NextSpelled].location());
      if (KnownEnd.isValid()) {
        FlushMapping();
        while (NextSpelled < SpelledTokens.size() &&
               SpelledTokens[NextSpelled].location() <= KnownEnd)
          ++NextSpelled;
        FlushMapping();
      } else {
        ++NextSpelled;
      }
    }
    FlushMapping();
  }
  // Maps the next expanded token onto the spelled stream, advancing both
  // cursors and recording a mapping when a macro expansion is involved.
  void advance() {
    const syntax::Token &Tok = Result.ExpandedTokens[NextExpanded];
    SourceLocation Expansion = SM.getExpansionLoc(Tok.location());
    FileID File = SM.getFileID(Expansion);
    const auto &SpelledTokens = Result.Files[File].SpelledTokens;
    auto &NextSpelled = this->NextSpelled[File];

    if (Tok.location().isFileID()) {
      // A run of file tokens: the spelled and expanded streams are identical,
      // advance over both in lockstep.
      while (NextSpelled < SpelledTokens.size() &&
             NextExpanded < Result.ExpandedTokens.size() &&
             SpelledTokens[NextSpelled].location() ==
                 Result.ExpandedTokens[NextExpanded].location()) {
        ++NextSpelled;
        ++NextExpanded;
      }
    } else {
      // A macro expansion; the PPCallbacks hook recorded where it ends.
      auto End = CollectedExpansions.lookup(Expansion);
      assert(End.isValid() && "Macro expansion wasn't captured?");

      // The mapping starts here.
      TokenBuffer::Mapping Mapping;
      Mapping.BeginExpanded = NextExpanded;
      Mapping.BeginSpelled = NextSpelled;
      // Skip all spelled tokens of the macro invocation.
      while (NextSpelled < SpelledTokens.size() &&
             SpelledTokens[NextSpelled].location() <= End)
        ++NextSpelled;
      // Skip all expanded tokens this invocation produced.
      while (NextExpanded < Result.ExpandedTokens.size() &&
             SM.getExpansionLoc(
                 Result.ExpandedTokens[NextExpanded].location()) == Expansion)
        ++NextExpanded;
      // The mapping ends here.
      Mapping.EndExpanded = NextExpanded;
      Mapping.EndSpelled = NextSpelled;
      Result.Files[File].Mappings.push_back(Mapping);
    }
  }
  void diagnoseAdvanceFailure() {
    // Dump the expanded tokens around the failure point to aid debugging.
    for (unsigned I = (NextExpanded < 10) ? 0 : NextExpanded - 10;
         I < NextExpanded + 5 && I < Result.ExpandedTokens.size(); ++I) {
      const char *L =
          (I == NextExpanded) ? "!! " : (I < NextExpanded) ? "ok " : "   ";
      llvm::errs() << L << Result.ExpandedTokens[I].dumpForTests(SM) << "\n";
    }
    llvm_unreachable("Couldn't map expanded token to spelled tokens!");
  }
  // Lexes every file that contributed tokens to the expanded stream and
  // records each file's expanded-token range.
  void buildSpelledTokens() {
    for (unsigned I = 0; I < Result.ExpandedTokens.size(); ++I) {
      const auto &Tok = Result.ExpandedTokens[I];
      auto FID = SM.getFileID(SM.getExpansionLoc(Tok.location()));

      auto It = Result.Files.try_emplace(FID);
      TokenBuffer::MarkedFile &File = It.first->second;

      // The eof token is not considered part of the file's range.
      File.EndExpanded = Tok.kind() == tok::eof ? I : I + 1;

      if (!It.second)
        continue; // we have seen this file before.
      // This is the first time we see this file.
      File.BeginExpanded = I;
      File.SpelledTokens = tokenize(FID, SM, LangOpts);
    }
  }

  TokenBuffer Result;
  unsigned NextExpanded = 0;                    // cursor in ExpandedTokens
  llvm::DenseMap<FileID, unsigned> NextSpelled; // cursor in SpelledTokens
  PPExpansions CollectedExpansions;
  const SourceManager &SM;
  const LangOptions &LangOpts;
TokenBuffer TokenCollector::consume() && {
  PP.setTokenWatcher(nullptr);
  Collector->disable();
  return Builder(std::move(Expanded), std::move(Expansions),
                 PP.getSourceManager(), PP.getLangOpts())
      .build();
}
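// Usage sketch, not part of this file: the intended TokenCollector lifecycle
// around a parse. Assumes a fully configured Preprocessor and Sema;
// clang::ParseAST is one way to drive the parse to completion.
syntax::TokenBuffer collectTokens(Preprocessor &PP, Sema &S) {
  syntax::TokenCollector Collector(PP); // install hooks before parsing starts
  ParseAST(S);                          // runs the preprocessor and parser
  return std::move(Collector).consume(); // finalize spelled/expanded mappings
}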
  return std::string(llvm::formatv("Token({0}, length = {1})",
                                   tok::getTokenName(kind()), length()));

  return std::string(llvm::formatv("Token(`{0}`, {1}, length = {2})", text(SM),
                                   tok::getTokenName(kind()), length()));

  auto PrintToken = [this](const syntax::Token &T) -> std::string {
    if (T.kind() == tok::eof)
      return "<eof>";
    return std::string(T.text(*SourceMgr));
  };

  auto DumpTokens = [this, &PrintToken](llvm::raw_ostream &OS,
                                        llvm::ArrayRef<syntax::Token> Tokens) {
    if (Tokens.empty()) {
      OS << "<empty>";
      return;
    }
    OS << Tokens[0].text(*SourceMgr);
    for (unsigned I = 1; I < Tokens.size(); ++I) {
      if (Tokens[I].kind() == tok::eof)
        continue;
      OS << " " << PrintToken(Tokens[I]);
    }
  };

  std::string Dump;
  llvm::raw_string_ostream OS(Dump);

  OS << "expanded tokens:\n"
     << "  ";
  std::vector<FileID> Keys;
  for (const auto &F : Files)
    Keys.push_back(F.first);
  llvm::sort(Keys);

  for (FileID ID : Keys) {
    const MarkedFile &File = Files.find(ID)->second;
    auto Entry = SourceMgr->getFileEntryRefForID(ID);
    if (!Entry)
      continue; // Skip builtin files.
    std::string Path = llvm::sys::path::convert_to_slash(Entry->getName());
    OS << llvm::formatv("file '{0}'\n", Path) << "  spelled tokens:\n"
       << "    ";
    DumpTokens(OS, File.SpelledTokens);
    OS << "\n";

    if (File.Mappings.empty()) {
      OS << "  no mappings.\n";
      continue;
    }
    OS << "  mappings:\n";
    for (auto &M : File.Mappings) {
      OS << llvm::formatv(
          "    ['{0}'_{1}, '{2}'_{3}) => ['{4}'_{5}, '{6}'_{7})\n",
          PrintToken(File.SpelledTokens[M.BeginSpelled]), M.BeginSpelled,
          M.EndSpelled == File.SpelledTokens.size()
              ? "<eof>"
              : PrintToken(File.SpelledTokens[M.EndSpelled]),
          M.EndSpelled, PrintToken(ExpandedTokens[M.BeginExpanded]),
          M.BeginExpanded, PrintToken(ExpandedTokens[M.EndExpanded]),
          M.EndExpanded);
    }
  }
  return Dump;
}