summaryrefslogtreecommitdiffstats
path: root/lib/Lex/TokenLexer.cpp
diff options
context:
space:
mode:
authorArgyrios Kyrtzidis <akyrtzi@gmail.com>2011-08-23 21:02:41 +0000
committerArgyrios Kyrtzidis <akyrtzi@gmail.com>2011-08-23 21:02:41 +0000
commitb6c465e17ec37390667223a18a340e8652c212ff (patch)
tree9ae9d25f5e4a683e853589f1415a5aa61d080ec0 /lib/Lex/TokenLexer.cpp
parent499ea5550d6e2fc5cfbd33b47f06d92ce25d7a13 (diff)
Amend r138129 (reduction of SLocEntries) which introduced performance regression due
to increased calls to SourceManager::getFileID. (rdar://9992664) Use a slightly different approach that is more efficient both in terms of speed (no extra getFileID calls) and in SLocEntries reduction. Comparing pre-r138129 and this patch we get: For compiling SemaExpr.cpp reduction of SLocEntries by 26%. For the boost enum library: -SLocEntries -34% (note that this was -5% for r138129) -Memory consumption -50% -PCH size -31% Reduced SLocEntries also benefit the hot function SourceManager::getFileID, evident by the reduced "FileID scans". git-svn-id: https://llvm.org/svn/llvm-project/cfe/trunk@138380 91177308-0d34-0410-b5e6-96231b3b80d8
Diffstat (limited to 'lib/Lex/TokenLexer.cpp')
-rw-r--r-- lib/Lex/TokenLexer.cpp | 64
1 file changed, 46 insertions(+), 18 deletions(-)
diff --git a/lib/Lex/TokenLexer.cpp b/lib/Lex/TokenLexer.cpp
index 85c03f4e96..80f6ad1449 100644
--- a/lib/Lex/TokenLexer.cpp
+++ b/lib/Lex/TokenLexer.cpp
@@ -669,33 +669,51 @@ static void updateConsecutiveMacroArgTokens(SourceManager &SM,
Token *&begin_tokens,
Token * end_tokens) {
assert(begin_tokens < end_tokens);
- Token &FirstTok = *begin_tokens;
- FileID SpellFID = SM.getFileID(FirstTok.getLocation());
- // Look for the first token that is not from the same FileID.
- Token *NextFIDTok = begin_tokens + 1;
- for (; NextFIDTok < end_tokens; ++NextFIDTok)
- if (!SM.isInFileID(NextFIDTok->getLocation(), SpellFID))
+ SourceLocation FirstLoc = begin_tokens->getLocation();
+ SourceLocation CurLoc = FirstLoc;
+
+ // Compare the source location offset of tokens and group together tokens that
+ // are close, even if their locations point to different FileIDs. e.g.
+ //
+ // |bar | foo | cake | (3 tokens from 3 consecutive FileIDs)
+ // ^ ^
+ // |bar foo cake| (one SLocEntry chunk for all tokens)
+ //
+ // we can perform this "merge" since the token's spelling location depends
+ // on the relative offset.
+
+ Token *NextTok = begin_tokens + 1;
+ for (; NextTok < end_tokens; ++NextTok) {
+ int RelOffs;
+ if (!SM.isInSameSLocAddrSpace(CurLoc, NextTok->getLocation(), &RelOffs))
+ break; // Token from different local/loaded location.
+ // Check that token is not before the previous token or more than 50
+ // "characters" away.
+ if (RelOffs < 0 || RelOffs > 50)
break;
+ CurLoc = NextTok->getLocation();
+ }
// For the consecutive tokens, find the length of the SLocEntry to contain
// all of them.
- unsigned FirstOffs, LastOffs;
- SM.isInFileID(FirstTok.getLocation(), SpellFID, &FirstOffs);
- SM.isInFileID((NextFIDTok-1)->getLocation(), SpellFID, &LastOffs);
- unsigned FullLength = (LastOffs - FirstOffs) + (NextFIDTok-1)->getLength();
+ Token &LastConsecutiveTok = *(NextTok-1);
+ int LastRelOffs;
+ SM.isInSameSLocAddrSpace(FirstLoc, LastConsecutiveTok.getLocation(),
+ &LastRelOffs);
+ unsigned FullLength = LastRelOffs + LastConsecutiveTok.getLength();
// Create a macro expansion SLocEntry that will "contain" all of the tokens.
SourceLocation Expansion =
- SM.createMacroArgExpansionLoc(FirstTok.getLocation(), InstLoc,FullLength);
+ SM.createMacroArgExpansionLoc(FirstLoc, InstLoc,FullLength);
// Change the location of the tokens from the spelling location to the new
// expanded location.
- for (; begin_tokens < NextFIDTok; ++begin_tokens) {
+ for (; begin_tokens < NextTok; ++begin_tokens) {
Token &Tok = *begin_tokens;
- unsigned Offs;
- SM.isInFileID(Tok.getLocation(), SpellFID, &Offs);
- Tok.setLocation(Expansion.getFileLocWithOffset(Offs - FirstOffs));
+ int RelOffs;
+ SM.isInSameSLocAddrSpace(FirstLoc, Tok.getLocation(), &RelOffs);
+ Tok.setLocation(Expansion.getFileLocWithOffset(RelOffs));
}
}
@@ -710,9 +728,19 @@ void TokenLexer::updateLocForMacroArgTokens(SourceLocation ArgIdSpellLoc,
Token *end_tokens) {
SourceManager &SM = PP.getSourceManager();
- SourceLocation curInst =
+ SourceLocation InstLoc =
getExpansionLocForMacroDefLoc(ArgIdSpellLoc);
- while (begin_tokens < end_tokens)
- updateConsecutiveMacroArgTokens(SM, curInst, begin_tokens, end_tokens);
+ while (begin_tokens < end_tokens) {
+ // If there's only one token just create a SLocEntry for it.
+ if (end_tokens - begin_tokens == 1) {
+ Token &Tok = *begin_tokens;
+ Tok.setLocation(SM.createMacroArgExpansionLoc(Tok.getLocation(),
+ InstLoc,
+ Tok.getLength()));
+ return;
+ }
+
+ updateConsecutiveMacroArgTokens(SM, InstLoc, begin_tokens, end_tokens);
+ }
}