Diffstat (limited to 'src/3rdparty/angle/src/compiler/preprocessor/new')
-rw-r--r--  src/3rdparty/angle/src/compiler/preprocessor/new/Diagnostics.cpp      |  127
-rw-r--r--  src/3rdparty/angle/src/compiler/preprocessor/new/Diagnostics.h        |   87
-rw-r--r--  src/3rdparty/angle/src/compiler/preprocessor/new/DirectiveHandler.cpp |   16
-rw-r--r--  src/3rdparty/angle/src/compiler/preprocessor/new/DirectiveHandler.h   |   43
-rw-r--r--  src/3rdparty/angle/src/compiler/preprocessor/new/DirectiveParser.cpp  |  932
-rw-r--r--  src/3rdparty/angle/src/compiler/preprocessor/new/DirectiveParser.h    |   82
-rw-r--r--  src/3rdparty/angle/src/compiler/preprocessor/new/ExpressionParser.h   |   34
-rw-r--r--  src/3rdparty/angle/src/compiler/preprocessor/new/ExpressionParser.y   |  279
-rw-r--r--  src/3rdparty/angle/src/compiler/preprocessor/new/Input.cpp            |   55
-rw-r--r--  src/3rdparty/angle/src/compiler/preprocessor/new/Input.h              |   48
-rw-r--r--  src/3rdparty/angle/src/compiler/preprocessor/new/Lexer.cpp            |   16
-rw-r--r--  src/3rdparty/angle/src/compiler/preprocessor/new/Lexer.h              |   25
-rw-r--r--  src/3rdparty/angle/src/compiler/preprocessor/new/Macro.cpp            |   23
-rw-r--r--  src/3rdparty/angle/src/compiler/preprocessor/new/Macro.h              |   44
-rw-r--r--  src/3rdparty/angle/src/compiler/preprocessor/new/MacroExpander.cpp    |  370
-rw-r--r--  src/3rdparty/angle/src/compiler/preprocessor/new/MacroExpander.h      |   75
-rw-r--r--  src/3rdparty/angle/src/compiler/preprocessor/new/Preprocessor.cpp     |  142
-rw-r--r--  src/3rdparty/angle/src/compiler/preprocessor/new/Preprocessor.h       |   49
-rw-r--r--  src/3rdparty/angle/src/compiler/preprocessor/new/SourceLocation.h     |   38
-rw-r--r--  src/3rdparty/angle/src/compiler/preprocessor/new/Token.cpp            |   83
-rw-r--r--  src/3rdparty/angle/src/compiler/preprocessor/new/Token.h              |  106
-rw-r--r--  src/3rdparty/angle/src/compiler/preprocessor/new/Tokenizer.h          |   58
-rw-r--r--  src/3rdparty/angle/src/compiler/preprocessor/new/Tokenizer.l          |  340
-rw-r--r--  src/3rdparty/angle/src/compiler/preprocessor/new/numeric_lex.h        |   61
-rw-r--r--  src/3rdparty/angle/src/compiler/preprocessor/new/pp_utils.h           |   18
25 files changed, 3151 insertions, 0 deletions
diff --git a/src/3rdparty/angle/src/compiler/preprocessor/new/Diagnostics.cpp b/src/3rdparty/angle/src/compiler/preprocessor/new/Diagnostics.cpp
new file mode 100644
index 0000000000..3f50dfc98a
--- /dev/null
+++ b/src/3rdparty/angle/src/compiler/preprocessor/new/Diagnostics.cpp
@@ -0,0 +1,127 @@
+//
+// Copyright (c) 2012 The ANGLE Project Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+
+#include "Diagnostics.h"
+
+#include <cassert>
+
+namespace pp
+{
+
+Diagnostics::~Diagnostics()
+{
+}
+
+void Diagnostics::report(ID id,
+ const SourceLocation& loc,
+ const std::string& text)
+{
+ // TODO(alokp): Keep a count of errors and warnings.
+ print(id, loc, text);
+}
+
+Diagnostics::Severity Diagnostics::severity(ID id)
+{
+ if ((id > ERROR_BEGIN) && (id < ERROR_END))
+ return ERROR;
+
+ if ((id > WARNING_BEGIN) && (id < WARNING_END))
+ return WARNING;
+
+ assert(false);
+ return ERROR;
+}
+
+std::string Diagnostics::message(ID id)
+{
+ switch (id)
+ {
+ // Errors begin.
+ case INTERNAL_ERROR:
+ return "internal error";
+ case OUT_OF_MEMORY:
+ return "out of memory";
+ case INVALID_CHARACTER:
+ return "invalid character";
+ case INVALID_NUMBER:
+ return "invalid number";
+ case INTEGER_OVERFLOW:
+ return "integer overflow";
+ case FLOAT_OVERFLOW:
+ return "float overflow";
+ case TOKEN_TOO_LONG:
+ return "token too long";
+ case INVALID_EXPRESSION:
+ return "invalid expression";
+ case DIVISION_BY_ZERO:
+ return "division by zero";
+ case EOF_IN_COMMENT:
+ return "unexpected end of file found in comment";
+ case UNEXPECTED_TOKEN:
+ return "unexpected token";
+ case DIRECTIVE_INVALID_NAME:
+ return "invalid directive name";
+ case MACRO_NAME_RESERVED:
+ return "macro name is reserved";
+ case MACRO_REDEFINED:
+ return "macro redefined";
+ case MACRO_PREDEFINED_REDEFINED:
+ return "predefined macro redefined";
+ case MACRO_PREDEFINED_UNDEFINED:
+ return "predefined macro undefined";
+ case MACRO_UNTERMINATED_INVOCATION:
+ return "unterminated macro invocation";
+ case MACRO_TOO_FEW_ARGS:
+ return "Not enough arguments for macro";
+ case MACRO_TOO_MANY_ARGS:
+ return "Too many arguments for macro";
+ case CONDITIONAL_ENDIF_WITHOUT_IF:
+ return "unexpected #endif found without a matching #if";
+ case CONDITIONAL_ELSE_WITHOUT_IF:
+ return "unexpected #else found without a matching #if";
+ case CONDITIONAL_ELSE_AFTER_ELSE:
+ return "unexpected #else found after another #else";
+ case CONDITIONAL_ELIF_WITHOUT_IF:
+ return "unexpected #elif found without a matching #if";
+ case CONDITIONAL_ELIF_AFTER_ELSE:
+ return "unexpected #elif found after #else";
+ case CONDITIONAL_UNTERMINATED:
+ return "unexpected end of file found in conditional block";
+ case INVALID_EXTENSION_NAME:
+ return "invalid extension name";
+ case INVALID_EXTENSION_BEHAVIOR:
+ return "invalid extension behavior";
+ case INVALID_EXTENSION_DIRECTIVE:
+ return "invalid extension directive";
+ case INVALID_VERSION_NUMBER:
+ return "invalid version number";
+ case INVALID_VERSION_DIRECTIVE:
+ return "invalid version directive";
+ case VERSION_NOT_FIRST_STATEMENT:
+ return "#version directive must occur before anything else, "
+ "except for comments and white space";
+ case INVALID_LINE_NUMBER:
+ return "invalid line number";
+ case INVALID_FILE_NUMBER:
+ return "invalid file number";
+ case INVALID_LINE_DIRECTIVE:
+ return "invalid line directive";
+ // Errors end.
+ // Warnings begin.
+ case EOF_IN_DIRECTIVE:
+ return "unexpected end of file found in directive";
+ case CONDITIONAL_UNEXPECTED_TOKEN:
+ return "unexpected token after conditional expression";
+ case UNRECOGNIZED_PRAGMA:
+ return "unrecognized pragma";
+ // Warnings end.
+ default:
+ assert(false);
+ return "";
+ }
+}
+
+} // namespace pp
diff --git a/src/3rdparty/angle/src/compiler/preprocessor/new/Diagnostics.h b/src/3rdparty/angle/src/compiler/preprocessor/new/Diagnostics.h
new file mode 100644
index 0000000000..07bc411846
--- /dev/null
+++ b/src/3rdparty/angle/src/compiler/preprocessor/new/Diagnostics.h
@@ -0,0 +1,87 @@
+//
+// Copyright (c) 2012 The ANGLE Project Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+
+#ifndef COMPILER_PREPROCESSOR_DIAGNOSTICS_H_
+#define COMPILER_PREPROCESSOR_DIAGNOSTICS_H_
+
+#include <string>
+
+namespace pp
+{
+
+struct SourceLocation;
+
+// Base class for reporting diagnostic messages.
+// Derived classes are responsible for formatting and printing the messages.
+class Diagnostics
+{
+ public:
+ enum Severity
+ {
+ ERROR,
+ WARNING
+ };
+ enum ID
+ {
+ ERROR_BEGIN,
+ INTERNAL_ERROR,
+ OUT_OF_MEMORY,
+ INVALID_CHARACTER,
+ INVALID_NUMBER,
+ INTEGER_OVERFLOW,
+ FLOAT_OVERFLOW,
+ TOKEN_TOO_LONG,
+ INVALID_EXPRESSION,
+ DIVISION_BY_ZERO,
+ EOF_IN_COMMENT,
+ UNEXPECTED_TOKEN,
+ DIRECTIVE_INVALID_NAME,
+ MACRO_NAME_RESERVED,
+ MACRO_REDEFINED,
+ MACRO_PREDEFINED_REDEFINED,
+ MACRO_PREDEFINED_UNDEFINED,
+ MACRO_UNTERMINATED_INVOCATION,
+ MACRO_TOO_FEW_ARGS,
+ MACRO_TOO_MANY_ARGS,
+ CONDITIONAL_ENDIF_WITHOUT_IF,
+ CONDITIONAL_ELSE_WITHOUT_IF,
+ CONDITIONAL_ELSE_AFTER_ELSE,
+ CONDITIONAL_ELIF_WITHOUT_IF,
+ CONDITIONAL_ELIF_AFTER_ELSE,
+ CONDITIONAL_UNTERMINATED,
+ INVALID_EXTENSION_NAME,
+ INVALID_EXTENSION_BEHAVIOR,
+ INVALID_EXTENSION_DIRECTIVE,
+ INVALID_VERSION_NUMBER,
+ INVALID_VERSION_DIRECTIVE,
+ VERSION_NOT_FIRST_STATEMENT,
+ INVALID_LINE_NUMBER,
+ INVALID_FILE_NUMBER,
+ INVALID_LINE_DIRECTIVE,
+ ERROR_END,
+
+ WARNING_BEGIN,
+ EOF_IN_DIRECTIVE,
+ CONDITIONAL_UNEXPECTED_TOKEN,
+ UNRECOGNIZED_PRAGMA,
+ WARNING_END
+ };
+
+ virtual ~Diagnostics();
+
+ void report(ID id, const SourceLocation& loc, const std::string& text);
+
+ protected:
+ Severity severity(ID id);
+ std::string message(ID id);
+
+ virtual void print(ID id,
+ const SourceLocation& loc,
+ const std::string& text) = 0;
+};
+
+} // namespace pp
+#endif // COMPILER_PREPROCESSOR_DIAGNOSTICS_H_
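A concrete consumer only has to supply print(); the protected severity() and message() helpers turn the ID into human-readable pieces. A minimal sketch of such a subclass writing to stderr (hypothetical, not part of this patch; loc.file and loc.line are the SourceLocation fields used elsewhere in these files):

    #include <cstdio>
    #include "Diagnostics.h"
    #include "SourceLocation.h"

    class StderrDiagnostics : public pp::Diagnostics
    {
      protected:
        virtual void print(ID id,
                           const pp::SourceLocation& loc,
                           const std::string& text)
        {
            const char* kind = (severity(id) == ERROR) ? "error" : "warning";
            // loc.file is the string/file number, loc.line the line number.
            std::fprintf(stderr, "%d:%d: %s: %s: %s\n",
                         loc.file, loc.line, kind,
                         message(id).c_str(), text.c_str());
        }
    };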
diff --git a/src/3rdparty/angle/src/compiler/preprocessor/new/DirectiveHandler.cpp b/src/3rdparty/angle/src/compiler/preprocessor/new/DirectiveHandler.cpp
new file mode 100644
index 0000000000..ca91e1c71b
--- /dev/null
+++ b/src/3rdparty/angle/src/compiler/preprocessor/new/DirectiveHandler.cpp
@@ -0,0 +1,16 @@
+//
+// Copyright (c) 2012 The ANGLE Project Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+
+#include "DirectiveHandler.h"
+
+namespace pp
+{
+
+DirectiveHandler::~DirectiveHandler()
+{
+}
+
+} // namespace pp
diff --git a/src/3rdparty/angle/src/compiler/preprocessor/new/DirectiveHandler.h b/src/3rdparty/angle/src/compiler/preprocessor/new/DirectiveHandler.h
new file mode 100644
index 0000000000..2aaeec2818
--- /dev/null
+++ b/src/3rdparty/angle/src/compiler/preprocessor/new/DirectiveHandler.h
@@ -0,0 +1,43 @@
+//
+// Copyright (c) 2012 The ANGLE Project Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+
+#ifndef COMPILER_PREPROCESSOR_DIRECTIVE_HANDLER_H_
+#define COMPILER_PREPROCESSOR_DIRECTIVE_HANDLER_H_
+
+#include <string>
+
+namespace pp
+{
+
+struct SourceLocation;
+
+// Base class for handling directives.
+// Preprocessor uses this class to notify the clients about certain
+// preprocessor directives. Derived classes are responsible for
+// handling them in an appropriate manner.
+class DirectiveHandler
+{
+ public:
+ virtual ~DirectiveHandler();
+
+ virtual void handleError(const SourceLocation& loc,
+ const std::string& msg) = 0;
+
+ // Handle pragma of form: #pragma name[(value)]
+ virtual void handlePragma(const SourceLocation& loc,
+ const std::string& name,
+ const std::string& value) = 0;
+
+ virtual void handleExtension(const SourceLocation& loc,
+ const std::string& name,
+ const std::string& behavior) = 0;
+
+ virtual void handleVersion(const SourceLocation& loc,
+ int version) = 0;
+};
+
+} // namespace pp
+#endif // COMPILER_PREPROCESSOR_DIRECTIVE_HANDLER_H_
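The preprocessor reports #error, #pragma, #extension and #version through these callbacks rather than acting on them itself. A hypothetical handler that merely records what it sees (a sketch, not part of this patch):

    #include <string>
    #include "DirectiveHandler.h"
    #include "SourceLocation.h"

    class RecordingHandler : public pp::DirectiveHandler
    {
      public:
        RecordingHandler() : mVersion(0) { }

        virtual void handleError(const pp::SourceLocation& loc,
                                 const std::string& msg)
        {
            mLastError = msg;  // text following #error
        }
        virtual void handlePragma(const pp::SourceLocation& loc,
                                  const std::string& name,
                                  const std::string& value)
        {
            // value is empty for "#pragma name" without parentheses.
        }
        virtual void handleExtension(const pp::SourceLocation& loc,
                                     const std::string& name,
                                     const std::string& behavior)
        {
            // e.g. name "GL_OES_standard_derivatives", behavior "enable".
        }
        virtual void handleVersion(const pp::SourceLocation& loc, int version)
        {
            mVersion = version;  // e.g. 100 for "#version 100"
        }

      private:
        std::string mLastError;
        int mVersion;
    };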
diff --git a/src/3rdparty/angle/src/compiler/preprocessor/new/DirectiveParser.cpp b/src/3rdparty/angle/src/compiler/preprocessor/new/DirectiveParser.cpp
new file mode 100644
index 0000000000..f2e42d06bf
--- /dev/null
+++ b/src/3rdparty/angle/src/compiler/preprocessor/new/DirectiveParser.cpp
@@ -0,0 +1,932 @@
+//
+// Copyright (c) 2011 The ANGLE Project Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+
+#include "DirectiveParser.h"
+
+#include <cassert>
+#include <cstdlib>
+#include <sstream>
+
+#include "Diagnostics.h"
+#include "DirectiveHandler.h"
+#include "ExpressionParser.h"
+#include "MacroExpander.h"
+#include "Token.h"
+#include "Tokenizer.h"
+
+namespace {
+enum DirectiveType
+{
+ DIRECTIVE_NONE,
+ DIRECTIVE_DEFINE,
+ DIRECTIVE_UNDEF,
+ DIRECTIVE_IF,
+ DIRECTIVE_IFDEF,
+ DIRECTIVE_IFNDEF,
+ DIRECTIVE_ELSE,
+ DIRECTIVE_ELIF,
+ DIRECTIVE_ENDIF,
+ DIRECTIVE_ERROR,
+ DIRECTIVE_PRAGMA,
+ DIRECTIVE_EXTENSION,
+ DIRECTIVE_VERSION,
+ DIRECTIVE_LINE
+};
+} // namespace
+
+static DirectiveType getDirective(const pp::Token* token)
+{
+ static const std::string kDirectiveDefine("define");
+ static const std::string kDirectiveUndef("undef");
+ static const std::string kDirectiveIf("if");
+ static const std::string kDirectiveIfdef("ifdef");
+ static const std::string kDirectiveIfndef("ifndef");
+ static const std::string kDirectiveElse("else");
+ static const std::string kDirectiveElif("elif");
+ static const std::string kDirectiveEndif("endif");
+ static const std::string kDirectiveError("error");
+ static const std::string kDirectivePragma("pragma");
+ static const std::string kDirectiveExtension("extension");
+ static const std::string kDirectiveVersion("version");
+ static const std::string kDirectiveLine("line");
+
+ if (token->type != pp::Token::IDENTIFIER)
+ return DIRECTIVE_NONE;
+
+ if (token->text == kDirectiveDefine)
+ return DIRECTIVE_DEFINE;
+ else if (token->text == kDirectiveUndef)
+ return DIRECTIVE_UNDEF;
+ else if (token->text == kDirectiveIf)
+ return DIRECTIVE_IF;
+ else if (token->text == kDirectiveIfdef)
+ return DIRECTIVE_IFDEF;
+ else if (token->text == kDirectiveIfndef)
+ return DIRECTIVE_IFNDEF;
+ else if (token->text == kDirectiveElse)
+ return DIRECTIVE_ELSE;
+ else if (token->text == kDirectiveElif)
+ return DIRECTIVE_ELIF;
+ else if (token->text == kDirectiveEndif)
+ return DIRECTIVE_ENDIF;
+ else if (token->text == kDirectiveError)
+ return DIRECTIVE_ERROR;
+ else if (token->text == kDirectivePragma)
+ return DIRECTIVE_PRAGMA;
+ else if (token->text == kDirectiveExtension)
+ return DIRECTIVE_EXTENSION;
+ else if (token->text == kDirectiveVersion)
+ return DIRECTIVE_VERSION;
+ else if (token->text == kDirectiveLine)
+ return DIRECTIVE_LINE;
+
+ return DIRECTIVE_NONE;
+}
+
+static bool isConditionalDirective(DirectiveType directive)
+{
+ switch (directive)
+ {
+ case DIRECTIVE_IF:
+ case DIRECTIVE_IFDEF:
+ case DIRECTIVE_IFNDEF:
+ case DIRECTIVE_ELSE:
+ case DIRECTIVE_ELIF:
+ case DIRECTIVE_ENDIF:
+ return true;
+ default:
+ return false;
+ }
+}
+
+// Returns true if the token represents End Of Directive.
+static bool isEOD(const pp::Token* token)
+{
+ return (token->type == '\n') || (token->type == pp::Token::LAST);
+}
+
+static void skipUntilEOD(pp::Lexer* lexer, pp::Token* token)
+{
+ while(!isEOD(token))
+ {
+ lexer->lex(token);
+ }
+}
+
+static bool isMacroNameReserved(const std::string& name)
+{
+ // Names prefixed with "GL_" are reserved.
+ if (name.substr(0, 3) == "GL_")
+ return true;
+
+ // Names containing two consecutive underscores are reserved.
+ if (name.find("__") != std::string::npos)
+ return true;
+
+ return false;
+}
+
+static bool isMacroPredefined(const std::string& name,
+ const pp::MacroSet& macroSet)
+{
+ pp::MacroSet::const_iterator iter = macroSet.find(name);
+ return iter != macroSet.end() ? iter->second.predefined : false;
+}
+
+namespace pp
+{
+
+class DefinedParser : public Lexer
+{
+ public:
+ DefinedParser(Lexer* lexer,
+ const MacroSet* macroSet,
+ Diagnostics* diagnostics) :
+ mLexer(lexer),
+ mMacroSet(macroSet),
+ mDiagnostics(diagnostics)
+ {
+ }
+
+ protected:
+ virtual void lex(Token* token)
+ {
+ static const std::string kDefined("defined");
+
+ mLexer->lex(token);
+ if (token->type != Token::IDENTIFIER)
+ return;
+ if (token->text != kDefined)
+ return;
+
+ bool paren = false;
+ mLexer->lex(token);
+ if (token->type == '(')
+ {
+ paren = true;
+ mLexer->lex(token);
+ }
+
+ if (token->type != Token::IDENTIFIER)
+ {
+ mDiagnostics->report(Diagnostics::UNEXPECTED_TOKEN,
+ token->location, token->text);
+ skipUntilEOD(mLexer, token);
+ return;
+ }
+ MacroSet::const_iterator iter = mMacroSet->find(token->text);
+ std::string expression = iter != mMacroSet->end() ? "1" : "0";
+
+ if (paren)
+ {
+ mLexer->lex(token);
+ if (token->type != ')')
+ {
+ mDiagnostics->report(Diagnostics::UNEXPECTED_TOKEN,
+ token->location, token->text);
+ skipUntilEOD(mLexer, token);
+ return;
+ }
+ }
+
+ // We have a valid defined operator.
+ // Convert the current token into a CONST_INT token.
+ token->type = Token::CONST_INT;
+ token->text = expression;
+ }
+
+ private:
+ Lexer* mLexer;
+ const MacroSet* mMacroSet;
+ Diagnostics* mDiagnostics;
+};
+
+DirectiveParser::DirectiveParser(Tokenizer* tokenizer,
+ MacroSet* macroSet,
+ Diagnostics* diagnostics,
+ DirectiveHandler* directiveHandler) :
+ mPastFirstStatement(false),
+ mTokenizer(tokenizer),
+ mMacroSet(macroSet),
+ mDiagnostics(diagnostics),
+ mDirectiveHandler(directiveHandler)
+{
+}
+
+void DirectiveParser::lex(Token* token)
+{
+ do
+ {
+ mTokenizer->lex(token);
+
+ if (token->type == Token::PP_HASH)
+ {
+ parseDirective(token);
+ mPastFirstStatement = true;
+ }
+
+ if (token->type == Token::LAST)
+ {
+ if (!mConditionalStack.empty())
+ {
+ const ConditionalBlock& block = mConditionalStack.back();
+ mDiagnostics->report(Diagnostics::CONDITIONAL_UNTERMINATED,
+ block.location, block.type);
+ }
+ break;
+ }
+
+ } while (skipping() || (token->type == '\n'));
+
+ mPastFirstStatement = true;
+}
+
+void DirectiveParser::parseDirective(Token* token)
+{
+ assert(token->type == Token::PP_HASH);
+
+ mTokenizer->lex(token);
+ if (isEOD(token))
+ {
+ // Empty Directive.
+ return;
+ }
+
+ DirectiveType directive = getDirective(token);
+
+ // While in an excluded conditional block/group,
+ // we only parse conditional directives.
+ if (skipping() && !isConditionalDirective(directive))
+ {
+ skipUntilEOD(mTokenizer, token);
+ return;
+ }
+
+ switch(directive)
+ {
+ case DIRECTIVE_NONE:
+ mDiagnostics->report(Diagnostics::DIRECTIVE_INVALID_NAME,
+ token->location, token->text);
+ skipUntilEOD(mTokenizer, token);
+ break;
+ case DIRECTIVE_DEFINE:
+ parseDefine(token);
+ break;
+ case DIRECTIVE_UNDEF:
+ parseUndef(token);
+ break;
+ case DIRECTIVE_IF:
+ parseIf(token);
+ break;
+ case DIRECTIVE_IFDEF:
+ parseIfdef(token);
+ break;
+ case DIRECTIVE_IFNDEF:
+ parseIfndef(token);
+ break;
+ case DIRECTIVE_ELSE:
+ parseElse(token);
+ break;
+ case DIRECTIVE_ELIF:
+ parseElif(token);
+ break;
+ case DIRECTIVE_ENDIF:
+ parseEndif(token);
+ break;
+ case DIRECTIVE_ERROR:
+ parseError(token);
+ break;
+ case DIRECTIVE_PRAGMA:
+ parsePragma(token);
+ break;
+ case DIRECTIVE_EXTENSION:
+ parseExtension(token);
+ break;
+ case DIRECTIVE_VERSION:
+ parseVersion(token);
+ break;
+ case DIRECTIVE_LINE:
+ parseLine(token);
+ break;
+ default:
+ assert(false);
+ break;
+ }
+
+ skipUntilEOD(mTokenizer, token);
+ if (token->type == Token::LAST)
+ {
+ mDiagnostics->report(Diagnostics::EOF_IN_DIRECTIVE,
+ token->location, token->text);
+ }
+}
+
+void DirectiveParser::parseDefine(Token* token)
+{
+ assert(getDirective(token) == DIRECTIVE_DEFINE);
+
+ mTokenizer->lex(token);
+ if (token->type != Token::IDENTIFIER)
+ {
+ mDiagnostics->report(Diagnostics::UNEXPECTED_TOKEN,
+ token->location, token->text);
+ return;
+ }
+ if (isMacroPredefined(token->text, *mMacroSet))
+ {
+ mDiagnostics->report(Diagnostics::MACRO_PREDEFINED_REDEFINED,
+ token->location, token->text);
+ return;
+ }
+ if (isMacroNameReserved(token->text))
+ {
+ mDiagnostics->report(Diagnostics::MACRO_NAME_RESERVED,
+ token->location, token->text);
+ return;
+ }
+
+ Macro macro;
+ macro.type = Macro::kTypeObj;
+ macro.name = token->text;
+
+ mTokenizer->lex(token);
+ if (token->type == '(' && !token->hasLeadingSpace())
+ {
+ // Function-like macro. Collect arguments.
+ macro.type = Macro::kTypeFunc;
+ do {
+ mTokenizer->lex(token);
+ if (token->type != Token::IDENTIFIER)
+ break;
+ macro.parameters.push_back(token->text);
+
+ mTokenizer->lex(token); // Get ','.
+ } while (token->type == ',');
+
+ if (token->type != ')')
+ {
+ mDiagnostics->report(Diagnostics::UNEXPECTED_TOKEN,
+ token->location,
+ token->text);
+ return;
+ }
+ mTokenizer->lex(token); // Get ')'.
+ }
+
+ while ((token->type != '\n') && (token->type != Token::LAST))
+ {
+ // Reset the token location because it is unnecessary in replacement
+ // list. Resetting it also allows us to reuse Token::equals() to
+ // compare macros.
+ token->location = SourceLocation();
+ macro.replacements.push_back(*token);
+ mTokenizer->lex(token);
+ }
+ if (!macro.replacements.empty())
+ {
+ // Whitespace preceding the replacement list is not considered part of
+ // the replacement list for either form of macro.
+ macro.replacements.front().setHasLeadingSpace(false);
+ }
+
+ // Check for macro redefinition.
+ MacroSet::const_iterator iter = mMacroSet->find(macro.name);
+ if (iter != mMacroSet->end() && !macro.equals(iter->second))
+ {
+ mDiagnostics->report(Diagnostics::MACRO_REDEFINED,
+ token->location,
+ macro.name);
+ return;
+ }
+ mMacroSet->insert(std::make_pair(macro.name, macro));
+}
+
+void DirectiveParser::parseUndef(Token* token)
+{
+ assert(getDirective(token) == DIRECTIVE_UNDEF);
+
+ mTokenizer->lex(token);
+ if (token->type != Token::IDENTIFIER)
+ {
+ mDiagnostics->report(Diagnostics::UNEXPECTED_TOKEN,
+ token->location, token->text);
+ return;
+ }
+
+ MacroSet::iterator iter = mMacroSet->find(token->text);
+ if (iter != mMacroSet->end())
+ {
+ if (iter->second.predefined)
+ {
+ mDiagnostics->report(Diagnostics::MACRO_PREDEFINED_UNDEFINED,
+ token->location, token->text);
+ }
+ else
+ {
+ mMacroSet->erase(iter);
+ }
+ }
+
+ mTokenizer->lex(token);
+}
+
+void DirectiveParser::parseIf(Token* token)
+{
+ assert(getDirective(token) == DIRECTIVE_IF);
+ parseConditionalIf(token);
+}
+
+void DirectiveParser::parseIfdef(Token* token)
+{
+ assert(getDirective(token) == DIRECTIVE_IFDEF);
+ parseConditionalIf(token);
+}
+
+void DirectiveParser::parseIfndef(Token* token)
+{
+ assert(getDirective(token) == DIRECTIVE_IFNDEF);
+ parseConditionalIf(token);
+}
+
+void DirectiveParser::parseElse(Token* token)
+{
+ assert(getDirective(token) == DIRECTIVE_ELSE);
+
+ if (mConditionalStack.empty())
+ {
+ mDiagnostics->report(Diagnostics::CONDITIONAL_ELSE_WITHOUT_IF,
+ token->location, token->text);
+ skipUntilEOD(mTokenizer, token);
+ return;
+ }
+
+ ConditionalBlock& block = mConditionalStack.back();
+ if (block.skipBlock)
+ {
+ // No diagnostics. Just skip the whole line.
+ skipUntilEOD(mTokenizer, token);
+ return;
+ }
+ if (block.foundElseGroup)
+ {
+ mDiagnostics->report(Diagnostics::CONDITIONAL_ELSE_AFTER_ELSE,
+ token->location, token->text);
+ skipUntilEOD(mTokenizer, token);
+ return;
+ }
+
+ block.foundElseGroup = true;
+ block.skipGroup = block.foundValidGroup;
+ block.foundValidGroup = true;
+
+ // Warn if there are extra tokens after #else.
+ mTokenizer->lex(token);
+ if (!isEOD(token))
+ {
+ mDiagnostics->report(Diagnostics::CONDITIONAL_UNEXPECTED_TOKEN,
+ token->location, token->text);
+ skipUntilEOD(mTokenizer, token);
+ }
+}
+
+void DirectiveParser::parseElif(Token* token)
+{
+ assert(getDirective(token) == DIRECTIVE_ELIF);
+
+ if (mConditionalStack.empty())
+ {
+ mDiagnostics->report(Diagnostics::CONDITIONAL_ELIF_WITHOUT_IF,
+ token->location, token->text);
+ skipUntilEOD(mTokenizer, token);
+ return;
+ }
+
+ ConditionalBlock& block = mConditionalStack.back();
+ if (block.skipBlock)
+ {
+ // No diagnostics. Just skip the whole line.
+ skipUntilEOD(mTokenizer, token);
+ return;
+ }
+ if (block.foundElseGroup)
+ {
+ mDiagnostics->report(Diagnostics::CONDITIONAL_ELIF_AFTER_ELSE,
+ token->location, token->text);
+ skipUntilEOD(mTokenizer, token);
+ return;
+ }
+ if (block.foundValidGroup)
+ {
+ // Do not parse the expression.
+ // Also be careful not to emit a diagnostic.
+ block.skipGroup = true;
+ skipUntilEOD(mTokenizer, token);
+ return;
+ }
+
+ int expression = parseExpressionIf(token);
+ block.skipGroup = expression == 0;
+ block.foundValidGroup = expression != 0;
+}
+
+void DirectiveParser::parseEndif(Token* token)
+{
+ assert(getDirective(token) == DIRECTIVE_ENDIF);
+
+ if (mConditionalStack.empty())
+ {
+ mDiagnostics->report(Diagnostics::CONDITIONAL_ENDIF_WITHOUT_IF,
+ token->location, token->text);
+ skipUntilEOD(mTokenizer, token);
+ return;
+ }
+
+ mConditionalStack.pop_back();
+
+ // Warn if there are tokens after #endif.
+ mTokenizer->lex(token);
+ if (!isEOD(token))
+ {
+ mDiagnostics->report(Diagnostics::CONDITIONAL_UNEXPECTED_TOKEN,
+ token->location, token->text);
+ skipUntilEOD(mTokenizer, token);
+ }
+}
+
+void DirectiveParser::parseError(Token* token)
+{
+ assert(getDirective(token) == DIRECTIVE_ERROR);
+
+ std::ostringstream stream;
+ mTokenizer->lex(token);
+ while ((token->type != '\n') && (token->type != Token::LAST))
+ {
+ stream << *token;
+ mTokenizer->lex(token);
+ }
+ mDirectiveHandler->handleError(token->location, stream.str());
+}
+
+// Parses pragma of form: #pragma name[(value)].
+void DirectiveParser::parsePragma(Token* token)
+{
+ assert(getDirective(token) == DIRECTIVE_PRAGMA);
+
+ enum State
+ {
+ PRAGMA_NAME,
+ LEFT_PAREN,
+ PRAGMA_VALUE,
+ RIGHT_PAREN
+ };
+
+ bool valid = true;
+ std::string name, value;
+ int state = PRAGMA_NAME;
+
+ mTokenizer->lex(token);
+ while ((token->type != '\n') && (token->type != Token::LAST))
+ {
+ switch(state++)
+ {
+ case PRAGMA_NAME:
+ name = token->text;
+ valid = valid && (token->type == Token::IDENTIFIER);
+ break;
+ case LEFT_PAREN:
+ valid = valid && (token->type == '(');
+ break;
+ case PRAGMA_VALUE:
+ value = token->text;
+ valid = valid && (token->type == Token::IDENTIFIER);
+ break;
+ case RIGHT_PAREN:
+ valid = valid && (token->type == ')');
+ break;
+ default:
+ valid = false;
+ break;
+ }
+ mTokenizer->lex(token);
+ }
+
+ valid = valid && ((state == PRAGMA_NAME) || // Empty pragma.
+ (state == LEFT_PAREN) || // Without value.
+ (state == RIGHT_PAREN + 1)); // With value.
+ if (!valid)
+ {
+ mDiagnostics->report(Diagnostics::UNRECOGNIZED_PRAGMA,
+ token->location, name);
+ }
+ else if (state > PRAGMA_NAME) // Do not notify for empty pragma.
+ {
+ mDirectiveHandler->handlePragma(token->location, name, value);
+ }
+}
+
+void DirectiveParser::parseExtension(Token* token)
+{
+ assert(getDirective(token) == DIRECTIVE_EXTENSION);
+
+ enum State
+ {
+ EXT_NAME,
+ COLON,
+ EXT_BEHAVIOR
+ };
+
+ bool valid = true;
+ std::string name, behavior;
+ int state = EXT_NAME;
+
+ mTokenizer->lex(token);
+ while ((token->type != '\n') && (token->type != Token::LAST))
+ {
+ switch (state++)
+ {
+ case EXT_NAME:
+ if (valid && (token->type != Token::IDENTIFIER))
+ {
+ mDiagnostics->report(Diagnostics::INVALID_EXTENSION_NAME,
+ token->location, token->text);
+ valid = false;
+ }
+ if (valid) name = token->text;
+ break;
+ case COLON:
+ if (valid && (token->type != ':'))
+ {
+ mDiagnostics->report(Diagnostics::UNEXPECTED_TOKEN,
+ token->location, token->text);
+ valid = false;
+ }
+ break;
+ case EXT_BEHAVIOR:
+ if (valid && (token->type != Token::IDENTIFIER))
+ {
+ mDiagnostics->report(Diagnostics::INVALID_EXTENSION_BEHAVIOR,
+ token->location, token->text);
+ valid = false;
+ }
+ if (valid) behavior = token->text;
+ break;
+ default:
+ if (valid)
+ {
+ mDiagnostics->report(Diagnostics::UNEXPECTED_TOKEN,
+ token->location, token->text);
+ valid = false;
+ }
+ break;
+ }
+ mTokenizer->lex(token);
+ }
+ if (valid && (state != EXT_BEHAVIOR + 1))
+ {
+ mDiagnostics->report(Diagnostics::INVALID_EXTENSION_DIRECTIVE,
+ token->location, token->text);
+ valid = false;
+ }
+ if (valid)
+ mDirectiveHandler->handleExtension(token->location, name, behavior);
+}
+
+void DirectiveParser::parseVersion(Token* token)
+{
+ assert(getDirective(token) == DIRECTIVE_VERSION);
+
+ if (mPastFirstStatement)
+ {
+ mDiagnostics->report(Diagnostics::VERSION_NOT_FIRST_STATEMENT,
+ token->location, token->text);
+ skipUntilEOD(mTokenizer, token);
+ return;
+ }
+
+ enum State
+ {
+ VERSION_NUMBER
+ };
+
+ bool valid = true;
+ int version = 0;
+ int state = VERSION_NUMBER;
+
+ mTokenizer->lex(token);
+ while ((token->type != '\n') && (token->type != Token::LAST))
+ {
+ switch (state++)
+ {
+ case VERSION_NUMBER:
+ if (valid && (token->type != Token::CONST_INT))
+ {
+ mDiagnostics->report(Diagnostics::INVALID_VERSION_NUMBER,
+ token->location, token->text);
+ valid = false;
+ }
+ if (valid && !token->iValue(&version))
+ {
+ mDiagnostics->report(Diagnostics::INTEGER_OVERFLOW,
+ token->location, token->text);
+ valid = false;
+ }
+ break;
+ default:
+ if (valid)
+ {
+ mDiagnostics->report(Diagnostics::UNEXPECTED_TOKEN,
+ token->location, token->text);
+ valid = false;
+ }
+ break;
+ }
+ mTokenizer->lex(token);
+ }
+ if (valid && (state != VERSION_NUMBER + 1))
+ {
+ mDiagnostics->report(Diagnostics::INVALID_VERSION_DIRECTIVE,
+ token->location, token->text);
+ valid = false;
+ }
+ if (valid)
+ mDirectiveHandler->handleVersion(token->location, version);
+}
+
+void DirectiveParser::parseLine(Token* token)
+{
+ assert(getDirective(token) == DIRECTIVE_LINE);
+
+ enum State
+ {
+ LINE_NUMBER,
+ FILE_NUMBER
+ };
+
+ bool valid = true;
+ int line = 0, file = 0;
+ int state = LINE_NUMBER;
+
+ MacroExpander macroExpander(mTokenizer, mMacroSet, mDiagnostics);
+ macroExpander.lex(token);
+ while ((token->type != '\n') && (token->type != Token::LAST))
+ {
+ switch (state++)
+ {
+ case LINE_NUMBER:
+ if (valid && (token->type != Token::CONST_INT))
+ {
+ mDiagnostics->report(Diagnostics::INVALID_LINE_NUMBER,
+ token->location, token->text);
+ valid = false;
+ }
+ if (valid && !token->iValue(&line))
+ {
+ mDiagnostics->report(Diagnostics::INTEGER_OVERFLOW,
+ token->location, token->text);
+ valid = false;
+ }
+ break;
+ case FILE_NUMBER:
+ if (valid && (token->type != Token::CONST_INT))
+ {
+ mDiagnostics->report(Diagnostics::INVALID_FILE_NUMBER,
+ token->location, token->text);
+ valid = false;
+ }
+ if (valid && !token->iValue(&file))
+ {
+ mDiagnostics->report(Diagnostics::INTEGER_OVERFLOW,
+ token->location, token->text);
+ valid = false;
+ }
+ break;
+ default:
+ if (valid)
+ {
+ mDiagnostics->report(Diagnostics::UNEXPECTED_TOKEN,
+ token->location, token->text);
+ valid = false;
+ }
+ break;
+ }
+ macroExpander.lex(token);
+ }
+
+ if (valid && (state != FILE_NUMBER) && (state != FILE_NUMBER + 1))
+ {
+ mDiagnostics->report(Diagnostics::INVALID_LINE_DIRECTIVE,
+ token->location, token->text);
+ valid = false;
+ }
+ if (valid)
+ {
+ mTokenizer->setLineNumber(line);
+ if (state == FILE_NUMBER + 1) mTokenizer->setFileNumber(file);
+ }
+}
+
+bool DirectiveParser::skipping() const
+{
+ if (mConditionalStack.empty()) return false;
+
+ const ConditionalBlock& block = mConditionalStack.back();
+ return block.skipBlock || block.skipGroup;
+}
+
+void DirectiveParser::parseConditionalIf(Token* token)
+{
+ ConditionalBlock block;
+ block.type = token->text;
+ block.location = token->location;
+
+ if (skipping())
+ {
+ // This conditional block is inside another conditional group
+ // which is skipped. As a consequence this whole block is skipped.
+ // Be careful not to parse the conditional expression that might
+ // emit a diagnostic.
+ skipUntilEOD(mTokenizer, token);
+ block.skipBlock = true;
+ }
+ else
+ {
+ DirectiveType directive = getDirective(token);
+
+ int expression = 0;
+ switch (directive)
+ {
+ case DIRECTIVE_IF:
+ expression = parseExpressionIf(token);
+ break;
+ case DIRECTIVE_IFDEF:
+ expression = parseExpressionIfdef(token);
+ break;
+ case DIRECTIVE_IFNDEF:
+ expression = parseExpressionIfdef(token) == 0 ? 1 : 0;
+ break;
+ default:
+ assert(false);
+ break;
+ }
+ block.skipGroup = expression == 0;
+ block.foundValidGroup = expression != 0;
+ }
+ mConditionalStack.push_back(block);
+}
+
+int DirectiveParser::parseExpressionIf(Token* token)
+{
+ assert((getDirective(token) == DIRECTIVE_IF) ||
+ (getDirective(token) == DIRECTIVE_ELIF));
+
+ DefinedParser definedParser(mTokenizer, mMacroSet, mDiagnostics);
+ MacroExpander macroExpander(&definedParser, mMacroSet, mDiagnostics);
+ ExpressionParser expressionParser(&macroExpander, mDiagnostics);
+
+ int expression = 0;
+ macroExpander.lex(token);
+ expressionParser.parse(token, &expression);
+
+ // Warn if there are tokens after #if expression.
+ if (!isEOD(token))
+ {
+ mDiagnostics->report(Diagnostics::CONDITIONAL_UNEXPECTED_TOKEN,
+ token->location, token->text);
+ skipUntilEOD(mTokenizer, token);
+ }
+
+ return expression;
+}
+
+int DirectiveParser::parseExpressionIfdef(Token* token)
+{
+ assert((getDirective(token) == DIRECTIVE_IFDEF) ||
+ (getDirective(token) == DIRECTIVE_IFNDEF));
+
+ mTokenizer->lex(token);
+ if (token->type != Token::IDENTIFIER)
+ {
+ mDiagnostics->report(Diagnostics::UNEXPECTED_TOKEN,
+ token->location, token->text);
+ skipUntilEOD(mTokenizer, token);
+ return 0;
+ }
+
+ MacroSet::const_iterator iter = mMacroSet->find(token->text);
+ int expression = iter != mMacroSet->end() ? 1 : 0;
+
+ // Warn if there are tokens after #ifdef expression.
+ mTokenizer->lex(token);
+ if (!isEOD(token))
+ {
+ mDiagnostics->report(Diagnostics::CONDITIONAL_UNEXPECTED_TOKEN,
+ token->location, token->text);
+ skipUntilEOD(mTokenizer, token);
+ }
+ return expression;
+}
+
+} // namespace pp
diff --git a/src/3rdparty/angle/src/compiler/preprocessor/new/DirectiveParser.h b/src/3rdparty/angle/src/compiler/preprocessor/new/DirectiveParser.h
new file mode 100644
index 0000000000..8a7f0072ba
--- /dev/null
+++ b/src/3rdparty/angle/src/compiler/preprocessor/new/DirectiveParser.h
@@ -0,0 +1,82 @@
+//
+// Copyright (c) 2012 The ANGLE Project Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+
+#ifndef COMPILER_PREPROCESSOR_DIRECTIVE_PARSER_H_
+#define COMPILER_PREPROCESSOR_DIRECTIVE_PARSER_H_
+
+#include "Lexer.h"
+#include "Macro.h"
+#include "pp_utils.h"
+#include "SourceLocation.h"
+
+namespace pp
+{
+
+class Diagnostics;
+class DirectiveHandler;
+class Tokenizer;
+
+class DirectiveParser : public Lexer
+{
+ public:
+ DirectiveParser(Tokenizer* tokenizer,
+ MacroSet* macroSet,
+ Diagnostics* diagnostics,
+ DirectiveHandler* directiveHandler);
+
+ virtual void lex(Token* token);
+
+ private:
+ PP_DISALLOW_COPY_AND_ASSIGN(DirectiveParser);
+
+ void parseDirective(Token* token);
+ void parseDefine(Token* token);
+ void parseUndef(Token* token);
+ void parseIf(Token* token);
+ void parseIfdef(Token* token);
+ void parseIfndef(Token* token);
+ void parseElse(Token* token);
+ void parseElif(Token* token);
+ void parseEndif(Token* token);
+ void parseError(Token* token);
+ void parsePragma(Token* token);
+ void parseExtension(Token* token);
+ void parseVersion(Token* token);
+ void parseLine(Token* token);
+
+ bool skipping() const;
+ void parseConditionalIf(Token* token);
+ int parseExpressionIf(Token* token);
+ int parseExpressionIfdef(Token* token);
+
+ struct ConditionalBlock
+ {
+ std::string type;
+ SourceLocation location;
+ bool skipBlock;
+ bool skipGroup;
+ bool foundValidGroup;
+ bool foundElseGroup;
+
+ ConditionalBlock() :
+ skipBlock(false),
+ skipGroup(false),
+ foundValidGroup(false),
+ foundElseGroup(false)
+ {
+ }
+ };
+ bool mPastFirstStatement;
+ std::vector<ConditionalBlock> mConditionalStack;
+ Tokenizer* mTokenizer;
+ MacroSet* mMacroSet;
+ Diagnostics* mDiagnostics;
+ DirectiveHandler* mDirectiveHandler;
+};
+
+} // namespace pp
+#endif // COMPILER_PREPROCESSOR_DIRECTIVE_PARSER_H_
+
diff --git a/src/3rdparty/angle/src/compiler/preprocessor/new/ExpressionParser.h b/src/3rdparty/angle/src/compiler/preprocessor/new/ExpressionParser.h
new file mode 100644
index 0000000000..092d059413
--- /dev/null
+++ b/src/3rdparty/angle/src/compiler/preprocessor/new/ExpressionParser.h
@@ -0,0 +1,34 @@
+//
+// Copyright (c) 2012 The ANGLE Project Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+
+#ifndef COMPILER_PREPROCESSOR_EXPRESSION_PARSER_H_
+#define COMPILER_PREPROCESSOR_EXPRESSION_PARSER_H_
+
+#include "pp_utils.h"
+
+namespace pp
+{
+
+class Diagnostics;
+class Lexer;
+struct Token;
+
+class ExpressionParser
+{
+ public:
+ ExpressionParser(Lexer* lexer, Diagnostics* diagnostics);
+
+ bool parse(Token* token, int* result);
+
+ private:
+ PP_DISALLOW_COPY_AND_ASSIGN(ExpressionParser);
+
+ Lexer* mLexer;
+ Diagnostics* mDiagnostics;
+};
+
+} // namespace pp
+#endif // COMPILER_PREPROCESSOR_EXPRESSION_PARSER_H_
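ExpressionParser evaluates one integer constant expression, such as the operand of "#if defined(FOO) && (BAR > 2)" once defined() and macro references have been rewritten by the lexers stacked in front of it. A sketch of the call pattern, assuming a pp::Lexer named lexer and a Diagnostics object named diagnostics already exist (DirectiveParser::parseExpressionIf() earlier in this patch does the same with a DefinedParser and MacroExpander in front):

    pp::Token token;
    int result = 0;

    // 'lexer' and 'diagnostics' are assumed to be pre-built objects.
    pp::ExpressionParser parser(&lexer, &diagnostics);
    lexer.lex(&token);                       // prime the first token
    bool ok = parser.parse(&token, &result);
    // ok is false on a syntax error or division by zero; on success,
    // 'result' holds the value and 'token' is the first unconsumed token
    // (normally the '\n' that ends the directive).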
diff --git a/src/3rdparty/angle/src/compiler/preprocessor/new/ExpressionParser.y b/src/3rdparty/angle/src/compiler/preprocessor/new/ExpressionParser.y
new file mode 100644
index 0000000000..832ad4001e
--- /dev/null
+++ b/src/3rdparty/angle/src/compiler/preprocessor/new/ExpressionParser.y
@@ -0,0 +1,279 @@
+/*
+//
+// Copyright (c) 2012 The ANGLE Project Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+
+This file contains the Yacc grammar for GLSL ES preprocessor expression.
+
+IF YOU MODIFY THIS FILE YOU ALSO NEED TO RUN generate_parser.sh,
+WHICH GENERATES THE GLSL ES preprocessor expression parser.
+*/
+
+%{
+//
+// Copyright (c) 2012 The ANGLE Project Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+
+// This file is auto-generated by generate_parser.sh. DO NOT EDIT!
+
+#if defined(__GNUC__)
+// Triggered by the auto-generated pplval variable.
+#pragma GCC diagnostic ignored "-Wuninitialized"
+#elif defined(_MSC_VER)
+#pragma warning(disable: 4065 4701)
+#endif
+
+#include "ExpressionParser.h"
+
+#include <cassert>
+#include <sstream>
+
+#include "Diagnostics.h"
+#include "Lexer.h"
+#include "Token.h"
+
+#if defined(_MSC_VER)
+typedef __int64 YYSTYPE;
+#else
+#include <stdint.h>
+typedef intmax_t YYSTYPE;
+#endif // _MSC_VER
+#define YYSTYPE_IS_TRIVIAL 1
+#define YYSTYPE_IS_DECLARED 1
+
+namespace {
+struct Context
+{
+ pp::Diagnostics* diagnostics;
+ pp::Lexer* lexer;
+ pp::Token* token;
+ int* result;
+};
+} // namespace
+%}
+
+%pure-parser
+%name-prefix="pp"
+%parse-param {Context *context}
+%lex-param {Context *context}
+
+%{
+static int yylex(YYSTYPE* lvalp, Context* context);
+static void yyerror(Context* context, const char* reason);
+%}
+
+%token TOK_CONST_INT
+%left TOK_OP_OR
+%left TOK_OP_AND
+%left '|'
+%left '^'
+%left '&'
+%left TOK_OP_EQ TOK_OP_NE
+%left '<' '>' TOK_OP_LE TOK_OP_GE
+%left TOK_OP_LEFT TOK_OP_RIGHT
+%left '+' '-'
+%left '*' '/' '%'
+%right TOK_UNARY
+
+%%
+
+input
+ : expression {
+ *(context->result) = static_cast<int>($1);
+ YYACCEPT;
+ }
+;
+
+expression
+ : TOK_CONST_INT
+ | expression TOK_OP_OR expression {
+ $$ = $1 || $3;
+ }
+ | expression TOK_OP_AND expression {
+ $$ = $1 && $3;
+ }
+ | expression '|' expression {
+ $$ = $1 | $3;
+ }
+ | expression '^' expression {
+ $$ = $1 ^ $3;
+ }
+ | expression '&' expression {
+ $$ = $1 & $3;
+ }
+ | expression TOK_OP_NE expression {
+ $$ = $1 != $3;
+ }
+ | expression TOK_OP_EQ expression {
+ $$ = $1 == $3;
+ }
+ | expression TOK_OP_GE expression {
+ $$ = $1 >= $3;
+ }
+ | expression TOK_OP_LE expression {
+ $$ = $1 <= $3;
+ }
+ | expression '>' expression {
+ $$ = $1 > $3;
+ }
+ | expression '<' expression {
+ $$ = $1 < $3;
+ }
+ | expression TOK_OP_RIGHT expression {
+ $$ = $1 >> $3;
+ }
+ | expression TOK_OP_LEFT expression {
+ $$ = $1 << $3;
+ }
+ | expression '-' expression {
+ $$ = $1 - $3;
+ }
+ | expression '+' expression {
+ $$ = $1 + $3;
+ }
+ | expression '%' expression {
+ if ($3 == 0) {
+ std::ostringstream stream;
+ stream << $1 << " % " << $3;
+ std::string text = stream.str();
+ context->diagnostics->report(pp::Diagnostics::DIVISION_BY_ZERO,
+ context->token->location,
+ text.c_str());
+ YYABORT;
+ } else {
+ $$ = $1 % $3;
+ }
+ }
+ | expression '/' expression {
+ if ($3 == 0) {
+ std::ostringstream stream;
+ stream << $1 << " / " << $3;
+ std::string text = stream.str();
+ context->diagnostics->report(pp::Diagnostics::DIVISION_BY_ZERO,
+ context->token->location,
+ text.c_str());
+ YYABORT;
+ } else {
+ $$ = $1 / $3;
+ }
+ }
+ | expression '*' expression {
+ $$ = $1 * $3;
+ }
+ | '!' expression %prec TOK_UNARY {
+ $$ = ! $2;
+ }
+ | '~' expression %prec TOK_UNARY {
+ $$ = ~ $2;
+ }
+ | '-' expression %prec TOK_UNARY {
+ $$ = - $2;
+ }
+ | '+' expression %prec TOK_UNARY {
+ $$ = + $2;
+ }
+ | '(' expression ')' {
+ $$ = $2;
+ }
+;
+
+%%
+
+int yylex(YYSTYPE* lvalp, Context* context)
+{
+ int type = 0;
+
+ pp::Token* token = context->token;
+ switch (token->type)
+ {
+ case pp::Token::CONST_INT:
+ {
+ unsigned int val = 0;
+ if (!token->uValue(&val))
+ {
+ context->diagnostics->report(pp::Diagnostics::INTEGER_OVERFLOW,
+ token->location, token->text);
+ }
+ *lvalp = static_cast<YYSTYPE>(val);
+ type = TOK_CONST_INT;
+ break;
+ }
+ case pp::Token::OP_OR: type = TOK_OP_OR; break;
+ case pp::Token::OP_AND: type = TOK_OP_AND; break;
+ case pp::Token::OP_NE: type = TOK_OP_NE; break;
+ case pp::Token::OP_EQ: type = TOK_OP_EQ; break;
+ case pp::Token::OP_GE: type = TOK_OP_GE; break;
+ case pp::Token::OP_LE: type = TOK_OP_LE; break;
+ case pp::Token::OP_RIGHT: type = TOK_OP_RIGHT; break;
+ case pp::Token::OP_LEFT: type = TOK_OP_LEFT; break;
+ case '|': type = '|'; break;
+ case '^': type = '^'; break;
+ case '&': type = '&'; break;
+ case '>': type = '>'; break;
+ case '<': type = '<'; break;
+ case '-': type = '-'; break;
+ case '+': type = '+'; break;
+ case '%': type = '%'; break;
+ case '/': type = '/'; break;
+ case '*': type = '*'; break;
+ case '!': type = '!'; break;
+ case '~': type = '~'; break;
+ case '(': type = '('; break;
+ case ')': type = ')'; break;
+
+ default: break;
+ }
+
+ // Advance to the next token if the current one is valid.
+ if (type != 0) context->lexer->lex(token);
+
+ return type;
+}
+
+void yyerror(Context* context, const char* reason)
+{
+ context->diagnostics->report(pp::Diagnostics::INVALID_EXPRESSION,
+ context->token->location,
+ reason);
+}
+
+namespace pp {
+
+ExpressionParser::ExpressionParser(Lexer* lexer, Diagnostics* diagnostics) :
+ mLexer(lexer),
+ mDiagnostics(diagnostics)
+{
+}
+
+bool ExpressionParser::parse(Token* token, int* result)
+{
+ Context context;
+ context.diagnostics = mDiagnostics;
+ context.lexer = mLexer;
+ context.token = token;
+ context.result = result;
+ int ret = yyparse(&context);
+ switch (ret)
+ {
+ case 0:
+ case 1:
+ break;
+
+ case 2:
+ mDiagnostics->report(Diagnostics::OUT_OF_MEMORY, token->location, "");
+ break;
+
+ default:
+ assert(false);
+ mDiagnostics->report(Diagnostics::INTERNAL_ERROR, token->location, "");
+ break;
+ }
+
+ return ret == 0;
+}
+
+} // namespace pp
diff --git a/src/3rdparty/angle/src/compiler/preprocessor/new/Input.cpp b/src/3rdparty/angle/src/compiler/preprocessor/new/Input.cpp
new file mode 100644
index 0000000000..c3de95f313
--- /dev/null
+++ b/src/3rdparty/angle/src/compiler/preprocessor/new/Input.cpp
@@ -0,0 +1,55 @@
+//
+// Copyright (c) 2011 The ANGLE Project Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+
+#include "Input.h"
+
+#include <algorithm>
+#include <cassert>
+#include <cstring>
+
+namespace pp
+{
+
+Input::Input() : mCount(0), mString(0)
+{
+}
+
+Input::Input(int count, const char* const string[], const int length[]) :
+ mCount(count),
+ mString(string)
+{
+ assert(mCount >= 0);
+ mLength.reserve(mCount);
+ for (int i = 0; i < mCount; ++i)
+ {
+ int len = length ? length[i] : -1;
+ mLength.push_back(len < 0 ? strlen(mString[i]) : len);
+ }
+}
+
+int Input::read(char* buf, int maxSize)
+{
+ int nRead = 0;
+ while ((nRead < maxSize) && (mReadLoc.sIndex < mCount))
+ {
+ int size = mLength[mReadLoc.sIndex] - mReadLoc.cIndex;
+        size = std::min(size, maxSize - nRead);
Note: the line above clamps to the space remaining in the caller's buffer, not to the full buffer size, so a read that spans two input strings cannot write past buf + maxSize.
+ memcpy(buf + nRead, mString[mReadLoc.sIndex] + mReadLoc.cIndex, size);
+ nRead += size;
+ mReadLoc.cIndex += size;
+
+ // Advance string if we reached the end of current string.
+ if (mReadLoc.cIndex == mLength[mReadLoc.sIndex])
+ {
+ ++mReadLoc.sIndex;
+ mReadLoc.cIndex = 0;
+ }
+ }
+ return nRead;
+}
+
+} // namespace pp
+
diff --git a/src/3rdparty/angle/src/compiler/preprocessor/new/Input.h b/src/3rdparty/angle/src/compiler/preprocessor/new/Input.h
new file mode 100644
index 0000000000..dac734b68d
--- /dev/null
+++ b/src/3rdparty/angle/src/compiler/preprocessor/new/Input.h
@@ -0,0 +1,48 @@
+//
+// Copyright (c) 2011 The ANGLE Project Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+
+#ifndef COMPILER_PREPROCESSOR_INPUT_H_
+#define COMPILER_PREPROCESSOR_INPUT_H_
+
+#include <vector>
+
+namespace pp
+{
+
+// Holds and reads input for Lexer.
+class Input
+{
+ public:
+ Input();
+ Input(int count, const char* const string[], const int length[]);
+
+ int count() const { return mCount; }
+ const char* string(int index) const { return mString[index]; }
+ int length(int index) const { return mLength[index]; }
+
+ int read(char* buf, int maxSize);
+
+ struct Location
+ {
+        int sIndex; // String index.
+ int cIndex; // Char index.
+
+ Location() : sIndex(0), cIndex(0) { }
+ };
+ const Location& readLoc() const { return mReadLoc; }
+
+ private:
+ // Input.
+ int mCount;
+ const char* const* mString;
+ std::vector<int> mLength;
+
+ Location mReadLoc;
+};
+
+} // namespace pp
+#endif // COMPILER_PREPROCESSOR_INPUT_H_
+
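Input mirrors the (count, string, length) triple that the shader-source API hands the compiler: a negative length, or a null length array, means the string is null-terminated. A small usage sketch (not part of this patch):

    const char* const parts[] = { "void main() {", " gl_FragColor = vec4(0.0); }" };
    pp::Input input(2, parts, NULL);     // NULL lengths: fall back to strlen()

    char buf[16];
    int n;
    while ((n = input.read(buf, sizeof(buf))) > 0)
    {
        // buf[0..n) is the next chunk of the concatenated sources; read()
        // crosses into the following string when the current one is
        // exhausted, and readLoc() reports the string/char position reached.
    }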
diff --git a/src/3rdparty/angle/src/compiler/preprocessor/new/Lexer.cpp b/src/3rdparty/angle/src/compiler/preprocessor/new/Lexer.cpp
new file mode 100644
index 0000000000..7c663ee761
--- /dev/null
+++ b/src/3rdparty/angle/src/compiler/preprocessor/new/Lexer.cpp
@@ -0,0 +1,16 @@
+//
+// Copyright (c) 2012 The ANGLE Project Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+
+#include "Lexer.h"
+
+namespace pp
+{
+
+Lexer::~Lexer()
+{
+}
+
+} // namespace pp
diff --git a/src/3rdparty/angle/src/compiler/preprocessor/new/Lexer.h b/src/3rdparty/angle/src/compiler/preprocessor/new/Lexer.h
new file mode 100644
index 0000000000..eb85cea873
--- /dev/null
+++ b/src/3rdparty/angle/src/compiler/preprocessor/new/Lexer.h
@@ -0,0 +1,25 @@
+//
+// Copyright (c) 2012 The ANGLE Project Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+
+#ifndef COMPILER_PREPROCESSOR_LEXER_H_
+#define COMPILER_PREPROCESSOR_LEXER_H_
+
+namespace pp
+{
+
+struct Token;
+
+class Lexer
+{
+ public:
+ virtual ~Lexer();
+
+ virtual void lex(Token* token) = 0;
+};
+
+} // namespace pp
+#endif // COMPILER_PREPROCESSOR_LEXER_H_
+
diff --git a/src/3rdparty/angle/src/compiler/preprocessor/new/Macro.cpp b/src/3rdparty/angle/src/compiler/preprocessor/new/Macro.cpp
new file mode 100644
index 0000000000..b2e3088e32
--- /dev/null
+++ b/src/3rdparty/angle/src/compiler/preprocessor/new/Macro.cpp
@@ -0,0 +1,23 @@
+//
+// Copyright (c) 2011 The ANGLE Project Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+
+#include "Macro.h"
+
+#include "Token.h"
+
+namespace pp
+{
+
+bool Macro::equals(const Macro& other) const
+{
+ return (type == other.type) &&
+ (name == other.name) &&
+ (parameters == other.parameters) &&
+ (replacements == other.replacements);
+}
+
+} // namespace pp
+
diff --git a/src/3rdparty/angle/src/compiler/preprocessor/new/Macro.h b/src/3rdparty/angle/src/compiler/preprocessor/new/Macro.h
new file mode 100644
index 0000000000..7ec0149116
--- /dev/null
+++ b/src/3rdparty/angle/src/compiler/preprocessor/new/Macro.h
@@ -0,0 +1,44 @@
+//
+// Copyright (c) 2012 The ANGLE Project Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+
+#ifndef COMPILER_PREPROCESSOR_MACRO_H_
+#define COMPILER_PREPROCESSOR_MACRO_H_
+
+#include <map>
+#include <string>
+#include <vector>
+
+namespace pp
+{
+
+struct Token;
+
+struct Macro
+{
+ enum Type
+ {
+ kTypeObj,
+ kTypeFunc
+ };
+ typedef std::vector<std::string> Parameters;
+ typedef std::vector<Token> Replacements;
+
+ Macro() : predefined(false), disabled(false), type(kTypeObj) { }
+ bool equals(const Macro& other) const;
+
+ bool predefined;
+ mutable bool disabled;
+
+ Type type;
+ std::string name;
+ Parameters parameters;
+ Replacements replacements;
+};
+
+typedef std::map<std::string, Macro> MacroSet;
+
+} // namespace pp
+#endif // COMPILER_PREPROCESSOR_MACRO_H_
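For orientation, this is roughly what DirectiveParser::parseDefine() (earlier in this patch) builds for a function-like definition; the token texts are shown informally and macroSet is a hypothetical local:

    // #define AREA(r) (3.14 * r * r)

    pp::Macro macro;
    macro.type = pp::Macro::kTypeFunc;
    macro.name = "AREA";
    macro.parameters.push_back("r");
    // macro.replacements receives the body tokens '(', '3.14', '*', 'r',
    // '*', 'r', ')' with their source locations cleared, so Macro::equals()
    // accepts an identical redefinition and flags a conflicting one.

    pp::MacroSet macroSet;
    macroSet[macro.name] = macro;        // MacroSet maps name -> Macro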
diff --git a/src/3rdparty/angle/src/compiler/preprocessor/new/MacroExpander.cpp b/src/3rdparty/angle/src/compiler/preprocessor/new/MacroExpander.cpp
new file mode 100644
index 0000000000..701cec9a4b
--- /dev/null
+++ b/src/3rdparty/angle/src/compiler/preprocessor/new/MacroExpander.cpp
@@ -0,0 +1,370 @@
+//
+// Copyright (c) 2011 The ANGLE Project Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+
+#include "MacroExpander.h"
+
+#include <algorithm>
+#include <sstream>
+
+#include "Diagnostics.h"
+#include "Token.h"
+
+namespace pp
+{
+
+class TokenLexer : public Lexer
+{
+ public:
+ typedef std::vector<Token> TokenVector;
+
+ TokenLexer(TokenVector* tokens)
+ {
+ tokens->swap(mTokens);
+ mIter = mTokens.begin();
+ }
+
+ virtual void lex(Token* token)
+ {
+ if (mIter == mTokens.end())
+ {
+ token->reset();
+ token->type = Token::LAST;
+ }
+ else
+ {
+ *token = *mIter++;
+ }
+ }
+
+ private:
+ PP_DISALLOW_COPY_AND_ASSIGN(TokenLexer);
+
+ TokenVector mTokens;
+ TokenVector::const_iterator mIter;
+};
+
+MacroExpander::MacroExpander(Lexer* lexer,
+ MacroSet* macroSet,
+ Diagnostics* diagnostics) :
+ mLexer(lexer),
+ mMacroSet(macroSet),
+ mDiagnostics(diagnostics)
+{
+}
+
+MacroExpander::~MacroExpander()
+{
+ for (size_t i = 0; i < mContextStack.size(); ++i)
+ {
+ delete mContextStack[i];
+ }
+}
+
+void MacroExpander::lex(Token* token)
+{
+ while (true)
+ {
+ getToken(token);
+
+ if (token->type != Token::IDENTIFIER)
+ break;
+
+ if (token->expansionDisabled())
+ break;
+
+ MacroSet::const_iterator iter = mMacroSet->find(token->text);
+ if (iter == mMacroSet->end())
+ break;
+
+ const Macro& macro = iter->second;
+ if (macro.disabled)
+ {
+ // If a particular token is not expanded, it is never expanded.
+ token->setExpansionDisabled(true);
+ break;
+ }
+ if ((macro.type == Macro::kTypeFunc) && !isNextTokenLeftParen())
+ {
+ // If the token immediately after the macro name is not a '(',
+ // this macro should not be expanded.
+ break;
+ }
+
+ pushMacro(macro, *token);
+ }
+}
+
+void MacroExpander::getToken(Token* token)
+{
+ if (mReserveToken.get())
+ {
+ *token = *mReserveToken;
+ mReserveToken.reset();
+ return;
+ }
+
+ // First pop all empty macro contexts.
+ while (!mContextStack.empty() && mContextStack.back()->empty())
+ {
+ popMacro();
+ }
+
+ if (!mContextStack.empty())
+ {
+ *token = mContextStack.back()->get();
+ }
+ else
+ {
+ mLexer->lex(token);
+ }
+}
+
+void MacroExpander::ungetToken(const Token& token)
+{
+ if (!mContextStack.empty())
+ {
+ MacroContext* context = mContextStack.back();
+ context->unget();
+ assert(context->replacements[context->index] == token);
+ }
+ else
+ {
+ assert(!mReserveToken.get());
+ mReserveToken.reset(new Token(token));
+ }
+}
+
+bool MacroExpander::isNextTokenLeftParen()
+{
+ Token token;
+ getToken(&token);
+
+ bool lparen = token.type == '(';
+ ungetToken(token);
+
+ return lparen;
+}
+
+bool MacroExpander::pushMacro(const Macro& macro, const Token& identifier)
+{
+ assert(!macro.disabled);
+ assert(!identifier.expansionDisabled());
+ assert(identifier.type == Token::IDENTIFIER);
+ assert(identifier.text == macro.name);
+
+ std::vector<Token> replacements;
+ if (!expandMacro(macro, identifier, &replacements))
+ return false;
+
+ // Macro is disabled for expansion until it is popped off the stack.
+ macro.disabled = true;
+
+ MacroContext* context = new MacroContext;
+ context->macro = &macro;
+ context->replacements.swap(replacements);
+ mContextStack.push_back(context);
+ return true;
+}
+
+void MacroExpander::popMacro()
+{
+ assert(!mContextStack.empty());
+
+ MacroContext* context = mContextStack.back();
+ mContextStack.pop_back();
+
+ assert(context->empty());
+ assert(context->macro->disabled);
+ context->macro->disabled = false;
+ delete context;
+}
+
+bool MacroExpander::expandMacro(const Macro& macro,
+ const Token& identifier,
+ std::vector<Token>* replacements)
+{
+ replacements->clear();
+ if (macro.type == Macro::kTypeObj)
+ {
+ replacements->assign(macro.replacements.begin(),
+ macro.replacements.end());
+
+ if (macro.predefined)
+ {
+ static const std::string kLine = "__LINE__";
+ static const std::string kFile = "__FILE__";
+
+ assert(replacements->size() == 1);
+ Token& repl = replacements->front();
+ if (macro.name == kLine)
+ {
+ std::ostringstream stream;
+ stream << identifier.location.line;
+ repl.text = stream.str();
+ }
+ else if (macro.name == kFile)
+ {
+ std::ostringstream stream;
+ stream << identifier.location.file;
+ repl.text = stream.str();
+ }
+ }
+ }
+ else
+ {
+ assert(macro.type == Macro::kTypeFunc);
+ std::vector<MacroArg> args;
+ args.reserve(macro.parameters.size());
+ if (!collectMacroArgs(macro, identifier, &args))
+ return false;
+
+ replaceMacroParams(macro, args, replacements);
+ }
+
+ for (size_t i = 0; i < replacements->size(); ++i)
+ {
+ Token& repl = replacements->at(i);
+ if (i == 0)
+ {
+ // The first token in the replacement list inherits the padding
+ // properties of the identifier token.
+ repl.setAtStartOfLine(identifier.atStartOfLine());
+ repl.setHasLeadingSpace(identifier.hasLeadingSpace());
+ }
+ repl.location = identifier.location;
+ }
+ return true;
+}
+
+bool MacroExpander::collectMacroArgs(const Macro& macro,
+ const Token& identifier,
+ std::vector<MacroArg>* args)
+{
+ Token token;
+ getToken(&token);
+ assert(token.type == '(');
+
+ args->push_back(MacroArg());
+ for (int openParens = 1; openParens != 0; )
+ {
+ getToken(&token);
+
+ if (token.type == Token::LAST)
+ {
+ mDiagnostics->report(Diagnostics::MACRO_UNTERMINATED_INVOCATION,
+ identifier.location, identifier.text);
+ // Do not lose EOF token.
+ ungetToken(token);
+ return false;
+ }
+
+ bool isArg = false; // True if token is part of the current argument.
+ switch (token.type)
+ {
+ case '(':
+ ++openParens;
+ isArg = true;
+ break;
+ case ')':
+ --openParens;
+ isArg = openParens != 0;
+ break;
+ case ',':
+ // The individual arguments are separated by comma tokens, but
+ // the comma tokens between matching inner parentheses do not
+            // separate arguments.
+ if (openParens == 1) args->push_back(MacroArg());
+ isArg = openParens != 1;
+ break;
+ default:
+ isArg = true;
+ break;
+ }
+ if (isArg)
+ {
+ MacroArg& arg = args->back();
+ // Initial whitespace is not part of the argument.
+ if (arg.empty()) token.setHasLeadingSpace(false);
+ arg.push_back(token);
+ }
+ }
+
+ const Macro::Parameters& params = macro.parameters;
+ // If there is only one empty argument, it is equivalent to no argument.
+ if (params.empty() && (args->size() == 1) && args->front().empty())
+ {
+ args->clear();
+ }
+ // Validate the number of arguments.
+ if (args->size() != params.size())
+ {
+ Diagnostics::ID id = args->size() < macro.parameters.size() ?
+ Diagnostics::MACRO_TOO_FEW_ARGS :
+ Diagnostics::MACRO_TOO_MANY_ARGS;
+ mDiagnostics->report(id, identifier.location, identifier.text);
+ return false;
+ }
+
+ // Pre-expand each argument before substitution.
+ // This step expands each argument individually before they are
+ // inserted into the macro body.
+ for (size_t i = 0; i < args->size(); ++i)
+ {
+ MacroArg& arg = args->at(i);
+ TokenLexer lexer(&arg);
+ MacroExpander expander(&lexer, mMacroSet, mDiagnostics);
+
+ arg.clear();
+ expander.lex(&token);
+ while (token.type != Token::LAST)
+ {
+ arg.push_back(token);
+ expander.lex(&token);
+ }
+ }
+ return true;
+}
+
+void MacroExpander::replaceMacroParams(const Macro& macro,
+ const std::vector<MacroArg>& args,
+ std::vector<Token>* replacements)
+{
+ for (size_t i = 0; i < macro.replacements.size(); ++i)
+ {
+ const Token& repl = macro.replacements[i];
+ if (repl.type != Token::IDENTIFIER)
+ {
+ replacements->push_back(repl);
+ continue;
+ }
+
+ // TODO(alokp): Optimize this.
+ // There is no need to search for macro params every time.
+ // The param index can be cached with the replacement token.
+ Macro::Parameters::const_iterator iter = std::find(
+ macro.parameters.begin(), macro.parameters.end(), repl.text);
+ if (iter == macro.parameters.end())
+ {
+ replacements->push_back(repl);
+ continue;
+ }
+
+ size_t iArg = std::distance(macro.parameters.begin(), iter);
+ const MacroArg& arg = args[iArg];
+ if (arg.empty())
+ {
+ continue;
+ }
+ size_t iRepl = replacements->size();
+ replacements->insert(replacements->end(), arg.begin(), arg.end());
+ // The replacement token inherits padding properties from
+ // macro replacement token.
+ replacements->at(iRepl).setHasLeadingSpace(repl.hasLeadingSpace());
+ }
+}
+
+} // namespace pp
+
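The disabled flag toggled in pushMacro()/popMacro() is what keeps self-referential macros from recursing: while a macro's context is on the stack, its name is left unexpanded and marked with setExpansionDisabled(), so a source such as

    #define FOO (1 + FOO)
    int x = FOO;

expands to "int x = (1 + FOO);" and the inner FOO is never reconsidered for expansion, matching the usual C-preprocessor rule (illustrative example, not part of the patch).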
diff --git a/src/3rdparty/angle/src/compiler/preprocessor/new/MacroExpander.h b/src/3rdparty/angle/src/compiler/preprocessor/new/MacroExpander.h
new file mode 100644
index 0000000000..7c5c543871
--- /dev/null
+++ b/src/3rdparty/angle/src/compiler/preprocessor/new/MacroExpander.h
@@ -0,0 +1,75 @@
+//
+// Copyright (c) 2012 The ANGLE Project Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+
+#ifndef COMPILER_PREPROCESSOR_MACRO_EXPANDER_H_
+#define COMPILER_PREPROCESSOR_MACRO_EXPANDER_H_
+
+#include <cassert>
+#include <memory>
+#include <vector>
+
+#include "Lexer.h"
+#include "Macro.h"
+#include "pp_utils.h"
+
+namespace pp
+{
+
+class Diagnostics;
+
+class MacroExpander : public Lexer
+{
+ public:
+ MacroExpander(Lexer* lexer, MacroSet* macroSet, Diagnostics* diagnostics);
+ virtual ~MacroExpander();
+
+ virtual void lex(Token* token);
+
+ private:
+ PP_DISALLOW_COPY_AND_ASSIGN(MacroExpander);
+
+ void getToken(Token* token);
+ void ungetToken(const Token& token);
+ bool isNextTokenLeftParen();
+
+ bool pushMacro(const Macro& macro, const Token& identifier);
+ void popMacro();
+
+ bool expandMacro(const Macro& macro,
+ const Token& identifier,
+ std::vector<Token>* replacements);
+
+ typedef std::vector<Token> MacroArg;
+ bool collectMacroArgs(const Macro& macro,
+ const Token& identifier,
+ std::vector<MacroArg>* args);
+ void replaceMacroParams(const Macro& macro,
+ const std::vector<MacroArg>& args,
+ std::vector<Token>* replacements);
+
+ struct MacroContext
+ {
+ const Macro* macro;
+ size_t index;
+ std::vector<Token> replacements;
+
+ MacroContext() : macro(0), index(0) { }
+ bool empty() const { return index == replacements.size(); }
+ const Token& get() { return replacements[index++]; }
+ void unget() { assert(index > 0); --index; }
+ };
+
+ Lexer* mLexer;
+ MacroSet* mMacroSet;
+ Diagnostics* mDiagnostics;
+
+ std::auto_ptr<Token> mReserveToken;
+ std::vector<MacroContext*> mContextStack;
+};
+
+} // namespace pp
+#endif // COMPILER_PREPROCESSOR_MACRO_EXPANDER_H_
+
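MacroContext above is essentially a replay buffer: expanded replacement tokens are read back through get()/unget() until the context is empty, at which point the expander pops it and falls back to the underlying lexer. A minimal standalone sketch of that replay behaviour, using ints in place of Tokens:

    #include <cassert>
    #include <iostream>
    #include <vector>

    // Stripped-down analogue of MacroContext, with ints standing in for Tokens.
    struct ReplayContext
    {
        std::vector<int> replacements;
        size_t index;

        ReplayContext() : index(0) { }
        bool empty() const { return index == replacements.size(); }
        int get() { return replacements[index++]; }
        void unget() { assert(index > 0); --index; }
    };

    int main()
    {
        ReplayContext context;
        context.replacements.push_back(1);
        context.replacements.push_back(2);

        std::cout << context.get() << "\n";      // 1
        context.unget();                         // back up one token
        while (!context.empty())
            std::cout << context.get() << "\n";  // 1, then 2
    }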
diff --git a/src/3rdparty/angle/src/compiler/preprocessor/new/Preprocessor.cpp b/src/3rdparty/angle/src/compiler/preprocessor/new/Preprocessor.cpp
new file mode 100644
index 0000000000..ffa7225a8f
--- /dev/null
+++ b/src/3rdparty/angle/src/compiler/preprocessor/new/Preprocessor.cpp
@@ -0,0 +1,142 @@
+//
+// Copyright (c) 2011 The ANGLE Project Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+
+#include "Preprocessor.h"
+
+#include <cassert>
+#include <sstream>
+
+#include "Diagnostics.h"
+#include "DirectiveParser.h"
+#include "Macro.h"
+#include "MacroExpander.h"
+#include "Token.h"
+#include "Tokenizer.h"
+
+namespace pp
+{
+
+struct PreprocessorImpl
+{
+ Diagnostics* diagnostics;
+ MacroSet macroSet;
+ Tokenizer tokenizer;
+ DirectiveParser directiveParser;
+ MacroExpander macroExpander;
+
+ PreprocessorImpl(Diagnostics* diag,
+ DirectiveHandler* directiveHandler) :
+ diagnostics(diag),
+ tokenizer(diag),
+ directiveParser(&tokenizer, &macroSet, diag, directiveHandler),
+ macroExpander(&directiveParser, &macroSet, diag)
+ {
+ }
+};
+
+Preprocessor::Preprocessor(Diagnostics* diagnostics,
+ DirectiveHandler* directiveHandler)
+{
+ mImpl = new PreprocessorImpl(diagnostics, directiveHandler);
+}
+
+Preprocessor::~Preprocessor()
+{
+ delete mImpl;
+}
+
+bool Preprocessor::init(int count,
+ const char* const string[],
+ const int length[])
+{
+ static const int kGLSLVersion = 100;
+
+ // Add standard pre-defined macros.
+ predefineMacro("__LINE__", 0);
+ predefineMacro("__FILE__", 0);
+ predefineMacro("__VERSION__", kGLSLVersion);
+ predefineMacro("GL_ES", 1);
+
+ return mImpl->tokenizer.init(count, string, length);
+}
+
+void Preprocessor::predefineMacro(const char* name, int value)
+{
+ std::ostringstream stream;
+ stream << value;
+
+ Token token;
+ token.type = Token::CONST_INT;
+ token.text = stream.str();
+
+ Macro macro;
+ macro.predefined = true;
+ macro.type = Macro::kTypeObj;
+ macro.name = name;
+ macro.replacements.push_back(token);
+
+ mImpl->macroSet[name] = macro;
+}
+
+void Preprocessor::lex(Token* token)
+{
+ bool validToken = false;
+ while (!validToken)
+ {
+ mImpl->macroExpander.lex(token);
+ switch (token->type)
+ {
+ // We should not be returning internal preprocessing tokens.
+ // Convert preprocessing tokens to compiler tokens or report
+ // diagnostics.
+ case Token::PP_HASH:
+ assert(false);
+ break;
+ case Token::CONST_INT:
+ {
+ int val = 0;
+ if (!token->iValue(&val))
+ {
+ // Do not mark the token as invalid.
+ // Just emit the diagnostic and reset value to 0.
+ mImpl->diagnostics->report(Diagnostics::INTEGER_OVERFLOW,
+ token->location, token->text);
+ token->text.assign("0");
+ }
+ validToken = true;
+ break;
+ }
+ case Token::CONST_FLOAT:
+ {
+ float val = 0;
+ if (!token->fValue(&val))
+ {
+ // Do not mark the token as invalid.
+ // Just emit the diagnostic and reset value to 0.0.
+ mImpl->diagnostics->report(Diagnostics::FLOAT_OVERFLOW,
+ token->location, token->text);
+ token->text.assign("0.0");
+ }
+ validToken = true;
+ break;
+ }
+ case Token::PP_NUMBER:
+ mImpl->diagnostics->report(Diagnostics::INVALID_NUMBER,
+ token->location, token->text);
+ break;
+ case Token::PP_OTHER:
+ mImpl->diagnostics->report(Diagnostics::INVALID_CHARACTER,
+ token->location, token->text);
+ break;
+ default:
+ validToken = true;
+ break;
+ }
+ }
+}
+
+} // namespace pp
+
diff --git a/src/3rdparty/angle/src/compiler/preprocessor/new/Preprocessor.h b/src/3rdparty/angle/src/compiler/preprocessor/new/Preprocessor.h
new file mode 100644
index 0000000000..5fe35b27bd
--- /dev/null
+++ b/src/3rdparty/angle/src/compiler/preprocessor/new/Preprocessor.h
@@ -0,0 +1,49 @@
+//
+// Copyright (c) 2011 The ANGLE Project Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+
+#ifndef COMPILER_PREPROCESSOR_PREPROCESSOR_H_
+#define COMPILER_PREPROCESSOR_PREPROCESSOR_H_
+
+#include "pp_utils.h"
+
+namespace pp
+{
+
+class Diagnostics;
+class DirectiveHandler;
+struct PreprocessorImpl;
+struct Token;
+
+class Preprocessor
+{
+ public:
+ Preprocessor(Diagnostics* diagnostics, DirectiveHandler* directiveHandler);
+ ~Preprocessor();
+
+ // count: specifies the number of elements in the string and length arrays.
+ // string: specifies an array of pointers to strings.
+ // length: specifies an array of string lengths.
+ // If length is NULL, each string is assumed to be null terminated.
+ // If length is a value other than NULL, it points to an array containing
+ // a string length for each of the corresponding elements of string.
+ // Each element in the length array may contain the length of the
+ // corresponding string or a value less than 0 to indicate that the string
+ // is null terminated.
+ bool init(int count, const char* const string[], const int length[]);
+ // Adds a pre-defined macro.
+ void predefineMacro(const char* name, int value);
+
+ void lex(Token* token);
+
+ private:
+ PP_DISALLOW_COPY_AND_ASSIGN(Preprocessor);
+
+ PreprocessorImpl* mImpl;
+};
+
+} // namespace pp
+#endif // COMPILER_PREPROCESSOR_PREPROCESSOR_H_
+
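The init()/lex() comments above translate into a small driver loop. A hedged usage sketch, assuming the caller already has concrete Diagnostics and DirectiveHandler implementations (only forward declarations appear in this header); the shader string is purely illustrative:

    #include "Preprocessor.h"
    #include "Token.h"

    // diag and handler point at caller-provided concrete subclasses; their
    // base interfaces live in Diagnostics.h and DirectiveHandler.h.
    void preprocess(pp::Diagnostics* diag, pp::DirectiveHandler* handler)
    {
        const char* shader = "#define FOO 1\nint x = FOO;\n";

        pp::Preprocessor preprocessor(diag, handler);
        // One null-terminated string, so the length array may be NULL.
        if (!preprocessor.init(1, &shader, NULL))
            return;

        pp::Token token;
        preprocessor.lex(&token);
        while (token.type != pp::Token::LAST)  // LAST (0) marks end of input.
        {
            // token.text holds the spelling, token.location the source position.
            preprocessor.lex(&token);
        }
    }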
diff --git a/src/3rdparty/angle/src/compiler/preprocessor/new/SourceLocation.h b/src/3rdparty/angle/src/compiler/preprocessor/new/SourceLocation.h
new file mode 100644
index 0000000000..6982613ac7
--- /dev/null
+++ b/src/3rdparty/angle/src/compiler/preprocessor/new/SourceLocation.h
@@ -0,0 +1,38 @@
+//
+// Copyright (c) 2012 The ANGLE Project Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+
+#ifndef COMPILER_PREPROCESSOR_SOURCE_LOCATION_H_
+#define COMPILER_PREPROCESSOR_SOURCE_LOCATION_H_
+
+namespace pp
+{
+
+struct SourceLocation
+{
+ SourceLocation() : file(0), line(0) { }
+ SourceLocation(int f, int l) : file(f), line(l) { }
+
+ bool equals(const SourceLocation& other) const
+ {
+ return (file == other.file) && (line == other.line);
+ }
+
+ int file;
+ int line;
+};
+
+inline bool operator==(const SourceLocation& lhs, const SourceLocation& rhs)
+{
+ return lhs.equals(rhs);
+}
+
+inline bool operator!=(const SourceLocation& lhs, const SourceLocation& rhs)
+{
+ return !lhs.equals(rhs);
+}
+
+} // namespace pp
+#endif // COMPILER_PREPROCESSOR_SOURCE_LOCATION_H_
diff --git a/src/3rdparty/angle/src/compiler/preprocessor/new/Token.cpp b/src/3rdparty/angle/src/compiler/preprocessor/new/Token.cpp
new file mode 100644
index 0000000000..67f50aa32c
--- /dev/null
+++ b/src/3rdparty/angle/src/compiler/preprocessor/new/Token.cpp
@@ -0,0 +1,83 @@
+//
+// Copyright (c) 2011 The ANGLE Project Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+
+#include "Token.h"
+
+#include <cassert>
+
+#include "numeric_lex.h"
+
+namespace pp
+{
+
+void Token::reset()
+{
+ type = 0;
+ flags = 0;
+ location = SourceLocation();
+ text.clear();
+}
+
+bool Token::equals(const Token& other) const
+{
+ return (type == other.type) &&
+ (flags == other.flags) &&
+ (location == other.location) &&
+ (text == other.text);
+}
+
+void Token::setAtStartOfLine(bool start)
+{
+ if (start)
+ flags |= AT_START_OF_LINE;
+ else
+ flags &= ~AT_START_OF_LINE;
+}
+
+void Token::setHasLeadingSpace(bool space)
+{
+ if (space)
+ flags |= HAS_LEADING_SPACE;
+ else
+ flags &= ~HAS_LEADING_SPACE;
+}
+
+void Token::setExpansionDisabled(bool disable)
+{
+ if (disable)
+ flags |= EXPANSION_DISABLED;
+ else
+ flags &= ~EXPANSION_DISABLED;
+}
+
+bool Token::iValue(int* value) const
+{
+ assert(type == CONST_INT);
+ return numeric_lex_int(text, value);
+}
+
+bool Token::uValue(unsigned int* value) const
+{
+ assert(type == CONST_INT);
+ return numeric_lex_int(text, value);
+}
+
+bool Token::fValue(float* value) const
+{
+ assert(type == CONST_FLOAT);
+ return numeric_lex_float(text, value);
+}
+
+std::ostream& operator<<(std::ostream& out, const Token& token)
+{
+ if (token.hasLeadingSpace())
+ out << " ";
+
+ out << token.text;
+ return out;
+}
+
+} // namespace pp
diff --git a/src/3rdparty/angle/src/compiler/preprocessor/new/Token.h b/src/3rdparty/angle/src/compiler/preprocessor/new/Token.h
new file mode 100644
index 0000000000..8b553aecb6
--- /dev/null
+++ b/src/3rdparty/angle/src/compiler/preprocessor/new/Token.h
@@ -0,0 +1,106 @@
+//
+// Copyright (c) 2011 The ANGLE Project Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+
+#ifndef COMPILER_PREPROCESSOR_TOKEN_H_
+#define COMPILER_PREPROCESSOR_TOKEN_H_
+
+#include <ostream>
+#include <string>
+
+#include "SourceLocation.h"
+
+namespace pp
+{
+
+struct Token
+{
+ enum Type
+ {
+ LAST = 0, // EOF.
+
+ IDENTIFIER = 258,
+
+ CONST_INT,
+ CONST_FLOAT,
+
+ OP_INC,
+ OP_DEC,
+ OP_LEFT,
+ OP_RIGHT,
+ OP_LE,
+ OP_GE,
+ OP_EQ,
+ OP_NE,
+ OP_AND,
+ OP_XOR,
+ OP_OR,
+ OP_ADD_ASSIGN,
+ OP_SUB_ASSIGN,
+ OP_MUL_ASSIGN,
+ OP_DIV_ASSIGN,
+ OP_MOD_ASSIGN,
+ OP_LEFT_ASSIGN,
+ OP_RIGHT_ASSIGN,
+ OP_AND_ASSIGN,
+ OP_XOR_ASSIGN,
+ OP_OR_ASSIGN,
+
+ // Preprocessing token types.
+ // These types are used by the preprocessor internally.
+ // Preprocessor clients must not depend on or check for them.
+ PP_HASH,
+ PP_NUMBER,
+ PP_OTHER
+ };
+ enum Flags
+ {
+ AT_START_OF_LINE = 1 << 0,
+ HAS_LEADING_SPACE = 1 << 1,
+ EXPANSION_DISABLED = 1 << 2
+ };
+
+ Token() : type(0), flags(0) { }
+
+ void reset();
+ bool equals(const Token& other) const;
+
+ // Returns true if this is the first token on the line.
+ // It disregards any leading whitespace.
+ bool atStartOfLine() const { return (flags & AT_START_OF_LINE) != 0; }
+ void setAtStartOfLine(bool start);
+
+ bool hasLeadingSpace() const { return (flags & HAS_LEADING_SPACE) != 0; }
+ void setHasLeadingSpace(bool space);
+
+ bool expansionDisabled() const { return (flags & EXPANSION_DISABLED) != 0; }
+ void setExpansionDisabled(bool disable);
+
+ // Converts text into a numeric value for CONST_INT and CONST_FLOAT tokens.
+ // Returns false if the parsed value cannot fit into an int or float.
+ bool iValue(int* value) const;
+ bool uValue(unsigned int* value) const;
+ bool fValue(float* value) const;
+
+ int type;
+ unsigned int flags;
+ SourceLocation location;
+ std::string text;
+};
+
+inline bool operator==(const Token& lhs, const Token& rhs)
+{
+ return lhs.equals(rhs);
+}
+
+inline bool operator!=(const Token& lhs, const Token& rhs)
+{
+ return !lhs.equals(rhs);
+}
+
+extern std::ostream& operator<<(std::ostream& out, const Token& token);
+
+} // namespace pp
+#endif // COMPILER_PREPROCESSOR_TOKEN_H_
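iValue()/fValue() assert on the token type and only report overflow through their return value, which is why Preprocessor::lex() above falls back to "0" / "0.0" on failure. A small sketch of that pattern for integers (constIntOrZero is a hypothetical helper, not part of the diff):

    #include "Token.h"

    // Mirrors the preprocessor's fallback: a CONST_INT whose text does not
    // fit into an int is treated as 0 rather than as an invalid token.
    int constIntOrZero(const pp::Token& token)
    {
        int value = 0;
        if (token.type == pp::Token::CONST_INT && token.iValue(&value))
            return value;
        return 0;
    }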
diff --git a/src/3rdparty/angle/src/compiler/preprocessor/new/Tokenizer.h b/src/3rdparty/angle/src/compiler/preprocessor/new/Tokenizer.h
new file mode 100644
index 0000000000..a594d2d865
--- /dev/null
+++ b/src/3rdparty/angle/src/compiler/preprocessor/new/Tokenizer.h
@@ -0,0 +1,58 @@
+//
+// Copyright (c) 2012 The ANGLE Project Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+
+#ifndef COMPILER_PREPROCESSOR_TOKENIZER_H_
+#define COMPILER_PREPROCESSOR_TOKENIZER_H_
+
+#include "Input.h"
+#include "Lexer.h"
+#include "pp_utils.h"
+
+namespace pp
+{
+
+class Diagnostics;
+
+class Tokenizer : public Lexer
+{
+ public:
+ struct Context
+ {
+ Diagnostics* diagnostics;
+
+ Input input;
+ // The location that yytext points to. Token locations should track
+ // scanLoc instead of Input::mReadLoc because the two may differ
+ // if text is buffered up in the scanner input buffer.
+ Input::Location scanLoc;
+
+ bool leadingSpace;
+ bool lineStart;
+ };
+ static const size_t kMaxTokenLength;
+
+ Tokenizer(Diagnostics* diagnostics);
+ ~Tokenizer();
+
+ bool init(int count, const char* const string[], const int length[]);
+
+ void setFileNumber(int file);
+ void setLineNumber(int line);
+
+ virtual void lex(Token* token);
+
+ private:
+ PP_DISALLOW_COPY_AND_ASSIGN(Tokenizer);
+ bool initScanner();
+ void destroyScanner();
+
+ void* mHandle; // Scanner handle.
+ Context mContext; // Scanner extra.
+};
+
+} // namespace pp
+#endif // COMPILER_PREPROCESSOR_TOKENIZER_H_
+
diff --git a/src/3rdparty/angle/src/compiler/preprocessor/new/Tokenizer.l b/src/3rdparty/angle/src/compiler/preprocessor/new/Tokenizer.l
new file mode 100644
index 0000000000..9762988350
--- /dev/null
+++ b/src/3rdparty/angle/src/compiler/preprocessor/new/Tokenizer.l
@@ -0,0 +1,340 @@
+/*
+//
+// Copyright (c) 2002-2011 The ANGLE Project Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+
+This file contains the Lex specification for the GLSL ES preprocessor.
+Based on Microsoft Visual Studio 2010 Preprocessor Grammar:
+http://msdn.microsoft.com/en-us/library/2scxys89.aspx
+
+IF YOU MODIFY THIS FILE YOU ALSO NEED TO RUN generate_parser.sh.
+*/
+
+%top{
+//
+// Copyright (c) 2011 The ANGLE Project Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+
+// This file is auto-generated by generate_parser.sh. DO NOT EDIT!
+}
+
+%{
+#include "Tokenizer.h"
+
+#include "Diagnostics.h"
+#include "Token.h"
+
+#if defined(__GNUC__)
+// Triggered by the auto-generated yy_fatal_error function.
+#pragma GCC diagnostic ignored "-Wmissing-noreturn"
+#endif
+
+typedef std::string YYSTYPE;
+typedef pp::SourceLocation YYLTYPE;
+
+// Use the unused yycolumn variable to track file (string) number.
+#define yyfileno yycolumn
+
+#define YY_USER_INIT \
+ do { \
+ yyfileno = 0; \
+ yylineno = 1; \
+ yyextra->leadingSpace = false; \
+ yyextra->lineStart = true; \
+ } while(0);
+
+#define YY_USER_ACTION \
+ do \
+ { \
+ pp::Input* input = &yyextra->input; \
+ pp::Input::Location* scanLoc = &yyextra->scanLoc; \
+ while ((scanLoc->sIndex < input->count()) && \
+ (scanLoc->cIndex >= input->length(scanLoc->sIndex))) \
+ { \
+ scanLoc->cIndex -= input->length(scanLoc->sIndex++); \
+ ++yyfileno; yylineno = 1; \
+ } \
+ yylloc->file = yyfileno; \
+ yylloc->line = yylineno; \
+ scanLoc->cIndex += yyleng; \
+ } while(0);
+
+#define YY_INPUT(buf, result, maxSize) \
+ result = yyextra->input.read(buf, maxSize);
+
+%}
+
+%option noyywrap nounput never-interactive
+%option reentrant bison-bridge bison-locations
+%option prefix="pp"
+%option extra-type="pp::Tokenizer::Context*"
+%x COMMENT
+
+NEWLINE \n|\r|\r\n
+IDENTIFIER [_a-zA-Z][_a-zA-Z0-9]*
+PUNCTUATOR [][<>(){}.+-/*%^|&~=!:;,?]
+
+DECIMAL_CONSTANT [1-9][0-9]*
+OCTAL_CONSTANT 0[0-7]*
+HEXADECIMAL_CONSTANT 0[xX][0-9a-fA-F]+
+
+DIGIT [0-9]
+EXPONENT_PART [eE][+-]?{DIGIT}+
+FRACTIONAL_CONSTANT ({DIGIT}*"."{DIGIT}+)|({DIGIT}+".")
+
+%%
+
+ /* Line comment */
+"//"[^\r\n]*
+
+ /* Block comment */
+ /* Line breaks are just counted - not returned. */
+ /* The comment is replaced by a single space. */
+"/*" { BEGIN(COMMENT); }
+<COMMENT>[^*\r\n]+
+<COMMENT>"*"
+<COMMENT>{NEWLINE} { ++yylineno; }
+<COMMENT>"*/" {
+ yyextra->leadingSpace = true;
+ BEGIN(INITIAL);
+}
+
+# {
+ // # is only valid at the start of a line for preprocessor directives.
+ yylval->assign(1, yytext[0]);
+ return yyextra->lineStart ? pp::Token::PP_HASH : pp::Token::PP_OTHER;
+}
+
+{IDENTIFIER} {
+ yylval->assign(yytext, yyleng);
+ return pp::Token::IDENTIFIER;
+}
+
+{DECIMAL_CONSTANT}|{OCTAL_CONSTANT}|{HEXADECIMAL_CONSTANT} {
+ yylval->assign(yytext, yyleng);
+ return pp::Token::CONST_INT;
+}
+
+({DIGIT}+{EXPONENT_PART})|({FRACTIONAL_CONSTANT}{EXPONENT_PART}?) {
+ yylval->assign(yytext, yyleng);
+ return pp::Token::CONST_FLOAT;
+}
+
+ /* Anything that starts with a {DIGIT} or .{DIGIT} must be a number. */
+ /* Rule to catch all invalid integers and floats. */
+({DIGIT}+[_a-zA-Z0-9.]*)|("."{DIGIT}+[_a-zA-Z0-9.]*) {
+ yylval->assign(yytext, yyleng);
+ return pp::Token::PP_NUMBER;
+}
+
+"++" {
+ yylval->assign(yytext, yyleng);
+ return pp::Token::OP_INC;
+}
+"--" {
+ yylval->assign(yytext, yyleng);
+ return pp::Token::OP_DEC;
+}
+"<<" {
+ yylval->assign(yytext, yyleng);
+ return pp::Token::OP_LEFT;
+}
+">>" {
+ yylval->assign(yytext, yyleng);
+ return pp::Token::OP_RIGHT;
+}
+"<=" {
+ yylval->assign(yytext, yyleng);
+ return pp::Token::OP_LE;
+}
+">=" {
+ yylval->assign(yytext, yyleng);
+ return pp::Token::OP_GE;
+}
+"==" {
+ yylval->assign(yytext, yyleng);
+ return pp::Token::OP_EQ;
+}
+"!=" {
+ yylval->assign(yytext, yyleng);
+ return pp::Token::OP_NE;
+}
+"&&" {
+ yylval->assign(yytext, yyleng);
+ return pp::Token::OP_AND;
+}
+"^^" {
+ yylval->assign(yytext, yyleng);
+ return pp::Token::OP_XOR;
+}
+"||" {
+ yylval->assign(yytext, yyleng);
+ return pp::Token::OP_OR;
+}
+"+=" {
+ yylval->assign(yytext, yyleng);
+ return pp::Token::OP_ADD_ASSIGN;
+}
+"-=" {
+ yylval->assign(yytext, yyleng);
+ return pp::Token::OP_SUB_ASSIGN;
+}
+"*=" {
+ yylval->assign(yytext, yyleng);
+ return pp::Token::OP_MUL_ASSIGN;
+}
+"/=" {
+ yylval->assign(yytext, yyleng);
+ return pp::Token::OP_DIV_ASSIGN;
+}
+"%=" {
+ yylval->assign(yytext, yyleng);
+ return pp::Token::OP_MOD_ASSIGN;
+}
+"<<=" {
+ yylval->assign(yytext, yyleng);
+ return pp::Token::OP_LEFT_ASSIGN;
+}
+">>=" {
+ yylval->assign(yytext, yyleng);
+ return pp::Token::OP_RIGHT_ASSIGN;
+}
+"&=" {
+ yylval->assign(yytext, yyleng);
+ return pp::Token::OP_AND_ASSIGN;
+}
+"^=" {
+ yylval->assign(yytext, yyleng);
+ return pp::Token::OP_XOR_ASSIGN;
+}
+"|=" {
+ yylval->assign(yytext, yyleng);
+ return pp::Token::OP_OR_ASSIGN;
+}
+
+{PUNCTUATOR} {
+ yylval->assign(1, yytext[0]);
+ return yytext[0];
+}
+
+[ \t\v\f]+ { yyextra->leadingSpace = true; }
+
+{NEWLINE} {
+ ++yylineno;
+ yylval->assign(1, '\n');
+ return '\n';
+}
+
+. {
+ yylval->assign(1, yytext[0]);
+ return pp::Token::PP_OTHER;
+}
+
+<*><<EOF>> {
+ // YY_USER_ACTION is not invoked for handling EOF.
+ // Set the location for EOF token manually.
+ pp::Input* input = &yyextra->input;
+ pp::Input::Location* scanLoc = &yyextra->scanLoc;
+ int sIndexMax = std::max(0, input->count() - 1);
+ if (scanLoc->sIndex != sIndexMax)
+ {
+ // We can only reach here if there are empty strings at the
+ // end of the input.
+ scanLoc->sIndex = sIndexMax; scanLoc->cIndex = 0;
+ yyfileno = sIndexMax; yylineno = 1;
+ }
+ yylloc->file = yyfileno;
+ yylloc->line = yylineno;
+ yylval->clear();
+
+ if (YY_START == COMMENT)
+ {
+ yyextra->diagnostics->report(pp::Diagnostics::EOF_IN_COMMENT,
+ pp::SourceLocation(yyfileno, yylineno),
+ "");
+ }
+ yyterminate();
+}
+
+%%
+
+namespace pp {
+
+// TODO(alokp): Maximum token length should ideally be specified by
+// the preprocessor client, i.e., the compiler.
+const size_t Tokenizer::kMaxTokenLength = 256;
+
+Tokenizer::Tokenizer(Diagnostics* diagnostics) : mHandle(0)
+{
+ mContext.diagnostics = diagnostics;
+}
+
+Tokenizer::~Tokenizer()
+{
+ destroyScanner();
+}
+
+bool Tokenizer::init(int count, const char* const string[], const int length[])
+{
+ if (count < 0) return false;
+ if ((count > 0) && (string == 0)) return false;
+
+ mContext.input = Input(count, string, length);
+ return initScanner();
+}
+
+void Tokenizer::setFileNumber(int file)
+{
+ // We use the column number as the file number.
+ // See macro yyfileno.
+ yyset_column(file, mHandle);
+}
+
+void Tokenizer::setLineNumber(int line)
+{
+ yyset_lineno(line, mHandle);
+}
+
+void Tokenizer::lex(Token* token)
+{
+ token->type = yylex(&token->text, &token->location, mHandle);
+ if (token->text.size() > kMaxTokenLength)
+ {
+ mContext.diagnostics->report(Diagnostics::TOKEN_TOO_LONG,
+ token->location, token->text);
+ token->text.erase(kMaxTokenLength);
+ }
+
+ token->flags = 0;
+
+ token->setAtStartOfLine(mContext.lineStart);
+ mContext.lineStart = token->type == '\n';
+
+ token->setHasLeadingSpace(mContext.leadingSpace);
+ mContext.leadingSpace = false;
+}
+
+bool Tokenizer::initScanner()
+{
+ if ((mHandle == NULL) && yylex_init_extra(&mContext, &mHandle))
+ return false;
+
+ yyrestart(0, mHandle);
+ return true;
+}
+
+void Tokenizer::destroyScanner()
+{
+ if (mHandle == NULL)
+ return;
+
+ yylex_destroy(mHandle);
+ mHandle = NULL;
+}
+
+} // namespace pp
+
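The YY_USER_ACTION block above keeps scanLoc in step with the array of input strings: whenever the running character index walks past the end of the current string, it advances to the next string and bumps the file number. A standalone sketch of just that bookkeeping, with made-up string lengths:

    #include <iostream>
    #include <vector>

    // Advance a (string index, char index) pair across per-string lengths so
    // that the "file" number reported in SourceLocation is the string index.
    void advance(const std::vector<int>& lengths, int* sIndex, int* cIndex)
    {
        while ((*sIndex < static_cast<int>(lengths.size())) &&
               (*cIndex >= lengths[*sIndex]))
        {
            *cIndex -= lengths[(*sIndex)++];
        }
    }

    int main()
    {
        std::vector<int> lengths;
        lengths.push_back(5);   // string 0 is 5 characters long
        lengths.push_back(3);   // string 1 is 3 characters long

        int sIndex = 0, cIndex = 6;  // 6 characters consumed in total
        advance(lengths, &sIndex, &cIndex);
        std::cout << sIndex << ":" << cIndex << "\n";  // prints 1:1
    }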
diff --git a/src/3rdparty/angle/src/compiler/preprocessor/new/numeric_lex.h b/src/3rdparty/angle/src/compiler/preprocessor/new/numeric_lex.h
new file mode 100644
index 0000000000..b04125d230
--- /dev/null
+++ b/src/3rdparty/angle/src/compiler/preprocessor/new/numeric_lex.h
@@ -0,0 +1,61 @@
+//
+// Copyright (c) 2012 The ANGLE Project Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+
+// numeric_lex.h: Functions to extract numeric values from string.
+
+#ifndef COMPILER_PREPROCESSOR_NUMERIC_LEX_H_
+#define COMPILER_PREPROCESSOR_NUMERIC_LEX_H_
+
+#include <sstream>
+
+namespace pp {
+
+inline std::ios::fmtflags numeric_base_int(const std::string& str)
+{
+ if ((str.size() >= 2) &&
+ (str[0] == '0') &&
+ (str[1] == 'x' || str[1] == 'X'))
+ {
+ return std::ios::hex;
+ }
+ else if ((str.size() >= 1) && (str[0] == '0'))
+ {
+ return std::ios::oct;
+ }
+ return std::ios::dec;
+}
+
+// The following functions parse the given string to extract a numerical
+// value of the given type. These functions assume that the string is
+// of the correct form. They can only fail if the parsed value is too big,
+// in which case false is returned.
+
+template<typename IntType>
+bool numeric_lex_int(const std::string& str, IntType* value)
+{
+ std::istringstream stream(str);
+ // This should not be necessary, but MSVS has a buggy implementation.
+ // It returns incorrect results if the base is not specified.
+ stream.setf(numeric_base_int(str), std::ios::basefield);
+
+ stream >> (*value);
+ return !stream.fail();
+}
+
+template<typename FloatType>
+bool numeric_lex_float(const std::string& str, FloatType* value)
+{
+ std::istringstream stream(str);
+ // Force "C" locale so that decimal character is always '.', and
+ // not dependent on the current locale.
+ stream.imbue(std::locale::classic());
+
+ stream >> (*value);
+ return !stream.fail();
+}
+
+} // namespace pp
+#endif // COMPILER_PREPROCESSOR_NUMERIC_LEX_H_
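A quick usage sketch of the two helpers; the literals are only examples. numeric_base_int() picks the base from the prefix, and overflow is reported solely through the return value:

    #include <iostream>
    #include <string>

    #include "numeric_lex.h"

    int main()
    {
        int i = 0;
        unsigned int u = 0;
        float f = 0.0f;

        // Base is inferred from the prefix: "0x1F" is hex, "017" is octal.
        bool okHex = pp::numeric_lex_int(std::string("0x1F"), &i);    // i == 31
        bool okOct = pp::numeric_lex_int(std::string("017"), &u);     // u == 15
        bool okFlt = pp::numeric_lex_float(std::string("1.5e2"), &f); // f == 150.0f

        // A value that cannot fit into an int makes numeric_lex_int return false.
        bool overflow = !pp::numeric_lex_int(std::string("99999999999999999999"), &i);

        std::cout << okHex << okOct << okFlt << overflow << "\n";  // prints 1111
    }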
diff --git a/src/3rdparty/angle/src/compiler/preprocessor/new/pp_utils.h b/src/3rdparty/angle/src/compiler/preprocessor/new/pp_utils.h
new file mode 100644
index 0000000000..17164ea8b0
--- /dev/null
+++ b/src/3rdparty/angle/src/compiler/preprocessor/new/pp_utils.h
@@ -0,0 +1,18 @@
+//
+// Copyright (c) 2012 The ANGLE Project Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+
+// pp_utils.h: Common preprocessor utilities
+
+#ifndef COMPILER_PREPROCESSOR_PPUTILS_H_
+#define COMPILER_PREPROCESSOR_PPUTILS_H_
+
+// A macro to disallow the copy constructor and operator= functions
+// This must be used in the private: declarations for a class.
+#define PP_DISALLOW_COPY_AND_ASSIGN(TypeName) \
+ TypeName(const TypeName&); \
+ void operator=(const TypeName&)
+
+#endif // COMPILER_PREPROCESSOR_PPUTILS_H_
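For completeness, a hypothetical example of how the macro is meant to be used (ScopedThing is not part of the diff): placed in the private section, it declares but never defines the copy constructor and copy assignment operator, so accidental copies fail to compile or link.

    #include "pp_utils.h"

    class ScopedThing
    {
      public:
        ScopedThing() { }

      private:
        // Expands to private copy constructor and operator= declarations.
        PP_DISALLOW_COPY_AND_ASSIGN(ScopedThing);
    };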