aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorRoberto Raggi <roberto.raggi@nokia.com>2012-05-31 17:32:49 +0200
committerRoberto Raggi <roberto.raggi@nokia.com>2012-05-31 17:32:49 +0200
commitd65f9ddff1fd9c1aac037bb1163c6fbc7e9a637c (patch)
tree6d88cc36fd94b3db95d3a9fbc05ad0351b1fd82c
parent8ccb1e0e683b8c047a7ccfa068392873d72ca6c4 (diff)
Generalized instruction selection.
This will simplify the instruction selection pass for different architectures.
-rw-r--r--main.cpp1
-rw-r--r--qv4ir_p.h36
-rw-r--r--qv4isel.cpp969
-rw-r--r--qv4isel_p.h401
-rw-r--r--qv4isel_x86_64.cpp971
-rw-r--r--qv4isel_x86_64_p.h54
-rw-r--r--v4.pro7
7 files changed, 1408 insertions, 1031 deletions
diff --git a/main.cpp b/main.cpp
index 29833a939f..87c9095bb8 100644
--- a/main.cpp
+++ b/main.cpp
@@ -2,6 +2,7 @@
#include "qmljs_objects.h"
#include "qv4codegen_p.h"
#include "qv4isel_p.h"
+#include "qv4isel_x86_64_p.h"
#include "qv4syntaxchecker_p.h"
#include "qv4ecmaobjects_p.h"
diff --git a/qv4ir_p.h b/qv4ir_p.h
index c60c5def4b..31914265cb 100644
--- a/qv4ir_p.h
+++ b/qv4ir_p.h
@@ -171,28 +171,28 @@ const char *typeName(IR::Type t);
struct ExprVisitor {
virtual ~ExprVisitor() {}
- virtual void visitConst(Const *) {}
- virtual void visitString(String *) {}
- virtual void visitName(Name *) {}
- virtual void visitTemp(Temp *) {}
- virtual void visitClosure(Closure *) {}
- virtual void visitUnop(Unop *) {}
- virtual void visitBinop(Binop *) {}
- virtual void visitCall(Call *) {}
- virtual void visitNew(New *) {}
- virtual void visitSubscript(Subscript *) {}
- virtual void visitMember(Member *) {}
+ virtual void visitConst(Const *) = 0;
+ virtual void visitString(String *) = 0;
+ virtual void visitName(Name *) = 0;
+ virtual void visitTemp(Temp *) = 0;
+ virtual void visitClosure(Closure *) = 0;
+ virtual void visitUnop(Unop *) = 0;
+ virtual void visitBinop(Binop *) = 0;
+ virtual void visitCall(Call *) = 0;
+ virtual void visitNew(New *) = 0;
+ virtual void visitSubscript(Subscript *) = 0;
+ virtual void visitMember(Member *) = 0;
};
struct StmtVisitor {
virtual ~StmtVisitor() {}
- virtual void visitExp(Exp *) {}
- virtual void visitEnter(Enter *) {}
- virtual void visitLeave(Leave *) {}
- virtual void visitMove(Move *) {}
- virtual void visitJump(Jump *) {}
- virtual void visitCJump(CJump *) {}
- virtual void visitRet(Ret *) {}
+ virtual void visitExp(Exp *) = 0;
+ virtual void visitEnter(Enter *) = 0;
+ virtual void visitLeave(Leave *) = 0;
+ virtual void visitMove(Move *) = 0;
+ virtual void visitJump(Jump *) = 0;
+ virtual void visitCJump(CJump *) = 0;
+ virtual void visitRet(Ret *) = 0;
};
struct Expr {
diff --git a/qv4isel.cpp b/qv4isel.cpp
index 8407f3f38e..1e833bb490 100644
--- a/qv4isel.cpp
+++ b/qv4isel.cpp
@@ -1,971 +1,2 @@
#include "qv4isel_p.h"
-#include "qmljs_runtime.h"
-#include "qmljs_objects.h"
-
-#define TARGET_AMD64
-#define g_assert assert
-typedef quint64 guint64;
-typedef qint64 gint64;
-typedef uchar guint8;
-typedef uint guint32;
-typedef void *gpointer;
-#include "amd64-codegen.h"
-
-#include <sys/mman.h>
-#include <iostream>
-#include <cassert>
-
-#ifndef NO_UDIS86
-# include <udis86.h>
-#endif
-
-using namespace QQmlJS;
-using namespace QQmlJS::x86_64;
-using namespace QQmlJS::VM;
-
-namespace {
-QTextStream qout(stdout, QIODevice::WriteOnly);
-}
-
-static inline void
-amd64_patch (unsigned char* code, gpointer target)
-{
- guint8 rex = 0;
-
-#ifdef __native_client_codegen__
- code = amd64_skip_nops (code);
-#endif
-#if defined(__native_client_codegen__) && defined(__native_client__)
- if (nacl_is_code_address (code)) {
- /* For tail calls, code is patched after being installed */
- /* but not through the normal "patch callsite" method. */
- unsigned char buf[kNaClAlignment];
- unsigned char *aligned_code = (uintptr_t)code & ~kNaClAlignmentMask;
- int ret;
- memcpy (buf, aligned_code, kNaClAlignment);
- /* Patch a temp buffer of bundle size, */
- /* then install to actual location. */
- amd64_patch (buf + ((uintptr_t)code - (uintptr_t)aligned_code), target);
- ret = nacl_dyncode_modify (aligned_code, buf, kNaClAlignment);
- g_assert (ret == 0);
- return;
- }
- target = nacl_modify_patch_target (target);
-#endif
-
- /* Skip REX */
- if ((code [0] >= 0x40) && (code [0] <= 0x4f)) {
- rex = code [0];
- code += 1;
- }
-
- if ((code [0] & 0xf8) == 0xb8) {
- /* amd64_set_reg_template */
- *(guint64*)(code + 1) = (guint64)target;
- }
- else if ((code [0] == 0x8b) && rex && x86_modrm_mod (code [1]) == 0 && x86_modrm_rm (code [1]) == 5) {
- /* mov 0(%rip), %dreg */
- *(guint32*)(code + 2) = (guint32)(guint64)target - 7;
- }
- else if ((code [0] == 0xff) && (code [1] == 0x15)) {
- /* call *<OFFSET>(%rip) */
- *(guint32*)(code + 2) = ((guint32)(guint64)target) - 7;
- }
- else if (code [0] == 0xe8) {
- /* call <DISP> */
- gint64 disp = (guint8*)target - (guint8*)code;
- assert (amd64_is_imm32 (disp));
- x86_patch (code, (unsigned char*)target);
- }
- else
- x86_patch (code, (unsigned char*)target);
-}
-InstructionSelection::InstructionSelection(VM::ExecutionEngine *engine, IR::Module *module, uchar *buffer)
- : _engine(engine)
- , _module(module)
- , _function(0)
- , _block(0)
- , _buffer(buffer)
- , _code(buffer)
- , _codePtr(buffer)
-{
-}
-
-InstructionSelection::~InstructionSelection()
-{
-}
-
-void InstructionSelection::operator()(IR::Function *function)
-{
- qSwap(_function, function);
-
- _code = _codePtr;
- _code = (uchar *) ((size_t(_code) + 15) & ~15);
- _function->code = (void (*)(VM::Context *)) _code;
- _codePtr = _code;
-
- int locals = (_function->tempCount - _function->locals.size() + _function->maxNumberOfArguments) * sizeof(Value);
- locals = (locals + 15) & ~15;
-
- amd64_push_reg(_codePtr, AMD64_RBP);
- amd64_push_reg(_codePtr, AMD64_R14);
- amd64_push_reg(_codePtr, AMD64_R15);
-
- amd64_mov_reg_reg(_codePtr, AMD64_RBP, AMD64_RSP, 8);
- amd64_mov_reg_reg(_codePtr, AMD64_R14, AMD64_RDI, 8);
- amd64_alu_reg_imm(_codePtr, X86_SUB, AMD64_RSP, locals);
-
- amd64_mov_reg_membase(_codePtr, AMD64_R15, AMD64_R14, offsetof(Context, locals), 8);
-
- foreach (IR::BasicBlock *block, _function->basicBlocks) {
- _block = block;
- _addrs[block] = _codePtr;
- foreach (IR::Stmt *s, block->statements) {
- s->accept(this);
- }
- }
-
- QHashIterator<IR::BasicBlock *, QVector<uchar *> > it(_patches);
- while (it.hasNext()) {
- it.next();
- uchar *target = _addrs[it.key()];
- foreach (uchar *instr, it.value()) {
- amd64_patch(instr, target);
- }
- }
-
- amd64_alu_reg_imm(_codePtr, X86_ADD, AMD64_RSP, locals);
- amd64_pop_reg(_codePtr, AMD64_R15);
- amd64_pop_reg(_codePtr, AMD64_R14);
- amd64_pop_reg(_codePtr, AMD64_RBP);
- amd64_ret(_codePtr);
-
-#ifndef NO_UDIS86
- static bool showCode = !qgetenv("SHOW_CODE").isNull();
- if (showCode) {
- printf("code size: %ld bytes\n", (_codePtr - _code));
- ud_t ud_obj;
-
- ud_init(&ud_obj);
- ud_set_input_buffer(&ud_obj, _code, _codePtr - _code);
- ud_set_mode(&ud_obj, 64);
- ud_set_syntax(&ud_obj, UD_SYN_ATT);
-
- while (ud_disassemble(&ud_obj)) {
- printf("\t%s\n", ud_insn_asm(&ud_obj));
- }
- }
-#endif
- qSwap(_function, _function);
-}
-
-String *InstructionSelection::identifier(const QString &s)
-{
- return _engine->identifier(s);
-}
-
-void InstructionSelection::loadTempAddress(int reg, IR::Temp *t)
-{
- if (t->index < 0) {
- const int arg = -t->index - 1;
- amd64_mov_reg_membase(_codePtr, reg, AMD64_R14, offsetof(Context, arguments), 8);
- amd64_lea_membase(_codePtr, reg, reg, sizeof(Value) * arg);
- } else if (t->index < _function->locals.size()) {
- amd64_lea_membase(_codePtr, reg, AMD64_R15, sizeof(Value) * t->index);
- } else {
- amd64_lea_membase(_codePtr, reg, AMD64_RSP, sizeof(Value) * (_function->maxNumberOfArguments + t->index - _function->locals.size()));
- }
-}
-
-void InstructionSelection::callActivationProperty(IR::Call *call, IR::Temp *result)
-{
- IR::Name *baseName = call->base->asName();
- assert(baseName != 0);
-
- int argc = 0;
- for (IR::ExprList *it = call->args; it; it = it->next) {
- ++argc;
- }
-
- int i = 0;
- for (IR::ExprList *it = call->args; it; it = it->next, ++i) {
- IR::Temp *arg = it->expr->asTemp();
- assert(arg != 0);
- amd64_lea_membase(_codePtr, AMD64_RDI, AMD64_RSP, sizeof(Value) * i);
- loadTempAddress(AMD64_RSI, arg);
- amd64_call_code(_codePtr, __qmljs_copy);
- }
-
- amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8); // load the context
-
- if (result)
- loadTempAddress(AMD64_RSI, result);
- else
- amd64_alu_reg_reg(_codePtr, X86_XOR, AMD64_RSI, AMD64_RSI);
-
- if (baseName->id) {
- amd64_mov_reg_imm(_codePtr, AMD64_RDX, identifier(*baseName->id));
- amd64_lea_membase(_codePtr, AMD64_RCX, AMD64_RSP, 0);
- amd64_mov_reg_imm(_codePtr, AMD64_R8, argc);
- amd64_call_code(_codePtr, __qmljs_call_activation_property);
- } else {
- switch (baseName->builtin) {
- case IR::Name::builtin_invalid:
- Q_UNREACHABLE();
- break;
- case IR::Name::builtin_typeof:
- amd64_lea_membase(_codePtr, AMD64_RDX, AMD64_RSP, 0);
- amd64_mov_reg_imm(_codePtr, AMD64_RCX, argc);
- amd64_call_code(_codePtr, __qmljs_builtin_typeof);
- break;
- case IR::Name::builtin_throw:
- amd64_lea_membase(_codePtr, AMD64_RDX, AMD64_RSP, 0);
- amd64_mov_reg_imm(_codePtr, AMD64_RCX, argc);
- amd64_call_code(_codePtr, __qmljs_builtin_throw);
- break;
- case IR::Name::builtin_rethrow:
- amd64_lea_membase(_codePtr, AMD64_RDX, AMD64_RSP, 0);
- amd64_mov_reg_imm(_codePtr, AMD64_RCX, argc);
- amd64_call_code(_codePtr, __qmljs_builtin_rethrow);
- return; // we need to return to avoid checking the exceptions
- }
- }
-
- checkExceptions();
-}
-
-
-void InstructionSelection::callValue(IR::Call *call, IR::Temp *result)
-{
- IR::Temp *baseTemp = call->base->asTemp();
- assert(baseTemp != 0);
-
- int argc = 0;
- for (IR::ExprList *it = call->args; it; it = it->next) {
- ++argc;
- }
-
- int i = 0;
- for (IR::ExprList *it = call->args; it; it = it->next, ++i) {
- IR::Temp *arg = it->expr->asTemp();
- assert(arg != 0);
- amd64_lea_membase(_codePtr, AMD64_RDI, AMD64_RSP, sizeof(Value) * i);
- loadTempAddress(AMD64_RSI, arg);
- amd64_call_code(_codePtr, __qmljs_copy);
- }
-
- amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8); // load the context
-
- if (result)
- loadTempAddress(AMD64_RSI, result);
- else
- amd64_alu_reg_reg(_codePtr, X86_XOR, AMD64_RSI, AMD64_RSI);
-
- amd64_alu_reg_reg(_codePtr, X86_XOR, AMD64_RDX, AMD64_RDX);
- loadTempAddress(AMD64_RCX, baseTemp);
- amd64_lea_membase(_codePtr, AMD64_R8, AMD64_RSP, 0);
- amd64_mov_reg_imm(_codePtr, AMD64_R9, argc);
- amd64_call_code(_codePtr, __qmljs_call_value);
-
- checkExceptions();
-}
-
-void InstructionSelection::callProperty(IR::Call *call, IR::Temp *result)
-{
- IR::Member *member = call->base->asMember();
- assert(member != 0);
- assert(member->base->asTemp());
-
- int argc = 0;
- for (IR::ExprList *it = call->args; it; it = it->next) {
- ++argc;
- }
-
- int i = 0;
- for (IR::ExprList *it = call->args; it; it = it->next, ++i) {
- IR::Temp *arg = it->expr->asTemp();
- assert(arg != 0);
- amd64_lea_membase(_codePtr, AMD64_RDI, AMD64_RSP, sizeof(Value) * i);
- loadTempAddress(AMD64_RSI, arg);
- amd64_call_code(_codePtr, __qmljs_copy);
- }
-
- //__qmljs_call_property(ctx, result, base, name, args, argc);
- amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8); // load the context
-
- if (result)
- loadTempAddress(AMD64_RSI, result);
- else
- amd64_alu_reg_reg(_codePtr, X86_XOR, AMD64_RSI, AMD64_RSI);
-
- loadTempAddress(AMD64_RDX, member->base->asTemp());
- amd64_mov_reg_imm(_codePtr, AMD64_RCX, identifier(*member->name));
- amd64_lea_membase(_codePtr, AMD64_R8, AMD64_RSP, 0);
- amd64_mov_reg_imm(_codePtr, AMD64_R9, argc);
- amd64_call_code(_codePtr, __qmljs_call_property);
-
- checkExceptions();
-}
-
-void InstructionSelection::constructActivationProperty(IR::New *call, IR::Temp *result)
-{
- IR::Name *baseName = call->base->asName();
- assert(baseName != 0);
-
- int argc = 0;
- for (IR::ExprList *it = call->args; it; it = it->next) {
- ++argc;
- }
-
- int i = 0;
- for (IR::ExprList *it = call->args; it; it = it->next, ++i) {
- IR::Temp *arg = it->expr->asTemp();
- assert(arg != 0);
- amd64_lea_membase(_codePtr, AMD64_RDI, AMD64_RSP, sizeof(Value) * i);
- loadTempAddress(AMD64_RSI, arg);
- amd64_call_code(_codePtr, __qmljs_copy);
- }
-
- amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8); // load the context
-
- if (result)
- loadTempAddress(AMD64_RSI, result);
- else
- amd64_alu_reg_reg(_codePtr, X86_XOR, AMD64_RSI, AMD64_RSI);
-
- amd64_mov_reg_imm(_codePtr, AMD64_RDX, identifier(*baseName->id));
- amd64_lea_membase(_codePtr, AMD64_RCX, AMD64_RSP, 0);
- amd64_mov_reg_imm(_codePtr, AMD64_R8, argc);
- amd64_call_code(_codePtr, __qmljs_construct_activation_property);
-
- checkExceptions();
-}
-
-void InstructionSelection::constructProperty(IR::New *call, IR::Temp *result)
-{
- IR::Member *member = call->base->asMember();
- assert(member != 0);
- assert(member->base->asTemp());
-
- int argc = 0;
- for (IR::ExprList *it = call->args; it; it = it->next) {
- ++argc;
- }
-
- int i = 0;
- for (IR::ExprList *it = call->args; it; it = it->next, ++i) {
- IR::Temp *arg = it->expr->asTemp();
- assert(arg != 0);
- amd64_lea_membase(_codePtr, AMD64_RDI, AMD64_RSP, sizeof(Value) * i);
- loadTempAddress(AMD64_RSI, arg);
- amd64_call_code(_codePtr, __qmljs_copy);
- }
-
- //__qmljs_call_property(ctx, result, base, name, args, argc);
- amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8); // load the context
-
- if (result)
- loadTempAddress(AMD64_RSI, result);
- else
- amd64_alu_reg_reg(_codePtr, X86_XOR, AMD64_RSI, AMD64_RSI);
-
- loadTempAddress(AMD64_RDX, member->base->asTemp());
- amd64_mov_reg_imm(_codePtr, AMD64_RCX, identifier(*member->name));
- amd64_lea_membase(_codePtr, AMD64_R8, AMD64_RSP, 0);
- amd64_mov_reg_imm(_codePtr, AMD64_R9, argc);
- amd64_call_code(_codePtr, __qmljs_construct_property);
-
- checkExceptions();
-}
-
-void InstructionSelection::constructValue(IR::New *call, IR::Temp *result)
-{
- IR::Temp *baseTemp = call->base->asTemp();
- assert(baseTemp != 0);
-
- int argc = 0;
- for (IR::ExprList *it = call->args; it; it = it->next) {
- ++argc;
- }
-
- int i = 0;
- for (IR::ExprList *it = call->args; it; it = it->next, ++i) {
- IR::Temp *arg = it->expr->asTemp();
- assert(arg != 0);
- amd64_lea_membase(_codePtr, AMD64_RDI, AMD64_RSP, sizeof(Value) * i);
- loadTempAddress(AMD64_RSI, arg);
- amd64_call_code(_codePtr, __qmljs_copy);
- }
-
- amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8); // load the context
-
- if (result)
- loadTempAddress(AMD64_RSI, result);
- else
- amd64_alu_reg_reg(_codePtr, X86_XOR, AMD64_RSI, AMD64_RSI);
-
- loadTempAddress(AMD64_RDX, baseTemp);
- amd64_lea_membase(_codePtr, AMD64_RCX, AMD64_RSP, 0);
- amd64_mov_reg_imm(_codePtr, AMD64_R8, argc);
- amd64_call_code(_codePtr, __qmljs_construct_value);
-}
-
-void InstructionSelection::checkExceptions()
-{
- amd64_mov_reg_membase(_codePtr, AMD64_RAX, AMD64_R14, offsetof(Context, hasUncaughtException), 4);
- amd64_alu_reg_imm_size(_codePtr, X86_CMP, AMD64_RAX, 1, 1);
- _patches[_function->handlersBlock].append(_codePtr);
- amd64_branch32(_codePtr, X86_CC_E, 0, 1);
-}
-
-void InstructionSelection::visitExp(IR::Exp *s)
-{
- if (IR::Call *c = s->expr->asCall()) {
- if (c->base->asName()) {
- callActivationProperty(c, 0);
- return;
- } else if (c->base->asTemp()) {
- callValue(c, 0);
- return;
- } else if (c->base->asMember()) {
- callProperty(c, 0);
- return;
- }
- }
- Q_UNIMPLEMENTED();
- assert(!"TODO");
-}
-
-void InstructionSelection::visitEnter(IR::Enter *)
-{
- Q_UNIMPLEMENTED();
- assert(!"TODO");
-}
-
-void InstructionSelection::visitLeave(IR::Leave *)
-{
- Q_UNIMPLEMENTED();
- assert(!"TODO");
-}
-
-void InstructionSelection::visitMove(IR::Move *s)
-{
- // %rdi, %rsi, %rdx, %rcx, %r8 and %r9
- if (s->op == IR::OpInvalid) {
- if (IR::Name *n = s->target->asName()) {
- String *propertyName = identifier(*n->id);
-
- if (IR::Const *c = s->source->asConst()) {
- amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8);
- amd64_mov_reg_imm(_codePtr, AMD64_RSI, propertyName);
-
- switch (c->type) {
- case IR::BoolType:
- amd64_mov_reg_imm(_codePtr, AMD64_RDX, c->value != 0);
- amd64_call_code(_codePtr, __qmljs_set_activation_property_boolean);
- break;
-
- case IR::NumberType:
- amd64_mov_reg_imm(_codePtr, AMD64_RAX, &c->value);
- amd64_movsd_reg_regp(_codePtr, AMD64_XMM0, AMD64_RAX);
- amd64_call_code(_codePtr, __qmljs_set_activation_property_number);
- break;
-
- default:
- Q_UNIMPLEMENTED();
- assert(!"TODO");
- }
- } else if (IR::String *str = s->source->asString()) {
- amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8);
- amd64_mov_reg_imm(_codePtr, AMD64_RSI, propertyName);
- amd64_mov_reg_imm(_codePtr, AMD64_RDX, _engine->newString(*str->value));
- amd64_call_code(_codePtr, __qmljs_set_activation_property_string);
- } else if (IR::Temp *t = s->source->asTemp()) {
- amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8);
- amd64_mov_reg_imm(_codePtr, AMD64_RSI, propertyName);
- loadTempAddress(AMD64_RDX, t);
- amd64_call_code(_codePtr, __qmljs_set_activation_property);
- } else if (IR::Name *other = s->source->asName()) {
- amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8);
- amd64_mov_reg_imm(_codePtr, AMD64_RSI, propertyName);
- amd64_mov_reg_imm(_codePtr, AMD64_RDX, identifier(*other->id));
- amd64_call_code(_codePtr, __qmljs_copy_activation_property);
- } else if (IR::Closure *clos = s->source->asClosure()) {
- amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8);
- amd64_mov_reg_imm(_codePtr, AMD64_RSI, propertyName);
- amd64_mov_reg_imm(_codePtr, AMD64_RDX, clos->value);
- amd64_call_code(_codePtr, __qmljs_set_activation_property_closure);
- } else {
- Q_UNIMPLEMENTED();
- assert(!"TODO");
- }
-
- checkExceptions();
- return;
- } else if (IR::Temp *t = s->target->asTemp()) {
- if (IR::Name *n = s->source->asName()) {
- amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8);
- loadTempAddress(AMD64_RSI, t);
- if (*n->id == QStringLiteral("this")) { // ### `this' should be a builtin.
- amd64_call_code(_codePtr, __qmljs_get_thisObject);
- } else {
- String *propertyName = identifier(*n->id);
- amd64_mov_reg_imm(_codePtr, AMD64_RDX, propertyName);
- amd64_call_code(_codePtr, __qmljs_get_activation_property);
- checkExceptions();
- }
- return;
- } else if (IR::Const *c = s->source->asConst()) {
- loadTempAddress(AMD64_RSI, t);
-
- switch (c->type) {
- case IR::NullType:
- amd64_mov_membase_imm(_codePtr, AMD64_RSI, 0, NULL_TYPE, 4);
- break;
-
- case IR::UndefinedType:
- amd64_mov_membase_imm(_codePtr, AMD64_RSI, 0, UNDEFINED_TYPE, 4);
- break;
-
- case IR::BoolType:
- amd64_mov_membase_imm(_codePtr, AMD64_RSI, 0, BOOLEAN_TYPE, 4);
- amd64_mov_membase_imm(_codePtr, AMD64_RSI, offsetof(Value, booleanValue), c->value != 0, 1);
- break;
-
- case IR::NumberType:
- amd64_mov_reg_imm(_codePtr, AMD64_RAX, &c->value);
- amd64_movsd_reg_regp(_codePtr, AMD64_XMM0, AMD64_RAX);
- amd64_mov_membase_imm(_codePtr, AMD64_RSI, 0, NUMBER_TYPE, 4);
- amd64_movsd_membase_reg(_codePtr, AMD64_RSI, offsetof(Value, numberValue), AMD64_XMM0);
- break;
-
- default:
- Q_UNIMPLEMENTED();
- assert(!"TODO");
- }
- return;
- } else if (IR::Temp *t2 = s->source->asTemp()) {
- loadTempAddress(AMD64_RDI, t);
- loadTempAddress(AMD64_RSI, t2);
- amd64_mov_reg_membase(_codePtr, AMD64_RAX, AMD64_RSI, 0, 4);
- amd64_mov_membase_reg(_codePtr, AMD64_RDI, 0, AMD64_RAX, 4);
- amd64_mov_reg_membase(_codePtr, AMD64_RAX, AMD64_RSI, offsetof(Value, numberValue), 8);
- amd64_mov_membase_reg(_codePtr, AMD64_RDI, offsetof(Value, numberValue), AMD64_RAX, 8);
- return;
- } else if (IR::String *str = s->source->asString()) {
- loadTempAddress(AMD64_RDI, t);
- amd64_mov_reg_imm(_codePtr, AMD64_RSI, _engine->newString(*str->value));
- amd64_call_code(_codePtr, __qmljs_init_string);
- return;
- } else if (IR::Closure *clos = s->source->asClosure()) {
- amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8);
- loadTempAddress(AMD64_RSI, t);
- amd64_mov_reg_imm(_codePtr, AMD64_RDX, clos->value);
- amd64_call_code(_codePtr, __qmljs_init_closure);
- return;
- } else if (IR::New *ctor = s->source->asNew()) {
- if (ctor->base->asName()) {
- constructActivationProperty(ctor, t);
- return;
- } else if (ctor->base->asMember()) {
- constructProperty(ctor, t);
- return;
- } else if (ctor->base->asTemp()) {
- constructValue(ctor, t);
- return;
- }
- } else if (IR::Member *m = s->source->asMember()) {
- //__qmljs_get_property(ctx, result, object, name);
- if (IR::Temp *base = m->base->asTemp()) {
- amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8);
- loadTempAddress(AMD64_RSI, t);
- loadTempAddress(AMD64_RDX, base);
- amd64_mov_reg_imm(_codePtr, AMD64_RCX, identifier(*m->name));
- amd64_call_code(_codePtr, __qmljs_get_property);
- checkExceptions();
- return;
- }
- assert(!"wip");
- return;
- } else if (IR::Subscript *ss = s->source->asSubscript()) {
- amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8);
- loadTempAddress(AMD64_RSI, t);
- loadTempAddress(AMD64_RDX, ss->base->asTemp());
- loadTempAddress(AMD64_RCX, ss->index->asTemp());
- amd64_call_code(_codePtr, __qmljs_get_element);
- checkExceptions();
- return;
- } else if (IR::Unop *u = s->source->asUnop()) {
- if (IR::Temp *e = u->expr->asTemp()) {
- amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8);
- loadTempAddress(AMD64_RSI, t);
- loadTempAddress(AMD64_RDX, e);
- void (*op)(Context *, Value *, const Value *) = 0;
- switch (u->op) {
- case IR::OpIfTrue: assert(!"unreachable"); break;
- case IR::OpNot: op = __qmljs_not; break;
- case IR::OpUMinus: op = __qmljs_uminus; break;
- case IR::OpUPlus: op = __qmljs_uplus; break;
- case IR::OpCompl: op = __qmljs_compl; break;
- default: assert(!"unreachable"); break;
- } // switch
- amd64_call_code(_codePtr, op);
- return;
- } else if (IR::Const *c = u->expr->asConst()) {
- assert(!"wip");
- return;
- }
- } else if (IR::Binop *b = s->source->asBinop()) {
- IR::Temp *l = b->left->asTemp();
- IR::Temp *r = b->right->asTemp();
- if (l && r) {
- amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8);
- loadTempAddress(AMD64_RSI, t);
- loadTempAddress(AMD64_RDX, l);
- loadTempAddress(AMD64_RCX, r);
-
- uchar *label1 = 0, *label2 = 0, *label3 = 0;
-
- if (b->op == IR::OpMul || b->op == IR::OpAdd || b->op == IR::OpSub || b->op == IR::OpDiv) {
- amd64_alu_membase_imm_size(_codePtr, X86_CMP, AMD64_RDX, 0, NUMBER_TYPE, 4);
- label1 = _codePtr;
- amd64_branch8(_codePtr, X86_CC_NE, 0, 0);
- amd64_alu_membase_imm_size(_codePtr, X86_CMP, AMD64_RCX, 0, NUMBER_TYPE, 4);
- label2 = _codePtr;
- amd64_branch8(_codePtr, X86_CC_NE, 0, 0);
- amd64_movsd_reg_membase(_codePtr, AMD64_XMM0, AMD64_RDX, offsetof(Value, numberValue));
- amd64_movsd_reg_membase(_codePtr, AMD64_XMM1, AMD64_RCX, offsetof(Value, numberValue));
- switch (b->op) {
- case IR::OpAdd:
- amd64_sse_addsd_reg_reg(_codePtr, AMD64_XMM0, AMD64_XMM1);
- break;
- case IR::OpSub:
- amd64_sse_subsd_reg_reg(_codePtr, AMD64_XMM0, AMD64_XMM1);
- break;
- case IR::OpMul:
- amd64_sse_mulsd_reg_reg(_codePtr, AMD64_XMM0, AMD64_XMM1);
- break;
- case IR::OpDiv:
- amd64_sse_divsd_reg_reg(_codePtr, AMD64_XMM0, AMD64_XMM1);
- break;
- default:
- Q_UNREACHABLE();
- } // switch
-
- amd64_mov_membase_imm(_codePtr, AMD64_RSI, 0, NUMBER_TYPE, 4);
- amd64_movsd_membase_reg(_codePtr, AMD64_RSI, offsetof(Value, numberValue), AMD64_XMM0);
- label3 = _codePtr;
- amd64_jump32(_codePtr, 0);
- }
-
-
- if (label1 && label2) {
- amd64_patch(label1, _codePtr);
- amd64_patch(label2, _codePtr);
- }
-
- void (*op)(Context *, Value *, const Value *, const Value *) = 0;
-
- switch ((IR::AluOp) b->op) {
- case IR::OpInvalid:
- case IR::OpIfTrue:
- case IR::OpNot:
- case IR::OpUMinus:
- case IR::OpUPlus:
- case IR::OpCompl:
- assert(!"unreachable");
- break;
-
- case IR::OpBitAnd: op = __qmljs_bit_and; break;
- case IR::OpBitOr: op = __qmljs_bit_or; break;
- case IR::OpBitXor: op = __qmljs_bit_xor; break;
- case IR::OpAdd: op = __qmljs_add; break;
- case IR::OpSub: op = __qmljs_sub; break;
- case IR::OpMul: op = __qmljs_mul; break;
- case IR::OpDiv: op = __qmljs_div; break;
- case IR::OpMod: op = __qmljs_mod; break;
- case IR::OpLShift: op = __qmljs_shl; break;
- case IR::OpRShift: op = __qmljs_shr; break;
- case IR::OpURShift: op = __qmljs_ushr; break;
- case IR::OpGt: op = __qmljs_gt; break;
- case IR::OpLt: op = __qmljs_lt; break;
- case IR::OpGe: op = __qmljs_ge; break;
- case IR::OpLe: op = __qmljs_le; break;
- case IR::OpEqual: op = __qmljs_eq; break;
- case IR::OpNotEqual: op = __qmljs_ne; break;
- case IR::OpStrictEqual: op = __qmljs_se; break;
- case IR::OpStrictNotEqual: op = __qmljs_sne; break;
- case IR::OpInstanceof: op = __qmljs_instanceof; break;
-
- case IR::OpIn:
- Q_UNIMPLEMENTED();
- assert(!"TODO");
- break;
-
- case IR::OpAnd:
- case IR::OpOr:
- assert(!"unreachable");
- break;
- }
- amd64_call_code(_codePtr, op);
- if (label3)
- amd64_patch(label3, _codePtr);
- return;
- }
- } else if (IR::Call *c = s->source->asCall()) {
- if (c->base->asName()) {
- callActivationProperty(c, t);
- return;
- } else if (c->base->asMember()) {
- callProperty(c, t);
- return;
- } else if (c->base->asTemp()) {
- callValue(c, t);
- return;
- }
- }
- } else if (IR::Member *m = s->target->asMember()) {
- if (IR::Temp *base = m->base->asTemp()) {
- if (IR::Const *c = s->source->asConst()) {
- amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8);
- loadTempAddress(AMD64_RSI, base);
- amd64_mov_reg_imm(_codePtr, AMD64_RDX, identifier(*m->name));
- amd64_mov_reg_imm(_codePtr, AMD64_RAX, &c->value);
- amd64_movsd_reg_regp(_codePtr, AMD64_XMM0, AMD64_RAX);
- amd64_call_code(_codePtr, __qmljs_set_property_number);
- } else if (IR::String *str = s->source->asString()) {
- amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8);
- loadTempAddress(AMD64_RSI, base);
- amd64_mov_reg_imm(_codePtr, AMD64_RDX, identifier(*m->name));
- amd64_mov_reg_imm(_codePtr, AMD64_RCX, _engine->newString(*str->value));
- amd64_call_code(_codePtr, __qmljs_set_property_string);
- } else if (IR::Temp *t = s->source->asTemp()) {
- // __qmljs_set_property(ctx, object, name, value);
- amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8);
- loadTempAddress(AMD64_RSI, base);
- amd64_mov_reg_imm(_codePtr, AMD64_RDX, identifier(*m->name));
- loadTempAddress(AMD64_RCX, t);
- amd64_call_code(_codePtr, __qmljs_set_property);
- } else if (IR::Closure *clos = s->source->asClosure()) {
- amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8);
- loadTempAddress(AMD64_RSI, base);
- amd64_mov_reg_imm(_codePtr, AMD64_RDX, identifier(*m->name));
- amd64_mov_reg_imm(_codePtr, AMD64_RCX, clos->value);
- amd64_call_code(_codePtr, __qmljs_set_property_closure);
- } else {
- Q_UNIMPLEMENTED();
- assert(!"TODO");
- }
- checkExceptions();
- return;
- }
- } else if (IR::Subscript *ss = s->target->asSubscript()) {
- if (IR::Temp *t2 = s->source->asTemp()) {
- loadTempAddress(AMD64_RSI, ss->base->asTemp());
- loadTempAddress(AMD64_RDX, ss->index->asTemp());
- loadTempAddress(AMD64_RCX, t2);
- amd64_call_code(_codePtr, __qmljs_set_element);
- } else if (IR::Const *c = s->source->asConst()) {
- if (c->type == IR::NumberType) {
- amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8);
- loadTempAddress(AMD64_RSI, ss->base->asTemp());
- loadTempAddress(AMD64_RDX, ss->index->asTemp());
- amd64_mov_reg_imm(_codePtr, AMD64_RAX, &c->value);
- amd64_movsd_reg_regp(_codePtr, AMD64_XMM0, AMD64_RAX);
- amd64_call_code(_codePtr, __qmljs_set_element_number);
- } else {
- Q_UNIMPLEMENTED();
- assert(!"TODO");
- }
- } else {
- Q_UNIMPLEMENTED();
- assert(!"TODO");
- }
- checkExceptions();
- return;
- }
- } else {
- // inplace assignment, e.g. x += 1, ++x, ...
- if (IR::Temp *t = s->target->asTemp()) {
- if (IR::Const *c = s->source->asConst()) {
- amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8);
- loadTempAddress(AMD64_RSI, t);
- amd64_mov_reg_imm(_codePtr, AMD64_RAX, &c->value);
- amd64_movsd_reg_regp(_codePtr, AMD64_XMM0, AMD64_RAX);
-
- void (*op)(Context *, Value *, double);
- switch (s->op) {
- case IR::OpBitAnd: op = __qmljs_inplace_bit_and; break;
- case IR::OpBitOr: op = __qmljs_inplace_bit_or; break;
- case IR::OpBitXor: op = __qmljs_inplace_bit_xor; break;
- case IR::OpAdd: op = __qmljs_inplace_add; break;
- case IR::OpSub: op = __qmljs_inplace_sub; break;
- case IR::OpMul: op = __qmljs_inplace_mul; break;
- case IR::OpDiv: op = __qmljs_inplace_div; break;
- case IR::OpMod: op = __qmljs_inplace_mod; break;
- case IR::OpLShift: op = __qmljs_inplace_shl; break;
- case IR::OpRShift: op = __qmljs_inplace_shr; break;
- case IR::OpURShift: op = __qmljs_inplace_ushr; break;
- default:
- Q_UNREACHABLE();
- break;
- }
-
- amd64_call_code(_codePtr, op);
- return;
- } else if (IR::Temp *t2 = s->source->asTemp()) {
- amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8);
- loadTempAddress(AMD64_RSI, t);
- amd64_mov_reg_reg(_codePtr, AMD64_RDX, AMD64_RSI, 8);
- loadTempAddress(AMD64_RCX, t2);
- void (*op)(Context *, Value *, const Value *, const Value *);
- switch (s->op) {
- case IR::OpBitAnd: op = __qmljs_bit_and; break;
- case IR::OpBitOr: op = __qmljs_bit_or; break;
- case IR::OpBitXor: op = __qmljs_bit_xor; break;
- case IR::OpAdd: op = __qmljs_add; break;
- case IR::OpSub: op = __qmljs_sub; break;
- case IR::OpMul: op = __qmljs_mul; break;
- case IR::OpDiv: op = __qmljs_div; break;
- case IR::OpMod: op = __qmljs_mod; break;
- case IR::OpLShift: op = __qmljs_shl; break;
- case IR::OpRShift: op = __qmljs_shr; break;
- case IR::OpURShift: op = __qmljs_ushr; break;
- default:
- Q_UNREACHABLE();
- break;
- }
-
- amd64_call_code(_codePtr, op);
- return;
- }
- } else if (IR::Name *n = s->target->asName()) {
- if (IR::Const *c = s->source->asConst()) {
- assert(!"wip");
- return;
- } else if (IR::Temp *t = s->source->asTemp()) {
- assert(!"wip");
- return;
- }
- } else if (IR::Subscript *ss = s->target->asSubscript()) {
- if (IR::Const *c = s->source->asConst()) {
- assert(!"wip");
- return;
- } else if (IR::Temp *t = s->source->asTemp()) {
- assert(!"wip");
- return;
- }
- } else if (IR::Member *m = s->target->asMember()) {
- if (IR::Const *c = s->source->asConst()) {
- assert(!"wip");
- return;
- } else if (IR::Temp *t = s->source->asTemp()) {
- assert(!"wip");
- return;
- }
- }
- }
-
- Q_UNIMPLEMENTED();
- s->dump(qout, IR::Stmt::MIR);
- qout << endl;
- assert(!"TODO");
-}
-
-void InstructionSelection::visitJump(IR::Jump *s)
-{
- if (_block->index + 1 != s->target->index) {
- _patches[s->target].append(_codePtr);
- amd64_jump32(_codePtr, 0);
- }
-}
-
-void InstructionSelection::visitCJump(IR::CJump *s)
-{
- if (IR::Temp *t = s->cond->asTemp()) {
- amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8);
- loadTempAddress(AMD64_RSI, t);
-
- amd64_mov_reg_membase(_codePtr, AMD64_RAX, AMD64_RSI, 0, 4);
- amd64_alu_reg_imm(_codePtr, X86_CMP, AMD64_RAX, BOOLEAN_TYPE);
-
- uchar *label1 = _codePtr;
- amd64_branch8(_codePtr, X86_CC_NE, 0, 0);
-
- amd64_mov_reg_membase(_codePtr, AMD64_RAX, AMD64_RSI, offsetof(Value, booleanValue), 1);
-
- uchar *label2 = _codePtr;
- amd64_jump8(_codePtr, 0);
-
- amd64_patch(label1, _codePtr);
- amd64_call_code(_codePtr, __qmljs_to_boolean);
-
- amd64_patch(label2, _codePtr);
- amd64_alu_reg_imm_size(_codePtr, X86_CMP, AMD64_RAX, 0, 4);
- _patches[s->iftrue].append(_codePtr);
- amd64_branch32(_codePtr, X86_CC_NZ, 0, 1);
-
- if (_block->index + 1 != s->iffalse->index) {
- _patches[s->iffalse].append(_codePtr);
- amd64_jump32(_codePtr, 0);
- }
- return;
- } else if (IR::Binop *b = s->cond->asBinop()) {
- IR::Temp *l = b->left->asTemp();
- IR::Temp *r = b->right->asTemp();
- if (l && r) {
- amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8);
- loadTempAddress(AMD64_RSI, l);
- loadTempAddress(AMD64_RDX, r);
-
- // ### TODO: instruction selection for common cases (e.g. number1 < number2)
-
- bool (*op)(Context *, const Value *, const Value *);
- switch (b->op) {
- default: Q_UNREACHABLE(); assert(!"todo"); break;
- case IR::OpGt: op = __qmljs_cmp_gt; break;
- case IR::OpLt: op = __qmljs_cmp_lt; break;
- case IR::OpGe: op = __qmljs_cmp_ge; break;
- case IR::OpLe: op = __qmljs_cmp_le; break;
- case IR::OpEqual: op = __qmljs_cmp_eq; break;
- case IR::OpNotEqual: op = __qmljs_cmp_ne; break;
- case IR::OpStrictEqual: op = __qmljs_cmp_se; break;
- case IR::OpStrictNotEqual: op = __qmljs_cmp_sne; break;
- case IR::OpInstanceof: op = __qmljs_cmp_instanceof; break;
- case IR::OpIn: op = __qmljs_cmp_in; break;
- } // switch
-
- amd64_call_code(_codePtr, op);
- x86_alu_reg_imm(_codePtr, X86_CMP, X86_EAX, 0);
-
- _patches[s->iftrue].append(_codePtr);
- amd64_branch32(_codePtr, X86_CC_NZ, 0, 1);
-
- if (_block->index + 1 != s->iffalse->index) {
- _patches[s->iffalse].append(_codePtr);
- amd64_jump32(_codePtr, 0);
- }
-
- return;
- } else {
- assert(!"wip");
- }
- }
-
- Q_UNIMPLEMENTED();
- assert(!"TODO");
-}
-
-void InstructionSelection::visitRet(IR::Ret *s)
-{
- if (IR::Temp *t = s->expr->asTemp()) {
- amd64_lea_membase(_codePtr, AMD64_RDI, AMD64_R14, offsetof(Context, result));
- loadTempAddress(AMD64_RSI, t);
- amd64_call_code(_codePtr, __qmljs_copy);
- return;
- }
- Q_UNIMPLEMENTED();
- Q_UNUSED(s);
-}
-
diff --git a/qv4isel_p.h b/qv4isel_p.h
index d53d028c50..b12f71646a 100644
--- a/qv4isel_p.h
+++ b/qv4isel_p.h
@@ -1,54 +1,373 @@
-#ifndef QV4ISEL_P_H
-#define QV4ISEL_P_H
+#ifndef QV4ISEL_H
+#define QV4ISEL_H
#include "qv4ir_p.h"
-#include "qmljs_objects.h"
-
-#include <QtCore/QHash>
namespace QQmlJS {
-namespace x86_64 {
-class InstructionSelection: protected IR::StmtVisitor
+class BaseInstructionSelection: protected IR::StmtVisitor
{
+protected:
+ struct DispatchExp: IR::ExprVisitor {
+ BaseInstructionSelection *_isel;
+ IR::Exp *_stmt;
+
+ DispatchExp(BaseInstructionSelection *isel)
+ : _isel(isel), _stmt(0) {}
+
+ void operator()(IR::Exp *s)
+ {
+ qSwap(_stmt, s);
+ _stmt->expr->accept(this);
+ qSwap(_stmt, s);
+ }
+
+ virtual void visitConst(IR::Const *) { _isel->genExpConst(_stmt); }
+ virtual void visitString(IR::String *) { _isel->genExpString(_stmt); }
+ virtual void visitName(IR::Name *) { _isel->genExpName(_stmt); }
+ virtual void visitTemp(IR::Temp *) { _isel->genExpTemp(_stmt); }
+ virtual void visitClosure(IR::Closure *) { _isel->genExpClosure(_stmt); }
+ virtual void visitUnop(IR::Unop *) { _isel->genExpUnop(_stmt); }
+ virtual void visitBinop(IR::Binop *) { _isel->genExpBinop(_stmt); }
+ virtual void visitCall(IR::Call *) { _isel->genExpCall(_stmt); }
+ virtual void visitNew(IR::New *) { _isel->genExpNew(_stmt); }
+ virtual void visitSubscript(IR::Subscript *) { _isel->genExpSubscript(_stmt); }
+ virtual void visitMember(IR::Member *) { _isel->genExpMember(_stmt); }
+ };
+
+ struct DispatchRet: IR::ExprVisitor {
+ BaseInstructionSelection *_isel;
+ IR::Ret *_stmt;
+
+ DispatchRet(BaseInstructionSelection *isel)
+ : _isel(isel), _stmt(0) {}
+
+ void operator()(IR::Ret *s)
+ {
+ qSwap(_stmt, s);
+ _stmt->expr->accept(this);
+ qSwap(_stmt, s);
+ }
+
+ virtual void visitConst(IR::Const *) { _isel->genRetConst(_stmt); }
+ virtual void visitString(IR::String *) { _isel->genRetString(_stmt); }
+ virtual void visitName(IR::Name *) { _isel->genRetName(_stmt); }
+ virtual void visitTemp(IR::Temp *) { _isel->genRetTemp(_stmt); }
+ virtual void visitClosure(IR::Closure *) { _isel->genRetClosure(_stmt); }
+ virtual void visitUnop(IR::Unop *) { _isel->genRetUnop(_stmt); }
+ virtual void visitBinop(IR::Binop *) { _isel->genRetBinop(_stmt); }
+ virtual void visitCall(IR::Call *) { _isel->genRetCall(_stmt); }
+ virtual void visitNew(IR::New *) { _isel->genRetNew(_stmt); }
+ virtual void visitSubscript(IR::Subscript *) { _isel->genRetSubscript(_stmt); }
+ virtual void visitMember(IR::Member *) { _isel->genRetMember(_stmt); }
+ };
+
+ struct DispatchMove: IR::ExprVisitor {
+ BaseInstructionSelection *_isel;
+ IR::Move *_stmt;
+
+ DispatchMove(BaseInstructionSelection *isel)
+ : _isel(isel), _stmt(0) {}
+
+ void operator()(IR::Move *stmt)
+ {
+ qSwap(_stmt, stmt);
+ _stmt->target->accept(this);
+ qSwap(_stmt, stmt);
+ }
+
+ virtual void visitConst(IR::Const *) { Q_UNREACHABLE(); }
+ virtual void visitString(IR::String *) { Q_UNREACHABLE(); }
+ virtual void visitName(IR::Name *) { _isel->moveName(_stmt); }
+ virtual void visitTemp(IR::Temp *) { _isel->moveTemp(_stmt); }
+ virtual void visitClosure(IR::Closure *) { Q_UNREACHABLE(); }
+ virtual void visitUnop(IR::Unop *) { Q_UNREACHABLE(); }
+ virtual void visitBinop(IR::Binop *) { Q_UNREACHABLE(); }
+ virtual void visitCall(IR::Call *) { Q_UNREACHABLE(); }
+ virtual void visitNew(IR::New *) { Q_UNREACHABLE(); }
+ virtual void visitSubscript(IR::Subscript *) { _isel->moveSubscript(_stmt); }
+ virtual void visitMember(IR::Member *) { _isel->moveMember(_stmt); }
+ };
+
+ struct MoveTemp: IR::ExprVisitor {
+ BaseInstructionSelection *_isel;
+ IR::Move *_stmt;
+
+ MoveTemp(BaseInstructionSelection *isel)
+ : _isel(isel), _stmt(0) {}
+
+ void operator()(IR::Move *stmt)
+ {
+ qSwap(_stmt, stmt);
+ _stmt->source->accept(this);
+ qSwap(_stmt, stmt);
+ }
+
+ virtual void visitConst(IR::Const *) { _isel->genMoveTempConst(_stmt); }
+ virtual void visitString(IR::String *) { _isel->genMoveTempString(_stmt); }
+ virtual void visitName(IR::Name *) { _isel->genMoveTempName(_stmt); }
+ virtual void visitTemp(IR::Temp *) { _isel->genMoveTempTemp(_stmt); }
+ virtual void visitClosure(IR::Closure *) { _isel->genMoveTempClosure(_stmt); }
+ virtual void visitUnop(IR::Unop *) { _isel->genMoveTempUnop(_stmt); }
+ virtual void visitBinop(IR::Binop *) { _isel->genMoveTempBinop(_stmt); }
+ virtual void visitCall(IR::Call *) { _isel->genMoveTempCall(_stmt); }
+ virtual void visitNew(IR::New *) { _isel->genMoveTempNew(_stmt); }
+ virtual void visitSubscript(IR::Subscript *) { _isel->genMoveTempSubscript(_stmt); }
+ virtual void visitMember(IR::Member *) { _isel->genMoveTempMember(_stmt); }
+ };
+
+ struct MoveName: IR::ExprVisitor {
+ BaseInstructionSelection *_isel;
+ IR::Move *_stmt;
+
+ MoveName(BaseInstructionSelection *isel)
+ : _isel(isel), _stmt(0) {}
+
+ void operator()(IR::Move *stmt)
+ {
+ qSwap(_stmt, stmt);
+ _stmt->source->accept(this);
+ qSwap(_stmt, stmt);
+ }
+
+ virtual void visitConst(IR::Const *) { _isel->genMoveNameConst(_stmt); }
+ virtual void visitString(IR::String *) { _isel->genMoveNameString(_stmt); }
+ virtual void visitName(IR::Name *) { _isel->genMoveNameName(_stmt); }
+ virtual void visitTemp(IR::Temp *) { _isel->genMoveNameTemp(_stmt); }
+ virtual void visitClosure(IR::Closure *) { _isel->genMoveNameClosure(_stmt); }
+ virtual void visitUnop(IR::Unop *) { _isel->genMoveNameUnop(_stmt); }
+ virtual void visitBinop(IR::Binop *) { _isel->genMoveNameBinop(_stmt); }
+ virtual void visitCall(IR::Call *) { _isel->genMoveNameCall(_stmt); }
+ virtual void visitNew(IR::New *) { _isel->genMoveNameNew(_stmt); }
+ virtual void visitSubscript(IR::Subscript *) { _isel->genMoveNameSubscript(_stmt); }
+ virtual void visitMember(IR::Member *) { _isel->genMoveNameMember(_stmt); }
+ };
+
+ struct MoveMember: IR::ExprVisitor {
+ BaseInstructionSelection *_isel;
+ IR::Move *_stmt;
+
+ MoveMember(BaseInstructionSelection *isel)
+ : _isel(isel), _stmt(0) {}
+
+ void operator()(IR::Move *stmt)
+ {
+ qSwap(_stmt, stmt);
+ _stmt->source->accept(this);
+ qSwap(_stmt, stmt);
+ }
+
+ virtual void visitConst(IR::Const *) { _isel->genMoveMemberConst(_stmt); }
+ virtual void visitString(IR::String *) { _isel->genMoveMemberString(_stmt); }
+ virtual void visitName(IR::Name *) { _isel->genMoveMemberName(_stmt); }
+ virtual void visitTemp(IR::Temp *) { _isel->genMoveMemberTemp(_stmt); }
+ virtual void visitClosure(IR::Closure *) { _isel->genMoveMemberClosure(_stmt); }
+ virtual void visitUnop(IR::Unop *) { _isel->genMoveMemberUnop(_stmt); }
+ virtual void visitBinop(IR::Binop *) { _isel->genMoveMemberBinop(_stmt); }
+ virtual void visitCall(IR::Call *) { _isel->genMoveMemberCall(_stmt); }
+ virtual void visitNew(IR::New *) { _isel->genMoveMemberNew(_stmt); }
+ virtual void visitSubscript(IR::Subscript *) { _isel->genMoveMemberSubscript(_stmt); }
+ virtual void visitMember(IR::Member *) { _isel->genMoveMemberMember(_stmt); }
+ };
+
+ struct MoveSubscript: IR::ExprVisitor {
+ BaseInstructionSelection *_isel;
+ IR::Move *_stmt;
+
+ MoveSubscript(BaseInstructionSelection *isel)
+ : _isel(isel), _stmt(0) {}
+
+ void operator()(IR::Move *stmt)
+ {
+ qSwap(_stmt, stmt);
+ _stmt->source->accept(this);
+ qSwap(_stmt, stmt);
+ }
+
+ virtual void visitConst(IR::Const *) { _isel->genMoveSubscriptConst(_stmt); }
+ virtual void visitString(IR::String *) { _isel->genMoveSubscriptString(_stmt); }
+ virtual void visitName(IR::Name *) { _isel->genMoveSubscriptName(_stmt); }
+ virtual void visitTemp(IR::Temp *) { _isel->genMoveSubscriptTemp(_stmt); }
+ virtual void visitClosure(IR::Closure *) { _isel->genMoveSubscriptClosure(_stmt); }
+ virtual void visitUnop(IR::Unop *) { _isel->genMoveSubscriptUnop(_stmt); }
+ virtual void visitBinop(IR::Binop *) { _isel->genMoveSubscriptBinop(_stmt); }
+ virtual void visitCall(IR::Call *) { _isel->genMoveSubscriptCall(_stmt); }
+ virtual void visitNew(IR::New *) { _isel->genMoveSubscriptNew(_stmt); }
+ virtual void visitSubscript(IR::Subscript *) { _isel->genMoveSubscriptSubscript(_stmt); }
+ virtual void visitMember(IR::Member *) { _isel->genMoveSubscriptMember(_stmt); }
+ };
+
+ struct DispatchCJump: IR::ExprVisitor {
+ BaseInstructionSelection *_isel;
+ IR::CJump *_stmt;
+
+ DispatchCJump(BaseInstructionSelection *isel)
+ : _isel(isel), _stmt(0) {}
+
+ void operator()(IR::CJump *s)
+ {
+ qSwap(_stmt, s);
+ _stmt->cond->accept(this);
+ qSwap(_stmt, s);
+ }
+
+ virtual void visitConst(IR::Const *) { _isel->genCJumpConst(_stmt); }
+ virtual void visitString(IR::String *) { _isel->genCJumpString(_stmt); }
+ virtual void visitName(IR::Name *) { _isel->genCJumpName(_stmt); }
+ virtual void visitTemp(IR::Temp *) { _isel->genCJumpTemp(_stmt); }
+ virtual void visitClosure(IR::Closure *) { _isel->genCJumpClosure(_stmt); }
+ virtual void visitUnop(IR::Unop *) { _isel->genCJumpUnop(_stmt); }
+ virtual void visitBinop(IR::Binop *) { _isel->genCJumpBinop(_stmt); }
+ virtual void visitCall(IR::Call *) { _isel->genCJumpCall(_stmt); }
+ virtual void visitNew(IR::New *) { _isel->genCJumpNew(_stmt); }
+ virtual void visitSubscript(IR::Subscript *) { _isel->genCJumpSubscript(_stmt); }
+ virtual void visitMember(IR::Member *) { _isel->genCJumpMember(_stmt); }
+ };
+
+ virtual void visitExp(IR::Exp *s)
+ {
+ dispatchExp(s);
+ }
+
+ virtual void visitEnter(IR::Enter *)
+ {
+ Q_UNREACHABLE();
+ }
+
+ virtual void visitLeave(IR::Leave *)
+ {
+ Q_UNREACHABLE();
+ }
+
+ virtual void visitMove(IR::Move *s)
+ {
+ dispatchMove(s);
+ }
+
+ virtual void visitJump(IR::Jump *s)
+ {
+ genJump(s);
+ }
+
+ virtual void visitCJump(IR::CJump *s)
+ {
+ dispatchCJump(s);
+ }
+
+ virtual void visitRet(IR::Ret *s)
+ {
+ dispatchRet(s);
+ }
+
+ DispatchExp dispatchExp;
+ DispatchRet dispatchRet;
+ DispatchCJump dispatchCJump;
+ DispatchMove dispatchMove;
+ MoveTemp moveTemp;
+ MoveName moveName;
+ MoveMember moveMember;
+ MoveSubscript moveSubscript;
+
public:
- InstructionSelection(VM::ExecutionEngine *engine, IR::Module *module, uchar *code);
- ~InstructionSelection();
+ BaseInstructionSelection()
+ : dispatchExp(this)
+ , dispatchRet(this)
+ , dispatchCJump(this)
+ , dispatchMove(this)
+ , moveTemp(this)
+ , moveName(this)
+ , moveMember(this)
+ , moveSubscript(this) {}
- void operator()(IR::Function *function);
+ void statement(IR::Stmt *s) { s->accept(this); }
-protected:
- VM::String *identifier(const QString &s);
- void loadTempAddress(int reg, IR::Temp *t);
- void callActivationProperty(IR::Call *call, IR::Temp *result);
- void callProperty(IR::Call *call, IR::Temp *result);
- void constructActivationProperty(IR::New *call, IR::Temp *result);
- void constructProperty(IR::New *ctor, IR::Temp *result);
- void callValue(IR::Call *call, IR::Temp *result);
- void constructValue(IR::New *call, IR::Temp *result);
- void checkExceptions();
-
- virtual void visitExp(IR::Exp *);
- virtual void visitEnter(IR::Enter *);
- virtual void visitLeave(IR::Leave *);
- virtual void visitMove(IR::Move *);
- virtual void visitJump(IR::Jump *);
- virtual void visitCJump(IR::CJump *);
- virtual void visitRet(IR::Ret *);
-
-private:
- VM::ExecutionEngine *_engine;
- IR::Module *_module;
- IR::Function *_function;
- IR::BasicBlock *_block;
- uchar *_buffer;
- uchar *_code;
- uchar *_codePtr;
- QHash<IR::BasicBlock *, QVector<uchar *> > _patches;
- QHash<IR::BasicBlock *, uchar *> _addrs;
+ virtual void genExpConst(IR::Exp *) { Q_UNIMPLEMENTED(); }
+ virtual void genExpString(IR::Exp *) { Q_UNIMPLEMENTED(); }
+ virtual void genExpName(IR::Exp *) { Q_UNIMPLEMENTED(); }
+ virtual void genExpTemp(IR::Exp *) { Q_UNIMPLEMENTED(); }
+ virtual void genExpClosure(IR::Exp *) { Q_UNIMPLEMENTED(); }
+ virtual void genExpUnop(IR::Exp *) { Q_UNIMPLEMENTED(); }
+ virtual void genExpBinop(IR::Exp *) { Q_UNIMPLEMENTED(); }
+ virtual void genExpCall(IR::Exp *) { Q_UNIMPLEMENTED(); }
+ virtual void genExpNew(IR::Exp *) { Q_UNIMPLEMENTED(); }
+ virtual void genExpSubscript(IR::Exp *) { Q_UNIMPLEMENTED(); }
+ virtual void genExpMember(IR::Exp *) { Q_UNIMPLEMENTED(); }
+
+ virtual void genMoveTempConst(IR::Move *) { Q_UNIMPLEMENTED(); }
+ virtual void genMoveTempString(IR::Move *) { Q_UNIMPLEMENTED(); }
+ virtual void genMoveTempName(IR::Move *) { Q_UNIMPLEMENTED(); }
+ virtual void genMoveTempTemp(IR::Move *) { Q_UNIMPLEMENTED(); }
+ virtual void genMoveTempClosure(IR::Move *) { Q_UNIMPLEMENTED(); }
+ virtual void genMoveTempUnop(IR::Move *) { Q_UNIMPLEMENTED(); }
+ virtual void genMoveTempBinop(IR::Move *) { Q_UNIMPLEMENTED(); }
+ virtual void genMoveTempCall(IR::Move *) { Q_UNIMPLEMENTED(); }
+ virtual void genMoveTempNew(IR::Move *) { Q_UNIMPLEMENTED(); }
+ virtual void genMoveTempSubscript(IR::Move *) { Q_UNIMPLEMENTED(); }
+ virtual void genMoveTempMember(IR::Move *) { Q_UNIMPLEMENTED(); }
+
+ virtual void genMoveNameConst(IR::Move *) { Q_UNIMPLEMENTED(); }
+ virtual void genMoveNameString(IR::Move *) { Q_UNIMPLEMENTED(); }
+ virtual void genMoveNameName(IR::Move *) { Q_UNIMPLEMENTED(); }
+ virtual void genMoveNameTemp(IR::Move *) { Q_UNIMPLEMENTED(); }
+ virtual void genMoveNameClosure(IR::Move *) { Q_UNIMPLEMENTED(); }
+ virtual void genMoveNameUnop(IR::Move *) { Q_UNIMPLEMENTED(); }
+ virtual void genMoveNameBinop(IR::Move *) { Q_UNIMPLEMENTED(); }
+ virtual void genMoveNameCall(IR::Move *) { Q_UNIMPLEMENTED(); }
+ virtual void genMoveNameNew(IR::Move *) { Q_UNIMPLEMENTED(); }
+ virtual void genMoveNameSubscript(IR::Move *) { Q_UNIMPLEMENTED(); }
+ virtual void genMoveNameMember(IR::Move *) { Q_UNIMPLEMENTED(); }
+
+ virtual void genMoveMemberConst(IR::Move *) { Q_UNIMPLEMENTED(); }
+ virtual void genMoveMemberString(IR::Move *) { Q_UNIMPLEMENTED(); }
+ virtual void genMoveMemberName(IR::Move *) { Q_UNIMPLEMENTED(); }
+ virtual void genMoveMemberTemp(IR::Move *) { Q_UNIMPLEMENTED(); }
+ virtual void genMoveMemberClosure(IR::Move *) { Q_UNIMPLEMENTED(); }
+ virtual void genMoveMemberUnop(IR::Move *) { Q_UNIMPLEMENTED(); }
+ virtual void genMoveMemberBinop(IR::Move *) { Q_UNIMPLEMENTED(); }
+ virtual void genMoveMemberCall(IR::Move *) { Q_UNIMPLEMENTED(); }
+ virtual void genMoveMemberNew(IR::Move *) { Q_UNIMPLEMENTED(); }
+ virtual void genMoveMemberSubscript(IR::Move *) { Q_UNIMPLEMENTED(); }
+ virtual void genMoveMemberMember(IR::Move *) { Q_UNIMPLEMENTED(); }
+
+ virtual void genMoveSubscriptConst(IR::Move *) { Q_UNIMPLEMENTED(); }
+ virtual void genMoveSubscriptString(IR::Move *) { Q_UNIMPLEMENTED(); }
+ virtual void genMoveSubscriptName(IR::Move *) { Q_UNIMPLEMENTED(); }
+ virtual void genMoveSubscriptTemp(IR::Move *) { Q_UNIMPLEMENTED(); }
+ virtual void genMoveSubscriptClosure(IR::Move *) { Q_UNIMPLEMENTED(); }
+ virtual void genMoveSubscriptUnop(IR::Move *) { Q_UNIMPLEMENTED(); }
+ virtual void genMoveSubscriptBinop(IR::Move *) { Q_UNIMPLEMENTED(); }
+ virtual void genMoveSubscriptCall(IR::Move *) { Q_UNIMPLEMENTED(); }
+ virtual void genMoveSubscriptNew(IR::Move *) { Q_UNIMPLEMENTED(); }
+ virtual void genMoveSubscriptSubscript(IR::Move *) { Q_UNIMPLEMENTED(); }
+ virtual void genMoveSubscriptMember(IR::Move *) { Q_UNIMPLEMENTED(); }
+
+ virtual void genJump(IR::Jump *) { Q_UNIMPLEMENTED(); }
+
+ virtual void genCJumpConst(IR::CJump *) { Q_UNIMPLEMENTED(); }
+ virtual void genCJumpString(IR::CJump *) { Q_UNIMPLEMENTED(); }
+ virtual void genCJumpName(IR::CJump *) { Q_UNIMPLEMENTED(); }
+ virtual void genCJumpTemp(IR::CJump *) { Q_UNIMPLEMENTED(); }
+ virtual void genCJumpClosure(IR::CJump *) { Q_UNIMPLEMENTED(); }
+ virtual void genCJumpUnop(IR::CJump *) { Q_UNIMPLEMENTED(); }
+ virtual void genCJumpBinop(IR::CJump *) { Q_UNIMPLEMENTED(); }
+ virtual void genCJumpCall(IR::CJump *) { Q_UNIMPLEMENTED(); }
+ virtual void genCJumpNew(IR::CJump *) { Q_UNIMPLEMENTED(); }
+ virtual void genCJumpSubscript(IR::CJump *) { Q_UNIMPLEMENTED(); }
+ virtual void genCJumpMember(IR::CJump *) { Q_UNIMPLEMENTED(); }
+
+ virtual void genRetConst(IR::Ret *) { Q_UNIMPLEMENTED(); }
+ virtual void genRetString(IR::Ret *) { Q_UNIMPLEMENTED(); }
+ virtual void genRetName(IR::Ret *) { Q_UNIMPLEMENTED(); }
+ virtual void genRetTemp(IR::Ret *) { Q_UNIMPLEMENTED(); }
+ virtual void genRetClosure(IR::Ret *) { Q_UNIMPLEMENTED(); }
+ virtual void genRetUnop(IR::Ret *) { Q_UNIMPLEMENTED(); }
+ virtual void genRetBinop(IR::Ret *) { Q_UNIMPLEMENTED(); }
+ virtual void genRetCall(IR::Ret *) { Q_UNIMPLEMENTED(); }
+ virtual void genRetNew(IR::Ret *) { Q_UNIMPLEMENTED(); }
+ virtual void genRetSubscript(IR::Ret *) { Q_UNIMPLEMENTED(); }
+ virtual void genRetMember(IR::Ret *) { Q_UNIMPLEMENTED(); }
};
-} // end of namespace x86_64
} // end of namespace QQmlJS
-#endif // QV4ISEL_P_H
+#endif // QV4ISEL_H
diff --git a/qv4isel_x86_64.cpp b/qv4isel_x86_64.cpp
new file mode 100644
index 0000000000..02f53aeff4
--- /dev/null
+++ b/qv4isel_x86_64.cpp
@@ -0,0 +1,971 @@
+
+#include "qv4isel_x86_64_p.h"
+#include "qmljs_runtime.h"
+#include "qmljs_objects.h"
+
+#define TARGET_AMD64
+#define g_assert assert
+typedef quint64 guint64;
+typedef qint64 gint64;
+typedef uchar guint8;
+typedef uint guint32;
+typedef void *gpointer;
+#include "amd64-codegen.h"
+
+#include <sys/mman.h>
+#include <iostream>
+#include <cassert>
+
+#ifndef NO_UDIS86
+# include <udis86.h>
+#endif
+
+using namespace QQmlJS;
+using namespace QQmlJS::x86_64;
+using namespace QQmlJS::VM;
+
+namespace {
+QTextStream qout(stdout, QIODevice::WriteOnly);
+}
+
+static inline void
+amd64_patch (unsigned char* code, gpointer target)
+{
+ guint8 rex = 0;
+
+#ifdef __native_client_codegen__
+ code = amd64_skip_nops (code);
+#endif
+#if defined(__native_client_codegen__) && defined(__native_client__)
+ if (nacl_is_code_address (code)) {
+ /* For tail calls, code is patched after being installed */
+ /* but not through the normal "patch callsite" method. */
+ unsigned char buf[kNaClAlignment];
+ unsigned char *aligned_code = (uintptr_t)code & ~kNaClAlignmentMask;
+ int ret;
+ memcpy (buf, aligned_code, kNaClAlignment);
+ /* Patch a temp buffer of bundle size, */
+ /* then install to actual location. */
+ amd64_patch (buf + ((uintptr_t)code - (uintptr_t)aligned_code), target);
+ ret = nacl_dyncode_modify (aligned_code, buf, kNaClAlignment);
+ g_assert (ret == 0);
+ return;
+ }
+ target = nacl_modify_patch_target (target);
+#endif
+
+ /* Skip REX */
+ if ((code [0] >= 0x40) && (code [0] <= 0x4f)) {
+ rex = code [0];
+ code += 1;
+ }
+
+ if ((code [0] & 0xf8) == 0xb8) {
+ /* amd64_set_reg_template */
+ *(guint64*)(code + 1) = (guint64)target;
+ }
+ else if ((code [0] == 0x8b) && rex && x86_modrm_mod (code [1]) == 0 && x86_modrm_rm (code [1]) == 5) {
+ /* mov 0(%rip), %dreg */
+ *(guint32*)(code + 2) = (guint32)(guint64)target - 7;
+ }
+ else if ((code [0] == 0xff) && (code [1] == 0x15)) {
+ /* call *<OFFSET>(%rip) */
+ *(guint32*)(code + 2) = ((guint32)(guint64)target) - 7;
+ }
+ else if (code [0] == 0xe8) {
+ /* call <DISP> */
+ gint64 disp = (guint8*)target - (guint8*)code;
+ assert (amd64_is_imm32 (disp));
+ x86_patch (code, (unsigned char*)target);
+ }
+ else
+ x86_patch (code, (unsigned char*)target);
+}
+InstructionSelection::InstructionSelection(VM::ExecutionEngine *engine, IR::Module *module, uchar *buffer)
+ : _engine(engine)
+ , _module(module)
+ , _function(0)
+ , _block(0)
+ , _buffer(buffer)
+ , _code(buffer)
+ , _codePtr(buffer)
+{
+}
+
+InstructionSelection::~InstructionSelection()
+{
+}
+
+void InstructionSelection::operator()(IR::Function *function)
+{
+ qSwap(_function, function);
+
+ _code = _codePtr;
+ _code = (uchar *) ((size_t(_code) + 15) & ~15);
+ _function->code = (void (*)(VM::Context *)) _code;
+ _codePtr = _code;
+
+ int locals = (_function->tempCount - _function->locals.size() + _function->maxNumberOfArguments) * sizeof(Value);
+ locals = (locals + 15) & ~15;
+
+ amd64_push_reg(_codePtr, AMD64_RBP);
+ amd64_push_reg(_codePtr, AMD64_R14);
+ amd64_push_reg(_codePtr, AMD64_R15);
+
+ amd64_mov_reg_reg(_codePtr, AMD64_RBP, AMD64_RSP, 8);
+ amd64_mov_reg_reg(_codePtr, AMD64_R14, AMD64_RDI, 8);
+ amd64_alu_reg_imm(_codePtr, X86_SUB, AMD64_RSP, locals);
+
+ amd64_mov_reg_membase(_codePtr, AMD64_R15, AMD64_R14, offsetof(Context, locals), 8);
+
+ foreach (IR::BasicBlock *block, _function->basicBlocks) {
+ _block = block;
+ _addrs[block] = _codePtr;
+ foreach (IR::Stmt *s, block->statements) {
+ s->accept(this);
+ }
+ }
+
+ QHashIterator<IR::BasicBlock *, QVector<uchar *> > it(_patches);
+ while (it.hasNext()) {
+ it.next();
+ uchar *target = _addrs[it.key()];
+ foreach (uchar *instr, it.value()) {
+ amd64_patch(instr, target);
+ }
+ }
+
+ amd64_alu_reg_imm(_codePtr, X86_ADD, AMD64_RSP, locals);
+ amd64_pop_reg(_codePtr, AMD64_R15);
+ amd64_pop_reg(_codePtr, AMD64_R14);
+ amd64_pop_reg(_codePtr, AMD64_RBP);
+ amd64_ret(_codePtr);
+
+#ifndef NO_UDIS86
+ static bool showCode = !qgetenv("SHOW_CODE").isNull();
+ if (showCode) {
+ printf("code size: %ld bytes\n", (_codePtr - _code));
+ ud_t ud_obj;
+
+ ud_init(&ud_obj);
+ ud_set_input_buffer(&ud_obj, _code, _codePtr - _code);
+ ud_set_mode(&ud_obj, 64);
+ ud_set_syntax(&ud_obj, UD_SYN_ATT);
+
+ while (ud_disassemble(&ud_obj)) {
+ printf("\t%s\n", ud_insn_asm(&ud_obj));
+ }
+ }
+#endif
+ qSwap(_function, _function);
+}
+
+String *InstructionSelection::identifier(const QString &s)
+{
+ return _engine->identifier(s);
+}
+
+void InstructionSelection::loadTempAddress(int reg, IR::Temp *t)
+{
+ if (t->index < 0) {
+ const int arg = -t->index - 1;
+ amd64_mov_reg_membase(_codePtr, reg, AMD64_R14, offsetof(Context, arguments), 8);
+ amd64_lea_membase(_codePtr, reg, reg, sizeof(Value) * arg);
+ } else if (t->index < _function->locals.size()) {
+ amd64_lea_membase(_codePtr, reg, AMD64_R15, sizeof(Value) * t->index);
+ } else {
+ amd64_lea_membase(_codePtr, reg, AMD64_RSP, sizeof(Value) * (_function->maxNumberOfArguments + t->index - _function->locals.size()));
+ }
+}
+
+void InstructionSelection::callActivationProperty(IR::Call *call, IR::Temp *result)
+{
+ IR::Name *baseName = call->base->asName();
+ assert(baseName != 0);
+
+ int argc = 0;
+ for (IR::ExprList *it = call->args; it; it = it->next) {
+ ++argc;
+ }
+
+ int i = 0;
+ for (IR::ExprList *it = call->args; it; it = it->next, ++i) {
+ IR::Temp *arg = it->expr->asTemp();
+ assert(arg != 0);
+ amd64_lea_membase(_codePtr, AMD64_RDI, AMD64_RSP, sizeof(Value) * i);
+ loadTempAddress(AMD64_RSI, arg);
+ amd64_call_code(_codePtr, __qmljs_copy);
+ }
+
+ amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8); // load the context
+
+ if (result)
+ loadTempAddress(AMD64_RSI, result);
+ else
+ amd64_alu_reg_reg(_codePtr, X86_XOR, AMD64_RSI, AMD64_RSI);
+
+ if (baseName->id) {
+ amd64_mov_reg_imm(_codePtr, AMD64_RDX, identifier(*baseName->id));
+ amd64_lea_membase(_codePtr, AMD64_RCX, AMD64_RSP, 0);
+ amd64_mov_reg_imm(_codePtr, AMD64_R8, argc);
+ amd64_call_code(_codePtr, __qmljs_call_activation_property);
+ } else {
+ switch (baseName->builtin) {
+ case IR::Name::builtin_invalid:
+ Q_UNREACHABLE();
+ break;
+ case IR::Name::builtin_typeof:
+ amd64_lea_membase(_codePtr, AMD64_RDX, AMD64_RSP, 0);
+ amd64_mov_reg_imm(_codePtr, AMD64_RCX, argc);
+ amd64_call_code(_codePtr, __qmljs_builtin_typeof);
+ break;
+ case IR::Name::builtin_throw:
+ amd64_lea_membase(_codePtr, AMD64_RDX, AMD64_RSP, 0);
+ amd64_mov_reg_imm(_codePtr, AMD64_RCX, argc);
+ amd64_call_code(_codePtr, __qmljs_builtin_throw);
+ break;
+ case IR::Name::builtin_rethrow:
+ amd64_lea_membase(_codePtr, AMD64_RDX, AMD64_RSP, 0);
+ amd64_mov_reg_imm(_codePtr, AMD64_RCX, argc);
+ amd64_call_code(_codePtr, __qmljs_builtin_rethrow);
+ return; // we need to return to avoid checking the exceptions
+ }
+ }
+
+ checkExceptions();
+}
+
+
+void InstructionSelection::callValue(IR::Call *call, IR::Temp *result)
+{
+ IR::Temp *baseTemp = call->base->asTemp();
+ assert(baseTemp != 0);
+
+ int argc = 0;
+ for (IR::ExprList *it = call->args; it; it = it->next) {
+ ++argc;
+ }
+
+ int i = 0;
+ for (IR::ExprList *it = call->args; it; it = it->next, ++i) {
+ IR::Temp *arg = it->expr->asTemp();
+ assert(arg != 0);
+ amd64_lea_membase(_codePtr, AMD64_RDI, AMD64_RSP, sizeof(Value) * i);
+ loadTempAddress(AMD64_RSI, arg);
+ amd64_call_code(_codePtr, __qmljs_copy);
+ }
+
+ amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8); // load the context
+
+ if (result)
+ loadTempAddress(AMD64_RSI, result);
+ else
+ amd64_alu_reg_reg(_codePtr, X86_XOR, AMD64_RSI, AMD64_RSI);
+
+ amd64_alu_reg_reg(_codePtr, X86_XOR, AMD64_RDX, AMD64_RDX);
+ loadTempAddress(AMD64_RCX, baseTemp);
+ amd64_lea_membase(_codePtr, AMD64_R8, AMD64_RSP, 0);
+ amd64_mov_reg_imm(_codePtr, AMD64_R9, argc);
+ amd64_call_code(_codePtr, __qmljs_call_value);
+
+ checkExceptions();
+}
+
+void InstructionSelection::callProperty(IR::Call *call, IR::Temp *result)
+{
+ IR::Member *member = call->base->asMember();
+ assert(member != 0);
+ assert(member->base->asTemp());
+
+ int argc = 0;
+ for (IR::ExprList *it = call->args; it; it = it->next) {
+ ++argc;
+ }
+
+ int i = 0;
+ for (IR::ExprList *it = call->args; it; it = it->next, ++i) {
+ IR::Temp *arg = it->expr->asTemp();
+ assert(arg != 0);
+ amd64_lea_membase(_codePtr, AMD64_RDI, AMD64_RSP, sizeof(Value) * i);
+ loadTempAddress(AMD64_RSI, arg);
+ amd64_call_code(_codePtr, __qmljs_copy);
+ }
+
+ //__qmljs_call_property(ctx, result, base, name, args, argc);
+ amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8); // load the context
+
+ if (result)
+ loadTempAddress(AMD64_RSI, result);
+ else
+ amd64_alu_reg_reg(_codePtr, X86_XOR, AMD64_RSI, AMD64_RSI);
+
+ loadTempAddress(AMD64_RDX, member->base->asTemp());
+ amd64_mov_reg_imm(_codePtr, AMD64_RCX, identifier(*member->name));
+ amd64_lea_membase(_codePtr, AMD64_R8, AMD64_RSP, 0);
+ amd64_mov_reg_imm(_codePtr, AMD64_R9, argc);
+ amd64_call_code(_codePtr, __qmljs_call_property);
+
+ checkExceptions();
+}
+
+void InstructionSelection::constructActivationProperty(IR::New *call, IR::Temp *result)
+{
+ IR::Name *baseName = call->base->asName();
+ assert(baseName != 0);
+
+ int argc = 0;
+ for (IR::ExprList *it = call->args; it; it = it->next) {
+ ++argc;
+ }
+
+ int i = 0;
+ for (IR::ExprList *it = call->args; it; it = it->next, ++i) {
+ IR::Temp *arg = it->expr->asTemp();
+ assert(arg != 0);
+ amd64_lea_membase(_codePtr, AMD64_RDI, AMD64_RSP, sizeof(Value) * i);
+ loadTempAddress(AMD64_RSI, arg);
+ amd64_call_code(_codePtr, __qmljs_copy);
+ }
+
+ amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8); // load the context
+
+ if (result)
+ loadTempAddress(AMD64_RSI, result);
+ else
+ amd64_alu_reg_reg(_codePtr, X86_XOR, AMD64_RSI, AMD64_RSI);
+
+ amd64_mov_reg_imm(_codePtr, AMD64_RDX, identifier(*baseName->id));
+ amd64_lea_membase(_codePtr, AMD64_RCX, AMD64_RSP, 0);
+ amd64_mov_reg_imm(_codePtr, AMD64_R8, argc);
+ amd64_call_code(_codePtr, __qmljs_construct_activation_property);
+
+ checkExceptions();
+}
+
+void InstructionSelection::constructProperty(IR::New *call, IR::Temp *result)
+{
+ IR::Member *member = call->base->asMember();
+ assert(member != 0);
+ assert(member->base->asTemp());
+
+ int argc = 0;
+ for (IR::ExprList *it = call->args; it; it = it->next) {
+ ++argc;
+ }
+
+ int i = 0;
+ for (IR::ExprList *it = call->args; it; it = it->next, ++i) {
+ IR::Temp *arg = it->expr->asTemp();
+ assert(arg != 0);
+ amd64_lea_membase(_codePtr, AMD64_RDI, AMD64_RSP, sizeof(Value) * i);
+ loadTempAddress(AMD64_RSI, arg);
+ amd64_call_code(_codePtr, __qmljs_copy);
+ }
+
+ //__qmljs_call_property(ctx, result, base, name, args, argc);
+ amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8); // load the context
+
+ if (result)
+ loadTempAddress(AMD64_RSI, result);
+ else
+ amd64_alu_reg_reg(_codePtr, X86_XOR, AMD64_RSI, AMD64_RSI);
+
+ loadTempAddress(AMD64_RDX, member->base->asTemp());
+ amd64_mov_reg_imm(_codePtr, AMD64_RCX, identifier(*member->name));
+ amd64_lea_membase(_codePtr, AMD64_R8, AMD64_RSP, 0);
+ amd64_mov_reg_imm(_codePtr, AMD64_R9, argc);
+ amd64_call_code(_codePtr, __qmljs_construct_property);
+
+ checkExceptions();
+}
+
+void InstructionSelection::constructValue(IR::New *call, IR::Temp *result)
+{
+ IR::Temp *baseTemp = call->base->asTemp();
+ assert(baseTemp != 0);
+
+ int argc = 0;
+ for (IR::ExprList *it = call->args; it; it = it->next) {
+ ++argc;
+ }
+
+ int i = 0;
+ for (IR::ExprList *it = call->args; it; it = it->next, ++i) {
+ IR::Temp *arg = it->expr->asTemp();
+ assert(arg != 0);
+ amd64_lea_membase(_codePtr, AMD64_RDI, AMD64_RSP, sizeof(Value) * i);
+ loadTempAddress(AMD64_RSI, arg);
+ amd64_call_code(_codePtr, __qmljs_copy);
+ }
+
+ amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8); // load the context
+
+ if (result)
+ loadTempAddress(AMD64_RSI, result);
+ else
+ amd64_alu_reg_reg(_codePtr, X86_XOR, AMD64_RSI, AMD64_RSI);
+
+ loadTempAddress(AMD64_RDX, baseTemp);
+ amd64_lea_membase(_codePtr, AMD64_RCX, AMD64_RSP, 0);
+ amd64_mov_reg_imm(_codePtr, AMD64_R8, argc);
+ amd64_call_code(_codePtr, __qmljs_construct_value);
+}
+
+void InstructionSelection::checkExceptions()
+{
+ amd64_mov_reg_membase(_codePtr, AMD64_RAX, AMD64_R14, offsetof(Context, hasUncaughtException), 4);
+ amd64_alu_reg_imm_size(_codePtr, X86_CMP, AMD64_RAX, 1, 1);
+ _patches[_function->handlersBlock].append(_codePtr);
+ amd64_branch32(_codePtr, X86_CC_E, 0, 1);
+}
+
+void InstructionSelection::visitExp(IR::Exp *s)
+{
+ if (IR::Call *c = s->expr->asCall()) {
+ if (c->base->asName()) {
+ callActivationProperty(c, 0);
+ return;
+ } else if (c->base->asTemp()) {
+ callValue(c, 0);
+ return;
+ } else if (c->base->asMember()) {
+ callProperty(c, 0);
+ return;
+ }
+ }
+ Q_UNIMPLEMENTED();
+ assert(!"TODO");
+}
+
+void InstructionSelection::visitEnter(IR::Enter *)
+{
+ Q_UNIMPLEMENTED();
+ assert(!"TODO");
+}
+
+void InstructionSelection::visitLeave(IR::Leave *)
+{
+ Q_UNIMPLEMENTED();
+ assert(!"TODO");
+}
+
+void InstructionSelection::visitMove(IR::Move *s)
+{
+ // %rdi, %rsi, %rdx, %rcx, %r8 and %r9
+ if (s->op == IR::OpInvalid) {
+ if (IR::Name *n = s->target->asName()) {
+ String *propertyName = identifier(*n->id);
+
+ if (IR::Const *c = s->source->asConst()) {
+ amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8);
+ amd64_mov_reg_imm(_codePtr, AMD64_RSI, propertyName);
+
+ switch (c->type) {
+ case IR::BoolType:
+ amd64_mov_reg_imm(_codePtr, AMD64_RDX, c->value != 0);
+ amd64_call_code(_codePtr, __qmljs_set_activation_property_boolean);
+ break;
+
+ case IR::NumberType:
+ amd64_mov_reg_imm(_codePtr, AMD64_RAX, &c->value);
+ amd64_movsd_reg_regp(_codePtr, AMD64_XMM0, AMD64_RAX);
+ amd64_call_code(_codePtr, __qmljs_set_activation_property_number);
+ break;
+
+ default:
+ Q_UNIMPLEMENTED();
+ assert(!"TODO");
+ }
+ } else if (IR::String *str = s->source->asString()) {
+ amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8);
+ amd64_mov_reg_imm(_codePtr, AMD64_RSI, propertyName);
+ amd64_mov_reg_imm(_codePtr, AMD64_RDX, _engine->newString(*str->value));
+ amd64_call_code(_codePtr, __qmljs_set_activation_property_string);
+ } else if (IR::Temp *t = s->source->asTemp()) {
+ amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8);
+ amd64_mov_reg_imm(_codePtr, AMD64_RSI, propertyName);
+ loadTempAddress(AMD64_RDX, t);
+ amd64_call_code(_codePtr, __qmljs_set_activation_property);
+ } else if (IR::Name *other = s->source->asName()) {
+ amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8);
+ amd64_mov_reg_imm(_codePtr, AMD64_RSI, propertyName);
+ amd64_mov_reg_imm(_codePtr, AMD64_RDX, identifier(*other->id));
+ amd64_call_code(_codePtr, __qmljs_copy_activation_property);
+ } else if (IR::Closure *clos = s->source->asClosure()) {
+ amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8);
+ amd64_mov_reg_imm(_codePtr, AMD64_RSI, propertyName);
+ amd64_mov_reg_imm(_codePtr, AMD64_RDX, clos->value);
+ amd64_call_code(_codePtr, __qmljs_set_activation_property_closure);
+ } else {
+ Q_UNIMPLEMENTED();
+ assert(!"TODO");
+ }
+
+ checkExceptions();
+ return;
+ } else if (IR::Temp *t = s->target->asTemp()) {
+ if (IR::Name *n = s->source->asName()) {
+ amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8);
+ loadTempAddress(AMD64_RSI, t);
+ if (*n->id == QStringLiteral("this")) { // ### `this' should be a builtin.
+ amd64_call_code(_codePtr, __qmljs_get_thisObject);
+ } else {
+ String *propertyName = identifier(*n->id);
+ amd64_mov_reg_imm(_codePtr, AMD64_RDX, propertyName);
+ amd64_call_code(_codePtr, __qmljs_get_activation_property);
+ checkExceptions();
+ }
+ return;
+ } else if (IR::Const *c = s->source->asConst()) {
+ loadTempAddress(AMD64_RSI, t);
+
+ switch (c->type) {
+ case IR::NullType:
+ amd64_mov_membase_imm(_codePtr, AMD64_RSI, 0, NULL_TYPE, 4);
+ break;
+
+ case IR::UndefinedType:
+ amd64_mov_membase_imm(_codePtr, AMD64_RSI, 0, UNDEFINED_TYPE, 4);
+ break;
+
+ case IR::BoolType:
+ amd64_mov_membase_imm(_codePtr, AMD64_RSI, 0, BOOLEAN_TYPE, 4);
+ amd64_mov_membase_imm(_codePtr, AMD64_RSI, offsetof(Value, booleanValue), c->value != 0, 1);
+ break;
+
+ case IR::NumberType:
+ amd64_mov_reg_imm(_codePtr, AMD64_RAX, &c->value);
+ amd64_movsd_reg_regp(_codePtr, AMD64_XMM0, AMD64_RAX);
+ amd64_mov_membase_imm(_codePtr, AMD64_RSI, 0, NUMBER_TYPE, 4);
+ amd64_movsd_membase_reg(_codePtr, AMD64_RSI, offsetof(Value, numberValue), AMD64_XMM0);
+ break;
+
+ default:
+ Q_UNIMPLEMENTED();
+ assert(!"TODO");
+ }
+ return;
+ } else if (IR::Temp *t2 = s->source->asTemp()) {
+ loadTempAddress(AMD64_RDI, t);
+ loadTempAddress(AMD64_RSI, t2);
+ amd64_mov_reg_membase(_codePtr, AMD64_RAX, AMD64_RSI, 0, 4);
+ amd64_mov_membase_reg(_codePtr, AMD64_RDI, 0, AMD64_RAX, 4);
+ amd64_mov_reg_membase(_codePtr, AMD64_RAX, AMD64_RSI, offsetof(Value, numberValue), 8);
+ amd64_mov_membase_reg(_codePtr, AMD64_RDI, offsetof(Value, numberValue), AMD64_RAX, 8);
+ return;
+ } else if (IR::String *str = s->source->asString()) {
+ loadTempAddress(AMD64_RDI, t);
+ amd64_mov_reg_imm(_codePtr, AMD64_RSI, _engine->newString(*str->value));
+ amd64_call_code(_codePtr, __qmljs_init_string);
+ return;
+ } else if (IR::Closure *clos = s->source->asClosure()) {
+ amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8);
+ loadTempAddress(AMD64_RSI, t);
+ amd64_mov_reg_imm(_codePtr, AMD64_RDX, clos->value);
+ amd64_call_code(_codePtr, __qmljs_init_closure);
+ return;
+ } else if (IR::New *ctor = s->source->asNew()) {
+ if (ctor->base->asName()) {
+ constructActivationProperty(ctor, t);
+ return;
+ } else if (ctor->base->asMember()) {
+ constructProperty(ctor, t);
+ return;
+ } else if (ctor->base->asTemp()) {
+ constructValue(ctor, t);
+ return;
+ }
+ } else if (IR::Member *m = s->source->asMember()) {
+ //__qmljs_get_property(ctx, result, object, name);
+ if (IR::Temp *base = m->base->asTemp()) {
+ amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8);
+ loadTempAddress(AMD64_RSI, t);
+ loadTempAddress(AMD64_RDX, base);
+ amd64_mov_reg_imm(_codePtr, AMD64_RCX, identifier(*m->name));
+ amd64_call_code(_codePtr, __qmljs_get_property);
+ checkExceptions();
+ return;
+ }
+ assert(!"wip");
+ return;
+ } else if (IR::Subscript *ss = s->source->asSubscript()) {
+ amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8);
+ loadTempAddress(AMD64_RSI, t);
+ loadTempAddress(AMD64_RDX, ss->base->asTemp());
+ loadTempAddress(AMD64_RCX, ss->index->asTemp());
+ amd64_call_code(_codePtr, __qmljs_get_element);
+ checkExceptions();
+ return;
+ } else if (IR::Unop *u = s->source->asUnop()) {
+ if (IR::Temp *e = u->expr->asTemp()) {
+ amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8);
+ loadTempAddress(AMD64_RSI, t);
+ loadTempAddress(AMD64_RDX, e);
+ void (*op)(Context *, Value *, const Value *) = 0;
+ switch (u->op) {
+ case IR::OpIfTrue: assert(!"unreachable"); break;
+ case IR::OpNot: op = __qmljs_not; break;
+ case IR::OpUMinus: op = __qmljs_uminus; break;
+ case IR::OpUPlus: op = __qmljs_uplus; break;
+ case IR::OpCompl: op = __qmljs_compl; break;
+ default: assert(!"unreachable"); break;
+ } // switch
+ amd64_call_code(_codePtr, op);
+ return;
+ } else if (IR::Const *c = u->expr->asConst()) {
+ assert(!"wip");
+ return;
+ }
+ } else if (IR::Binop *b = s->source->asBinop()) {
+ IR::Temp *l = b->left->asTemp();
+ IR::Temp *r = b->right->asTemp();
+ if (l && r) {
+ amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8);
+ loadTempAddress(AMD64_RSI, t);
+ loadTempAddress(AMD64_RDX, l);
+ loadTempAddress(AMD64_RCX, r);
+
+ uchar *label1 = 0, *label2 = 0, *label3 = 0;
+
+ if (b->op == IR::OpMul || b->op == IR::OpAdd || b->op == IR::OpSub || b->op == IR::OpDiv) {
+ amd64_alu_membase_imm_size(_codePtr, X86_CMP, AMD64_RDX, 0, NUMBER_TYPE, 4);
+ label1 = _codePtr;
+ amd64_branch8(_codePtr, X86_CC_NE, 0, 0);
+ amd64_alu_membase_imm_size(_codePtr, X86_CMP, AMD64_RCX, 0, NUMBER_TYPE, 4);
+ label2 = _codePtr;
+ amd64_branch8(_codePtr, X86_CC_NE, 0, 0);
+ amd64_movsd_reg_membase(_codePtr, AMD64_XMM0, AMD64_RDX, offsetof(Value, numberValue));
+ amd64_movsd_reg_membase(_codePtr, AMD64_XMM1, AMD64_RCX, offsetof(Value, numberValue));
+ switch (b->op) {
+ case IR::OpAdd:
+ amd64_sse_addsd_reg_reg(_codePtr, AMD64_XMM0, AMD64_XMM1);
+ break;
+ case IR::OpSub:
+ amd64_sse_subsd_reg_reg(_codePtr, AMD64_XMM0, AMD64_XMM1);
+ break;
+ case IR::OpMul:
+ amd64_sse_mulsd_reg_reg(_codePtr, AMD64_XMM0, AMD64_XMM1);
+ break;
+ case IR::OpDiv:
+ amd64_sse_divsd_reg_reg(_codePtr, AMD64_XMM0, AMD64_XMM1);
+ break;
+ default:
+ Q_UNREACHABLE();
+ } // switch
+
+ amd64_mov_membase_imm(_codePtr, AMD64_RSI, 0, NUMBER_TYPE, 4);
+ amd64_movsd_membase_reg(_codePtr, AMD64_RSI, offsetof(Value, numberValue), AMD64_XMM0);
+ label3 = _codePtr;
+ amd64_jump32(_codePtr, 0);
+ }
+
+
+ if (label1 && label2) {
+ amd64_patch(label1, _codePtr);
+ amd64_patch(label2, _codePtr);
+ }
+
+ void (*op)(Context *, Value *, const Value *, const Value *) = 0;
+
+ switch ((IR::AluOp) b->op) {
+ case IR::OpInvalid:
+ case IR::OpIfTrue:
+ case IR::OpNot:
+ case IR::OpUMinus:
+ case IR::OpUPlus:
+ case IR::OpCompl:
+ assert(!"unreachable");
+ break;
+
+ case IR::OpBitAnd: op = __qmljs_bit_and; break;
+ case IR::OpBitOr: op = __qmljs_bit_or; break;
+ case IR::OpBitXor: op = __qmljs_bit_xor; break;
+ case IR::OpAdd: op = __qmljs_add; break;
+ case IR::OpSub: op = __qmljs_sub; break;
+ case IR::OpMul: op = __qmljs_mul; break;
+ case IR::OpDiv: op = __qmljs_div; break;
+ case IR::OpMod: op = __qmljs_mod; break;
+ case IR::OpLShift: op = __qmljs_shl; break;
+ case IR::OpRShift: op = __qmljs_shr; break;
+ case IR::OpURShift: op = __qmljs_ushr; break;
+ case IR::OpGt: op = __qmljs_gt; break;
+ case IR::OpLt: op = __qmljs_lt; break;
+ case IR::OpGe: op = __qmljs_ge; break;
+ case IR::OpLe: op = __qmljs_le; break;
+ case IR::OpEqual: op = __qmljs_eq; break;
+ case IR::OpNotEqual: op = __qmljs_ne; break;
+ case IR::OpStrictEqual: op = __qmljs_se; break;
+ case IR::OpStrictNotEqual: op = __qmljs_sne; break;
+ case IR::OpInstanceof: op = __qmljs_instanceof; break;
+
+ case IR::OpIn:
+ Q_UNIMPLEMENTED();
+ assert(!"TODO");
+ break;
+
+ case IR::OpAnd:
+ case IR::OpOr:
+ assert(!"unreachable");
+ break;
+ }
+ amd64_call_code(_codePtr, op);
+ if (label3)
+ amd64_patch(label3, _codePtr);
+ return;
+ }
+ } else if (IR::Call *c = s->source->asCall()) {
+ if (c->base->asName()) {
+ callActivationProperty(c, t);
+ return;
+ } else if (c->base->asMember()) {
+ callProperty(c, t);
+ return;
+ } else if (c->base->asTemp()) {
+ callValue(c, t);
+ return;
+ }
+ }
+ } else if (IR::Member *m = s->target->asMember()) {
+ if (IR::Temp *base = m->base->asTemp()) {
+ if (IR::Const *c = s->source->asConst()) {
+ amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8);
+ loadTempAddress(AMD64_RSI, base);
+ amd64_mov_reg_imm(_codePtr, AMD64_RDX, identifier(*m->name));
+ amd64_mov_reg_imm(_codePtr, AMD64_RAX, &c->value);
+ amd64_movsd_reg_regp(_codePtr, AMD64_XMM0, AMD64_RAX);
+ amd64_call_code(_codePtr, __qmljs_set_property_number);
+ } else if (IR::String *str = s->source->asString()) {
+ amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8);
+ loadTempAddress(AMD64_RSI, base);
+ amd64_mov_reg_imm(_codePtr, AMD64_RDX, identifier(*m->name));
+ amd64_mov_reg_imm(_codePtr, AMD64_RCX, _engine->newString(*str->value));
+ amd64_call_code(_codePtr, __qmljs_set_property_string);
+ } else if (IR::Temp *t = s->source->asTemp()) {
+ // __qmljs_set_property(ctx, object, name, value);
+ amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8);
+ loadTempAddress(AMD64_RSI, base);
+ amd64_mov_reg_imm(_codePtr, AMD64_RDX, identifier(*m->name));
+ loadTempAddress(AMD64_RCX, t);
+ amd64_call_code(_codePtr, __qmljs_set_property);
+ } else if (IR::Closure *clos = s->source->asClosure()) {
+ amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8);
+ loadTempAddress(AMD64_RSI, base);
+ amd64_mov_reg_imm(_codePtr, AMD64_RDX, identifier(*m->name));
+ amd64_mov_reg_imm(_codePtr, AMD64_RCX, clos->value);
+ amd64_call_code(_codePtr, __qmljs_set_property_closure);
+ } else {
+ Q_UNIMPLEMENTED();
+ assert(!"TODO");
+ }
+ checkExceptions();
+ return;
+ }
+ } else if (IR::Subscript *ss = s->target->asSubscript()) {
+ if (IR::Temp *t2 = s->source->asTemp()) {
+ amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8); loadTempAddress(AMD64_RSI, ss->base->asTemp());
+ loadTempAddress(AMD64_RDX, ss->index->asTemp());
+ loadTempAddress(AMD64_RCX, t2);
+ amd64_call_code(_codePtr, __qmljs_set_element);
+ } else if (IR::Const *c = s->source->asConst()) {
+ if (c->type == IR::NumberType) {
+ amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8);
+ loadTempAddress(AMD64_RSI, ss->base->asTemp());
+ loadTempAddress(AMD64_RDX, ss->index->asTemp());
+ amd64_mov_reg_imm(_codePtr, AMD64_RAX, &c->value);
+ amd64_movsd_reg_regp(_codePtr, AMD64_XMM0, AMD64_RAX);
+ amd64_call_code(_codePtr, __qmljs_set_element_number);
+ } else {
+ Q_UNIMPLEMENTED();
+ assert(!"TODO");
+ }
+ } else {
+ Q_UNIMPLEMENTED();
+ assert(!"TODO");
+ }
+ checkExceptions();
+ return;
+ }
+ } else {
+ // inplace assignment, e.g. x += 1, ++x, ...
+ if (IR::Temp *t = s->target->asTemp()) {
+ if (IR::Const *c = s->source->asConst()) {
+ amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8);
+ loadTempAddress(AMD64_RSI, t);
+ amd64_mov_reg_imm(_codePtr, AMD64_RAX, &c->value);
+ amd64_movsd_reg_regp(_codePtr, AMD64_XMM0, AMD64_RAX);
+
+ void (*op)(Context *, Value *, double);
+ switch (s->op) {
+ case IR::OpBitAnd: op = __qmljs_inplace_bit_and; break;
+ case IR::OpBitOr: op = __qmljs_inplace_bit_or; break;
+ case IR::OpBitXor: op = __qmljs_inplace_bit_xor; break;
+ case IR::OpAdd: op = __qmljs_inplace_add; break;
+ case IR::OpSub: op = __qmljs_inplace_sub; break;
+ case IR::OpMul: op = __qmljs_inplace_mul; break;
+ case IR::OpDiv: op = __qmljs_inplace_div; break;
+ case IR::OpMod: op = __qmljs_inplace_mod; break;
+ case IR::OpLShift: op = __qmljs_inplace_shl; break;
+ case IR::OpRShift: op = __qmljs_inplace_shr; break;
+ case IR::OpURShift: op = __qmljs_inplace_ushr; break;
+ default:
+ Q_UNREACHABLE();
+ break;
+ }
+
+ amd64_call_code(_codePtr, op);
+ return;
+ } else if (IR::Temp *t2 = s->source->asTemp()) {
+ amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8);
+ loadTempAddress(AMD64_RSI, t);
+ amd64_mov_reg_reg(_codePtr, AMD64_RDX, AMD64_RSI, 8);
+ loadTempAddress(AMD64_RCX, t2);
+ void (*op)(Context *, Value *, const Value *, const Value *);
+ switch (s->op) {
+ case IR::OpBitAnd: op = __qmljs_bit_and; break;
+ case IR::OpBitOr: op = __qmljs_bit_or; break;
+ case IR::OpBitXor: op = __qmljs_bit_xor; break;
+ case IR::OpAdd: op = __qmljs_add; break;
+ case IR::OpSub: op = __qmljs_sub; break;
+ case IR::OpMul: op = __qmljs_mul; break;
+ case IR::OpDiv: op = __qmljs_div; break;
+ case IR::OpMod: op = __qmljs_mod; break;
+ case IR::OpLShift: op = __qmljs_shl; break;
+ case IR::OpRShift: op = __qmljs_shr; break;
+ case IR::OpURShift: op = __qmljs_ushr; break;
+ default:
+ Q_UNREACHABLE();
+ break;
+ }
+
+ amd64_call_code(_codePtr, op);
+ return;
+ }
+ } else if (IR::Name *n = s->target->asName()) {
+ if (IR::Const *c = s->source->asConst()) {
+ assert(!"wip");
+ return;
+ } else if (IR::Temp *t = s->source->asTemp()) {
+ assert(!"wip");
+ return;
+ }
+ } else if (IR::Subscript *ss = s->target->asSubscript()) {
+ if (IR::Const *c = s->source->asConst()) {
+ assert(!"wip");
+ return;
+ } else if (IR::Temp *t = s->source->asTemp()) {
+ assert(!"wip");
+ return;
+ }
+ } else if (IR::Member *m = s->target->asMember()) {
+ if (IR::Const *c = s->source->asConst()) {
+ assert(!"wip");
+ return;
+ } else if (IR::Temp *t = s->source->asTemp()) {
+ assert(!"wip");
+ return;
+ }
+ }
+ }
+
+ Q_UNIMPLEMENTED();
+ s->dump(qout, IR::Stmt::MIR);
+ qout << endl;
+ assert(!"TODO");
+}
+
+void InstructionSelection::visitJump(IR::Jump *s)
+{
+ if (_block->index + 1 != s->target->index) {
+ _patches[s->target].append(_codePtr);
+ amd64_jump32(_codePtr, 0);
+ }
+}
+
+void InstructionSelection::visitCJump(IR::CJump *s)
+{
+ if (IR::Temp *t = s->cond->asTemp()) {
+ amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8);
+ loadTempAddress(AMD64_RSI, t);
+
+ amd64_mov_reg_membase(_codePtr, AMD64_RAX, AMD64_RSI, 0, 4);
+ amd64_alu_reg_imm(_codePtr, X86_CMP, AMD64_RAX, BOOLEAN_TYPE);
+
+ uchar *label1 = _codePtr;
+ amd64_branch8(_codePtr, X86_CC_NE, 0, 0);
+
+ amd64_mov_reg_membase(_codePtr, AMD64_RAX, AMD64_RSI, offsetof(Value, booleanValue), 1);
+
+ uchar *label2 = _codePtr;
+ amd64_jump8(_codePtr, 0);
+
+ amd64_patch(label1, _codePtr);
+ amd64_call_code(_codePtr, __qmljs_to_boolean);
+
+ amd64_patch(label2, _codePtr);
+ amd64_alu_reg_imm_size(_codePtr, X86_CMP, AMD64_RAX, 0, 4);
+ _patches[s->iftrue].append(_codePtr);
+ amd64_branch32(_codePtr, X86_CC_NZ, 0, 1);
+
+ if (_block->index + 1 != s->iffalse->index) {
+ _patches[s->iffalse].append(_codePtr);
+ amd64_jump32(_codePtr, 0);
+ }
+ return;
+ } else if (IR::Binop *b = s->cond->asBinop()) {
+ IR::Temp *l = b->left->asTemp();
+ IR::Temp *r = b->right->asTemp();
+ if (l && r) {
+ amd64_mov_reg_reg(_codePtr, AMD64_RDI, AMD64_R14, 8);
+ loadTempAddress(AMD64_RSI, l);
+ loadTempAddress(AMD64_RDX, r);
+
+ // ### TODO: instruction selection for common cases (e.g. number1 < number2)
+
+ bool (*op)(Context *, const Value *, const Value *);
+ switch (b->op) {
+ default: Q_UNREACHABLE(); assert(!"todo"); break;
+ case IR::OpGt: op = __qmljs_cmp_gt; break;
+ case IR::OpLt: op = __qmljs_cmp_lt; break;
+ case IR::OpGe: op = __qmljs_cmp_ge; break;
+ case IR::OpLe: op = __qmljs_cmp_le; break;
+ case IR::OpEqual: op = __qmljs_cmp_eq; break;
+ case IR::OpNotEqual: op = __qmljs_cmp_ne; break;
+ case IR::OpStrictEqual: op = __qmljs_cmp_se; break;
+ case IR::OpStrictNotEqual: op = __qmljs_cmp_sne; break;
+ case IR::OpInstanceof: op = __qmljs_cmp_instanceof; break;
+ case IR::OpIn: op = __qmljs_cmp_in; break;
+ } // switch
+
+ amd64_call_code(_codePtr, op);
+ x86_alu_reg_imm(_codePtr, X86_CMP, X86_EAX, 0);
+
+ _patches[s->iftrue].append(_codePtr);
+ amd64_branch32(_codePtr, X86_CC_NZ, 0, 1);
+
+ if (_block->index + 1 != s->iffalse->index) {
+ _patches[s->iffalse].append(_codePtr);
+ amd64_jump32(_codePtr, 0);
+ }
+
+ return;
+ } else {
+ assert(!"wip");
+ }
+ }
+
+ Q_UNIMPLEMENTED();
+ assert(!"TODO");
+}
+
+void InstructionSelection::visitRet(IR::Ret *s)
+{
+ if (IR::Temp *t = s->expr->asTemp()) {
+ amd64_lea_membase(_codePtr, AMD64_RDI, AMD64_R14, offsetof(Context, result));
+ loadTempAddress(AMD64_RSI, t);
+ amd64_call_code(_codePtr, __qmljs_copy);
+ return;
+ }
+ Q_UNIMPLEMENTED();
+ Q_UNUSED(s);
+}
+
diff --git a/qv4isel_x86_64_p.h b/qv4isel_x86_64_p.h
new file mode 100644
index 0000000000..d53d028c50
--- /dev/null
+++ b/qv4isel_x86_64_p.h
@@ -0,0 +1,54 @@
+#ifndef QV4ISEL_X86_64_P_H
+#define QV4ISEL_X86_64_P_H
+
+#include "qv4ir_p.h"
+#include "qmljs_objects.h"
+
+#include <QtCore/QHash>
+
+namespace QQmlJS {
+namespace x86_64 {
+
+class InstructionSelection: protected IR::StmtVisitor
+{
+public:
+ InstructionSelection(VM::ExecutionEngine *engine, IR::Module *module, uchar *code);
+ ~InstructionSelection();
+
+ void operator()(IR::Function *function);
+
+protected:
+ VM::String *identifier(const QString &s);
+ void loadTempAddress(int reg, IR::Temp *t);
+ void callActivationProperty(IR::Call *call, IR::Temp *result);
+ void callProperty(IR::Call *call, IR::Temp *result);
+ void constructActivationProperty(IR::New *call, IR::Temp *result);
+ void constructProperty(IR::New *ctor, IR::Temp *result);
+ void callValue(IR::Call *call, IR::Temp *result);
+ void constructValue(IR::New *call, IR::Temp *result);
+ void checkExceptions();
+
+ virtual void visitExp(IR::Exp *);
+ virtual void visitEnter(IR::Enter *);
+ virtual void visitLeave(IR::Leave *);
+ virtual void visitMove(IR::Move *);
+ virtual void visitJump(IR::Jump *);
+ virtual void visitCJump(IR::CJump *);
+ virtual void visitRet(IR::Ret *);
+
+private:
+ VM::ExecutionEngine *_engine;
+ IR::Module *_module;
+ IR::Function *_function;
+ IR::BasicBlock *_block;
+ uchar *_buffer;
+ uchar *_code;
+ uchar *_codePtr;
+ QHash<IR::BasicBlock *, QVector<uchar *> > _patches;
+ QHash<IR::BasicBlock *, uchar *> _addrs;
+};
+
+} // end of namespace x86_64
+} // end of namespace QQmlJS
+
+#endif // QV4ISEL_X86_64_P_H
diff --git a/v4.pro b/v4.pro
index 4198533ae3..036043d43b 100644
--- a/v4.pro
+++ b/v4.pro
@@ -15,7 +15,8 @@ SOURCES += main.cpp \
qv4isel.cpp \
qv4syntaxchecker.cpp \
qv4ecmaobjects.cpp \
- qv4array.cpp
+ qv4array.cpp \
+ qv4isel_x86_64.cpp
HEADERS += \
qv4codegen_p.h \
@@ -27,8 +28,8 @@ HEADERS += \
amd64-codegen.h \
qv4syntaxchecker_p.h \
qv4ecmaobjects_p.h \
- qv4array_p.h
-
+ qv4array_p.h \
+ qv4isel_x86_64_p.h