Skip to content
This repository has been archived by the owner on Apr 22, 2023. It is now read-only.

Commit

Permalink
Browse files Browse the repository at this point in the history
Upgrade V8 to 3.6.6.19
  • Loading branch information
isaacs committed Jan 20, 2012
1 parent 4fdec07 commit 4afc46d
Show file tree
Hide file tree
Showing 27 changed files with 586 additions and 834 deletions.
9 changes: 8 additions & 1 deletion deps/v8/src/arm/assembler-arm-inl.h
Expand Up @@ -32,7 +32,7 @@

// The original source code covered by the above license has been modified
// significantly by Google Inc.
// Copyright 2006-2008 the V8 project authors. All rights reserved.
// Copyright 2012 the V8 project authors. All rights reserved.

#ifndef V8_ARM_ASSEMBLER_ARM_INL_H_
#define V8_ARM_ASSEMBLER_ARM_INL_H_
Expand All @@ -46,6 +46,13 @@ namespace v8 {
namespace internal {


int DwVfpRegister::ToAllocationIndex(DwVfpRegister reg) {
  // Register codes map one-to-one onto allocation indices; the reserved
  // scratch and zero registers must never be handed to the allocator.
  ASSERT(!reg.is(kScratchDoubleReg));
  ASSERT(!reg.is(kDoubleRegZero));
  return reg.code();
}


void RelocInfo::apply(intptr_t delta) {
if (RelocInfo::IsInternalReference(rmode_)) {
// absolute code pointer inside code object moves with the code object.
Expand Down
10 changes: 4 additions & 6 deletions deps/v8/src/arm/assembler-arm.h
Expand Up @@ -32,7 +32,7 @@

// The original source code covered by the above license has been
// modified significantly by Google Inc.
// Copyright 2011 the V8 project authors. All rights reserved.
// Copyright 2012 the V8 project authors. All rights reserved.

// A light-weight ARM Assembler
// Generates user mode instructions for the ARM architecture up to version 5
Expand Down Expand Up @@ -176,14 +176,11 @@ struct DwVfpRegister {
static const int kNumAllocatableRegisters = kNumRegisters -
kNumReservedRegisters;

// Returns the allocation index for the given double register. Reserved
// registers (kDoubleRegZero, kScratchDoubleReg) are excluded from
// allocation; the definition lives in assembler-arm-inl.h.
// NOTE(review): the scrape retained both the removed out-of-line body and
// the new inline declaration — keeping only the declaration resolves the
// duplicate definition.
inline static int ToAllocationIndex(DwVfpRegister reg);

// Inverse of ToAllocationIndex: with the identity mapping, the allocation
// index is the register code itself.
// NOTE(review): the scrape retained both the old `from_code(index + 1)`
// return and the new one as consecutive statements; the second was
// unreachable. Keeping only the post-commit identity mapping.
static DwVfpRegister FromAllocationIndex(int index) {
  ASSERT(index >= 0 && index < kNumAllocatableRegisters);
  return from_code(index);
}

static const char* AllocationIndexToString(int index) {
Expand Down Expand Up @@ -307,6 +304,7 @@ const DwVfpRegister d15 = { 15 };
const DwVfpRegister kFirstCalleeSavedDoubleReg = d8;
const DwVfpRegister kLastCalleeSavedDoubleReg = d15;
const DwVfpRegister kDoubleRegZero = d14;
const DwVfpRegister kScratchDoubleReg = d15;


// Coprocessor register
Expand Down
7 changes: 3 additions & 4 deletions deps/v8/src/arm/code-stubs-arm.cc
Expand Up @@ -5392,13 +5392,12 @@ void StringHelper::GenerateHashGetHash(MacroAssembler* masm,
// hash ^= hash >> 11;
__ eor(hash, hash, Operand(hash, LSR, 11));
// hash += hash << 15;
__ add(hash, hash, Operand(hash, LSL, 15), SetCC);
__ add(hash, hash, Operand(hash, LSL, 15));

uint32_t kHashShiftCutOffMask = (1 << (32 - String::kHashShift)) - 1;
__ and_(hash, hash, Operand(kHashShiftCutOffMask));
__ and_(hash, hash, Operand(String::kHashBitMask), SetCC);

// if (hash == 0) hash = 27;
__ mov(hash, Operand(27), LeaveCC, eq);
__ mov(hash, Operand(StringHasher::kZeroHash), LeaveCC, eq);
}


Expand Down
76 changes: 22 additions & 54 deletions deps/v8/src/arm/deoptimizer-arm.cc
Expand Up @@ -44,12 +44,6 @@ int Deoptimizer::patch_size() {
}


void Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(Handle<Code> code) {
  // Intentionally empty: on ARM, lazy deoptimization patches calls in
  // over existing instructions (see DeoptimizeFunction), so no additional
  // relocation information is emitted here.
}


void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
HandleScope scope;
AssertNoAllocation no_allocation;
Expand All @@ -58,59 +52,38 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {

// Get the optimized code.
Code* code = function->code();
Address code_start_address = code->instruction_start();

// Invalidate the relocation information, as it will become invalid by the
// code patching below, and is not needed any more.
code->InvalidateRelocation();

// For each return after a safepoint insert an absolute call to the
// corresponding deoptimization entry.
unsigned last_pc_offset = 0;
SafepointTable table(function->code());
for (unsigned i = 0; i < table.length(); i++) {
unsigned pc_offset = table.GetPcOffset(i);
SafepointEntry safepoint_entry = table.GetEntry(i);
int deoptimization_index = safepoint_entry.deoptimization_index();
int gap_code_size = safepoint_entry.gap_code_size();
// Check that we did not shoot past next safepoint.
CHECK(pc_offset >= last_pc_offset);
// For each LLazyBailout instruction insert a call to the corresponding
// deoptimization entry.
DeoptimizationInputData* deopt_data =
DeoptimizationInputData::cast(code->deoptimization_data());
#ifdef DEBUG
// Destroy the code which is not supposed to be run again.
int instructions = (pc_offset - last_pc_offset) / Assembler::kInstrSize;
CodePatcher destroyer(code->instruction_start() + last_pc_offset,
instructions);
for (int x = 0; x < instructions; x++) {
destroyer.masm()->bkpt(0);
}
Address prev_call_address = NULL;
#endif
last_pc_offset = pc_offset;
if (deoptimization_index != Safepoint::kNoDeoptimizationIndex) {
Address deoptimization_entry = Deoptimizer::GetDeoptimizationEntry(
deoptimization_index, Deoptimizer::LAZY);
last_pc_offset += gap_code_size;
int call_size_in_bytes = MacroAssembler::CallSize(deoptimization_entry,
RelocInfo::NONE);
int call_size_in_words = call_size_in_bytes / Assembler::kInstrSize;
ASSERT(call_size_in_bytes % Assembler::kInstrSize == 0);
ASSERT(call_size_in_bytes <= patch_size());
CodePatcher patcher(code->instruction_start() + last_pc_offset,
call_size_in_words);
patcher.masm()->Call(deoptimization_entry, RelocInfo::NONE);
last_pc_offset += call_size_in_bytes;
}
}

for (int i = 0; i < deopt_data->DeoptCount(); i++) {
if (deopt_data->Pc(i)->value() == -1) continue;
Address call_address = code_start_address + deopt_data->Pc(i)->value();
Address deopt_entry = GetDeoptimizationEntry(i, LAZY);
int call_size_in_bytes = MacroAssembler::CallSize(deopt_entry,
RelocInfo::NONE);
int call_size_in_words = call_size_in_bytes / Assembler::kInstrSize;
ASSERT(call_size_in_bytes % Assembler::kInstrSize == 0);
ASSERT(call_size_in_bytes <= patch_size());
CodePatcher patcher(call_address, call_size_in_words);
patcher.masm()->Call(deopt_entry, RelocInfo::NONE);
ASSERT(prev_call_address == NULL ||
call_address >= prev_call_address + patch_size());
ASSERT(call_address + patch_size() <= code->instruction_end());

#ifdef DEBUG
// Destroy the code which is not supposed to be run again.
int instructions =
(code->safepoint_table_offset() - last_pc_offset) / Assembler::kInstrSize;
CodePatcher destroyer(code->instruction_start() + last_pc_offset,
instructions);
for (int x = 0; x < instructions; x++) {
destroyer.masm()->bkpt(0);
}
prev_call_address = call_address;
#endif
}

// Add the deoptimizing code to the list.
DeoptimizingCodeListNode* node = new DeoptimizingCodeListNode(code);
Expand All @@ -125,11 +98,6 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
PrintF("[forced deoptimization: ");
function->PrintName();
PrintF(" / %x]\n", reinterpret_cast<uint32_t>(function));
#ifdef DEBUG
if (FLAG_print_code) {
code->PrintLn();
}
#endif
}
}

Expand Down

0 comments on commit 4afc46d

Please sign in to comment.