Skip to content
This repository has been archived by the owner on Apr 22, 2023. It is now read-only.

Commit

Permalink
v8: upgrade to 3.22.24.17
Browse files Browse the repository at this point in the history
  • Loading branch information
trevnorris committed Jan 23, 2014
1 parent c79c304 commit f78e5df
Show file tree
Hide file tree
Showing 43 changed files with 585 additions and 147 deletions.
8 changes: 8 additions & 0 deletions deps/v8/src/arguments.cc
Expand Up @@ -117,4 +117,12 @@ FOR_EACH_CALLBACK_TABLE_MAPPING_2_VOID_RETURN(WRITE_CALL_2_VOID)
#undef WRITE_CALL_2_VOID


// Runs a floating-point computation over the four arguments so that values
// previously held in the double registers used by this code are destroyed.
// TODO(ulan): This clobbers only a subset of the registers, depending on the
// compiler; rewrite it in assembly to really clobber all registers.
// GCC for ia32 uses the FPU and does not touch the XMM registers.
double ClobberDoubleRegisters(double x1, double x2, double x3, double x4) {
  double acc = x1 * 1.01;
  acc += x2 * 2.02;
  acc += x3 * 3.03;
  acc += x4 * 4.04;
  return acc;
}


} } // namespace v8::internal
11 changes: 11 additions & 0 deletions deps/v8/src/arguments.h
Expand Up @@ -289,12 +289,23 @@ class FunctionCallbackArguments
};


// Runs a floating-point computation over its arguments to destroy values
// held in (a subset of) the double registers; defined in arguments.cc.
double ClobberDoubleRegisters(double x1, double x2, double x3, double x4);


// In DEBUG builds, calls ClobberDoubleRegisters on entry to a runtime
// function — presumably to catch generated code that wrongly assumes the
// double registers survive a runtime call; TODO confirm intent. Expands to
// nothing in release builds.
#ifdef DEBUG
#define CLOBBER_DOUBLE_REGISTERS() ClobberDoubleRegisters(1, 2, 3, 4);
#else
#define CLOBBER_DOUBLE_REGISTERS()
#endif


// Declares the C++ entry point of a runtime function: it takes the raw
// argument count, the raw argument array, and the isolate.
#define DECLARE_RUNTIME_FUNCTION(Type, Name) \
Type Name(int args_length, Object** args_object, Isolate* isolate)

#define RUNTIME_FUNCTION(Type, Name) \
static Type __RT_impl_##Name(Arguments args, Isolate* isolate); \
Type Name(int args_length, Object** args_object, Isolate* isolate) { \
CLOBBER_DOUBLE_REGISTERS(); \
Arguments args(args_length, args_object); \
return __RT_impl_##Name(args, isolate); \
} \
Expand Down
15 changes: 13 additions & 2 deletions deps/v8/src/arm/builtins-arm.cc
Expand Up @@ -859,7 +859,8 @@ void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
}


void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
SaveFPRegsMode save_doubles) {
{
FrameScope scope(masm, StackFrame::INTERNAL);

Expand All @@ -868,7 +869,7 @@ void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
// registers.
__ stm(db_w, sp, kJSCallerSaved | kCalleeSaved);
// Pass the function and deoptimization type to the runtime system.
__ CallRuntime(Runtime::kNotifyStubFailure, 0);
__ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
__ ldm(ia_w, sp, kJSCallerSaved | kCalleeSaved);
}

Expand All @@ -877,6 +878,16 @@ void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
}


// NotifyStubFailure entry point that does not preserve the FP registers:
// delegates to the shared helper with kDontSaveFPRegs.
void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}


// NotifyStubFailure entry point that preserves the FP registers: delegates
// to the shared helper with kSaveFPRegs.
void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}


static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
Deoptimizer::BailoutType type) {
{
Expand Down
5 changes: 5 additions & 0 deletions deps/v8/src/arm/deoptimizer-arm.cc
Expand Up @@ -127,6 +127,11 @@ bool Deoptimizer::HasAlignmentPadding(JSFunction* function) {
}


// ARM: returns the double-saving NotifyStubFailure builtin unconditionally —
// there is no CPU-feature check here, unlike the ia32 version which tests
// for SSE2 first (presumably VFP is always available on this target; verify).
Code* Deoptimizer::NotifyStubFailureBuiltin() {
return isolate_->builtins()->builtin(Builtins::kNotifyStubFailureSaveDoubles);
}


#define __ masm()->

// This code tries to be close to ia32 code so that any changes can be
Expand Down
67 changes: 45 additions & 22 deletions deps/v8/src/arm/lithium-codegen-arm.cc
Expand Up @@ -98,6 +98,38 @@ void LCodeGen::Abort(BailoutReason reason) {
}


void LCodeGen::SaveCallerDoubles() {
ASSERT(info()->saves_caller_doubles());
ASSERT(NeedsEagerFrame());
Comment(";;; Save clobbered callee double registers");
int count = 0;
BitVector* doubles = chunk()->allocated_double_registers();
BitVector::Iterator save_iterator(doubles);
while (!save_iterator.Done()) {
__ vstr(DwVfpRegister::FromAllocationIndex(save_iterator.Current()),
MemOperand(sp, count * kDoubleSize));
save_iterator.Advance();
count++;
}
}


void LCodeGen::RestoreCallerDoubles() {
ASSERT(info()->saves_caller_doubles());
ASSERT(NeedsEagerFrame());
Comment(";;; Restore clobbered callee double registers");
BitVector* doubles = chunk()->allocated_double_registers();
BitVector::Iterator save_iterator(doubles);
int count = 0;
while (!save_iterator.Done()) {
__ vldr(DwVfpRegister::FromAllocationIndex(save_iterator.Current()),
MemOperand(sp, count * kDoubleSize));
save_iterator.Advance();
count++;
}
}


bool LCodeGen::GeneratePrologue() {
ASSERT(is_generating());

Expand Down Expand Up @@ -158,16 +190,7 @@ bool LCodeGen::GeneratePrologue() {
}

if (info()->saves_caller_doubles()) {
Comment(";;; Save clobbered callee double registers");
int count = 0;
BitVector* doubles = chunk()->allocated_double_registers();
BitVector::Iterator save_iterator(doubles);
while (!save_iterator.Done()) {
__ vstr(DwVfpRegister::FromAllocationIndex(save_iterator.Current()),
MemOperand(sp, count * kDoubleSize));
save_iterator.Advance();
count++;
}
SaveCallerDoubles();
}

// Possibly allocate a local context.
Expand Down Expand Up @@ -313,6 +336,7 @@ bool LCodeGen::GenerateDeoptJumpTable() {
Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id);
}
if (deopt_jump_table_[i].needs_frame) {
ASSERT(!info()->saves_caller_doubles());
__ mov(ip, Operand(ExternalReference::ForDeoptEntry(entry)));
if (needs_frame.is_bound()) {
__ b(&needs_frame);
Expand All @@ -330,6 +354,10 @@ bool LCodeGen::GenerateDeoptJumpTable() {
__ mov(pc, ip);
}
} else {
if (info()->saves_caller_doubles()) {
ASSERT(info()->IsStub());
RestoreCallerDoubles();
}
__ mov(lr, Operand(pc), LeaveCC, al);
__ mov(pc, Operand(ExternalReference::ForDeoptEntry(entry)));
}
Expand Down Expand Up @@ -783,7 +811,10 @@ void LCodeGen::DeoptimizeIf(Condition condition,
}

ASSERT(info()->IsStub() || frame_is_built_);
if (condition == al && frame_is_built_) {
// Go through jump table if we need to handle condition, build frame, or
// restore caller doubles.
if (condition == al && frame_is_built_ &&
!info()->saves_caller_doubles()) {
__ Call(entry, RelocInfo::RUNTIME_ENTRY);
} else {
// We often have several deopts to the same entry, reuse the last
Expand Down Expand Up @@ -2853,16 +2884,7 @@ void LCodeGen::DoReturn(LReturn* instr) {
__ CallRuntime(Runtime::kTraceExit, 1);
}
if (info()->saves_caller_doubles()) {
ASSERT(NeedsEagerFrame());
BitVector* doubles = chunk()->allocated_double_registers();
BitVector::Iterator save_iterator(doubles);
int count = 0;
while (!save_iterator.Done()) {
__ vldr(DwVfpRegister::FromAllocationIndex(save_iterator.Current()),
MemOperand(sp, count * kDoubleSize));
save_iterator.Advance();
count++;
}
RestoreCallerDoubles();
}
int no_frame_start = -1;
if (NeedsEagerFrame()) {
Expand Down Expand Up @@ -3434,7 +3456,8 @@ void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) {
__ jmp(&receiver_ok);

__ bind(&global_object);
__ ldr(receiver, GlobalObjectOperand());
__ ldr(receiver, MemOperand(fp, StandardFrameConstants::kContextOffset));
__ ldr(receiver, ContextOperand(receiver, Context::GLOBAL_OBJECT_INDEX));
__ ldr(receiver,
FieldMemOperand(receiver, JSGlobalObject::kGlobalReceiverOffset));
__ bind(&receiver_ok);
Expand Down
3 changes: 3 additions & 0 deletions deps/v8/src/arm/lithium-codegen-arm.h
Expand Up @@ -186,6 +186,9 @@ class LCodeGen: public LCodeGenBase {

void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }

void SaveCallerDoubles();
void RestoreCallerDoubles();

// Code generation passes. Returns true if code generation should
// continue.
bool GeneratePrologue();
Expand Down
6 changes: 4 additions & 2 deletions deps/v8/src/arm/macro-assembler-arm.h
Expand Up @@ -1045,8 +1045,10 @@ class MacroAssembler: public Assembler {
}

// Convenience function: Same as above, but takes the fid instead.
void CallRuntime(Runtime::FunctionId id, int num_arguments) {
CallRuntime(Runtime::FunctionForId(id), num_arguments);
// Convenience overload: resolves the Runtime::Function from the id and
// forwards. save_doubles defaults to kDontSaveFPRegs, so pre-existing
// two-argument call sites keep their previous behavior.
void CallRuntime(Runtime::FunctionId id,
int num_arguments,
SaveFPRegsMode save_doubles = kDontSaveFPRegs) {
CallRuntime(Runtime::FunctionForId(id), num_arguments, save_doubles);
}

// Convenience function: call an external reference.
Expand Down
1 change: 1 addition & 0 deletions deps/v8/src/ast.cc
Expand Up @@ -554,6 +554,7 @@ bool Call::ComputeTarget(Handle<Map> type, Handle<String> name) {
if (!type->prototype()->IsJSObject()) return false;
// Go up the prototype chain, recording where we are currently.
holder_ = Handle<JSObject>(JSObject::cast(type->prototype()));
JSObject::TryMigrateInstance(holder_);
type = Handle<Map>(holder()->map());
}
}
Expand Down
3 changes: 3 additions & 0 deletions deps/v8/src/builtins.h
Expand Up @@ -111,6 +111,8 @@ enum BuiltinExtraArguments {
V(NotifyLazyDeoptimized, BUILTIN, UNINITIALIZED, \
Code::kNoExtraICState) \
V(NotifyStubFailure, BUILTIN, UNINITIALIZED, \
Code::kNoExtraICState) \
V(NotifyStubFailureSaveDoubles, BUILTIN, UNINITIALIZED, \
Code::kNoExtraICState) \
\
V(LoadIC_Miss, BUILTIN, UNINITIALIZED, \
Expand Down Expand Up @@ -400,6 +402,7 @@ class Builtins {
static void Generate_NotifySoftDeoptimized(MacroAssembler* masm);
static void Generate_NotifyLazyDeoptimized(MacroAssembler* masm);
static void Generate_NotifyStubFailure(MacroAssembler* masm);
static void Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm);
static void Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm);

static void Generate_FunctionCall(MacroAssembler* masm);
Expand Down
18 changes: 13 additions & 5 deletions deps/v8/src/code-stubs-hydrogen.cc
Expand Up @@ -721,15 +721,23 @@ HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor(

HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor(
JSArrayBuilder* array_builder, ElementsKind kind) {
// Insert a bounds check because the number of arguments might exceed
// the kInitialMaxFastElementArray limit. This cannot happen for code
// that was parsed, but calling via Array.apply(thisArg, [...]) might
// trigger it.
HValue* length = GetArgumentsLength();
HConstant* max_alloc_length =
Add<HConstant>(JSObject::kInitialMaxFastElementArray);
HValue* checked_length = Add<HBoundsCheck>(length, max_alloc_length);

// We need to fill with the hole if it's a smi array in the multi-argument
// case because we might have to bail out while copying arguments into
// the array because they aren't compatible with a smi array.
// If it's a double array, no problem, and if it's fast then no
// problem either because doubles are boxed.
HValue* length = GetArgumentsLength();
bool fill_with_hole = IsFastSmiElementsKind(kind);
HValue* new_object = array_builder->AllocateArray(length,
length,
HValue* new_object = array_builder->AllocateArray(checked_length,
checked_length,
fill_with_hole);
HValue* elements = array_builder->GetElementsLocation();
ASSERT(elements != NULL);
Expand All @@ -739,10 +747,10 @@ HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor(
context(),
LoopBuilder::kPostIncrement);
HValue* start = graph()->GetConstant0();
HValue* key = builder.BeginBody(start, length, Token::LT);
HValue* key = builder.BeginBody(start, checked_length, Token::LT);
HInstruction* argument_elements = Add<HArgumentsElements>(false);
HInstruction* argument = Add<HAccessArgumentsAt>(
argument_elements, length, key);
argument_elements, checked_length, key);

Add<HStoreKeyed>(elements, key, argument, kind);
builder.EndBody();
Expand Down
2 changes: 1 addition & 1 deletion deps/v8/src/date.js
Expand Up @@ -132,7 +132,7 @@ function TimeClip(time) {
// strings over and over again.
var Date_cache = {
// Cached time value.
time: NAN,
time: 0,
// String input for which the cached time is valid.
string: null
};
Expand Down
3 changes: 1 addition & 2 deletions deps/v8/src/deoptimizer.cc
Expand Up @@ -1574,8 +1574,7 @@ void Deoptimizer::DoComputeCompiledStubFrame(TranslationIterator* iterator,
output_frame->SetPc(reinterpret_cast<intptr_t>(
trampoline->instruction_start()));
output_frame->SetState(Smi::FromInt(FullCodeGenerator::NO_REGISTERS));
Code* notify_failure =
isolate_->builtins()->builtin(Builtins::kNotifyStubFailure);
Code* notify_failure = NotifyStubFailureBuiltin();
output_frame->SetContinuation(
reinterpret_cast<intptr_t>(notify_failure->entry()));
}
Expand Down
4 changes: 4 additions & 0 deletions deps/v8/src/deoptimizer.h
Expand Up @@ -412,6 +412,10 @@ class Deoptimizer : public Malloced {
// at the dynamic alignment state slot inside the frame.
bool HasAlignmentPadding(JSFunction* function);

// Select the version of NotifyStubFailure builtin that either saves or
// doesn't save the double registers depending on CPU features.
Code* NotifyStubFailureBuiltin();

Isolate* isolate_;
JSFunction* function_;
Code* compiled_code_;
Expand Down
20 changes: 18 additions & 2 deletions deps/v8/src/ia32/builtins-ia32.cc
Expand Up @@ -601,7 +601,8 @@ void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
}


void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
SaveFPRegsMode save_doubles) {
// Enter an internal frame.
{
FrameScope scope(masm, StackFrame::INTERNAL);
Expand All @@ -610,7 +611,7 @@ void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
// stubs that tail call the runtime on deopts passing their parameters in
// registers.
__ pushad();
__ CallRuntime(Runtime::kNotifyStubFailure, 0);
__ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
__ popad();
// Tear down internal frame.
}
Expand All @@ -620,6 +621,21 @@ void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
}


// ia32 NotifyStubFailure entry point that does not preserve the FP
// registers: delegates to the shared helper with kDontSaveFPRegs.
void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}


void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
// Both branches emit the kSaveFPRegs variant; they differ only in that the
// serializer path opens a PlatformFeatureScope for SSE2 for the duration of
// code generation — presumably so the save-doubles code can be assembled
// when building a snapshot regardless of the host CPU; TODO confirm.
if (Serializer::enabled()) {
PlatformFeatureScope sse2(SSE2);
Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
} else {
Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}
}


static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
Deoptimizer::BailoutType type) {
{
Expand Down
7 changes: 7 additions & 0 deletions deps/v8/src/ia32/deoptimizer-ia32.cc
Expand Up @@ -231,6 +231,13 @@ bool Deoptimizer::HasAlignmentPadding(JSFunction* function) {
}


// Selects the NotifyStubFailure builtin variant for ia32: the double-saving
// version when SSE2 is supported, the plain version otherwise.
Code* Deoptimizer::NotifyStubFailureBuiltin() {
  if (CpuFeatures::IsSupported(SSE2)) {
    return isolate_->builtins()->builtin(
        Builtins::kNotifyStubFailureSaveDoubles);
  }
  return isolate_->builtins()->builtin(Builtins::kNotifyStubFailure);
}


#define __ masm()->

void Deoptimizer::EntryGenerator::Generate() {
Expand Down

0 comments on commit f78e5df

Please sign in to comment.