deps: V8: cherry-pick 59d52e311bb1 (nodejs/node@9b69baf)
@@ -2893,14 +2893,38 @@ void LiftoffAssembler::CallC(const std::initializer_list<VarState> args,
       parallel_move.LoadIntoRegister(LiftoffRegister{kCArgRegs[reg_args]}, arg);
       ++reg_args;
     } else {
-      int bias = 0;
-      // On BE machines values with less than 8 bytes are right justified.
-      // bias here is relative to the stack pointer.
-      if (arg.kind() == kI32 || arg.kind() == kF32) bias = -stack_bias;
       int offset =
           (kStackFrameExtraParamSlot + stack_args) * kSystemPointerSize;
-      MemOperand dst{sp, offset + bias};
-      liftoff::StoreToMemory(this, dst, arg, r0, ip);
+      MemOperand dst{sp, offset};
+      Register scratch1 = r0;
+      Register scratch2 = ip;
+      if (arg.is_reg()) {
+        switch (arg.kind()) {
+          case kI16:
+            extsh(scratch1, arg.reg().gp());
+            StoreU64(scratch1, dst);
+            break;
+          case kI32:
+            extsw(scratch1, arg.reg().gp());
+            StoreU64(scratch1, dst);
+            break;
+          case kI64:
+            StoreU64(arg.reg().gp(), dst);
+            break;
+          default:
+            UNREACHABLE();
+        }
+      } else if (arg.is_const()) {
+        mov(scratch1, Operand(static_cast<int64_t>(arg.i32_const())));
+        StoreU64(scratch1, dst);
+      } else if (value_kind_size(arg.kind()) == 4) {
+        LoadS32(scratch1, liftoff::GetStackSlot(arg.offset()), scratch2);
+        StoreU64(scratch1, dst);
+      } else {
+        DCHECK_EQ(8, value_kind_size(arg.kind()));
+        LoadU64(scratch1, liftoff::GetStackSlot(arg.offset()), scratch1);
+        StoreU64(scratch1, dst);
+      }
       ++stack_args;
     }
   }
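
For context: the removed code stored sub-8-byte arguments at a biased offset so they would sit right-justified in their 8-byte stack slot on big-endian machines. The replacement instead widens every integer argument to a full doubleword (extsh/extsw sign-extend 16- and 32-bit register values; LoadS32 sign-extends values spilled to a Liftoff stack slot) and always performs an 8-byte store, which is correct on either byte order. A minimal standalone C++ sketch of the idea (not V8 code; the slot layout and names here are illustrative):

#include <cstdint>
#include <cstdio>
#include <cstring>

// How the callee reads one 8-byte C-call parameter slot.
int64_t read_slot(const uint8_t* slot) {
  int64_t v;
  std::memcpy(&v, slot, sizeof(v));
  return v;
}

int main() {
  const int32_t arg = -5;
  uint8_t slot[8] = {};

  // Old scheme: store only the 4 low bytes. On a big-endian machine they
  // must be right-justified in the slot (offset +4, the "bias"), or the
  // callee's 8-byte read finds them in the wrong half.
  // New scheme: sign-extend to 64 bits first (what extsw does on PPC) and
  // store the whole slot; that works on either byte order.
  const int64_t widened = static_cast<int64_t>(arg);  // extsw equivalent
  std::memcpy(slot, &widened, sizeof(widened));

  std::printf("callee sees %lld\n",
              static_cast<long long>(read_slot(slot)));  // prints -5
}

With the widened store, the callee can always read the full 8-byte slot, so the endianness-dependent adjustment (the old stack_bias) is no longer needed.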