Line data Source code
1 : /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 : * vim: set ts=8 sts=4 et sw=4 tw=99:
3 : * This Source Code Form is subject to the terms of the Mozilla Public
4 : * License, v. 2.0. If a copy of the MPL was not distributed with this
5 : * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
6 :
7 : #include "mozilla/DebugOnly.h"
8 :
9 : #include "jit/CacheIRCompiler.h"
10 : #include "jit/IonCaches.h"
11 : #include "jit/IonIC.h"
12 :
13 : #include "jit/Linker.h"
14 : #include "jit/SharedICHelpers.h"
15 : #include "proxy/Proxy.h"
16 :
17 : #include "jscompartmentinlines.h"
18 :
19 : #include "jit/MacroAssembler-inl.h"
20 : #include "vm/TypeInference-inl.h"
21 :
22 : using namespace js;
23 : using namespace js::jit;
24 :
25 : using mozilla::DebugOnly;
26 :
27 : namespace js {
28 : namespace jit {
29 :
30 : // IonCacheIRCompiler compiles CacheIR to IonIC native code.
31 21 : class MOZ_RAII IonCacheIRCompiler : public CacheIRCompiler
32 : {
33 : public:
34 : friend class AutoSaveLiveRegisters;
35 :
36 21 : IonCacheIRCompiler(JSContext* cx, const CacheIRWriter& writer, IonIC* ic, IonScript* ionScript,
37 : IonICStub* stub, const PropertyTypeCheckInfo* typeCheckInfo)
38 21 : : CacheIRCompiler(cx, writer, Mode::Ion),
39 : writer_(writer),
40 : ic_(ic),
41 : ionScript_(ionScript),
42 : stub_(stub),
43 : typeCheckInfo_(typeCheckInfo),
44 : nextStubField_(0),
45 : #ifdef DEBUG
46 : calledPrepareVMCall_(false),
47 : #endif
48 21 : savedLiveRegs_(false)
49 : {
50 21 : MOZ_ASSERT(ic_);
51 21 : MOZ_ASSERT(ionScript_);
52 21 : }
53 :
54 : MOZ_MUST_USE bool init();
55 : JitCode* compile();
56 :
57 : private:
58 : const CacheIRWriter& writer_;
59 : IonIC* ic_;
60 : IonScript* ionScript_;
61 :
62 : // The stub we're generating code for.
63 : IonICStub* stub_;
64 :
65 : // Information necessary to generate property type checks. Non-null iff
66 : // this is a SetProp/SetElem stub.
67 : const PropertyTypeCheckInfo* typeCheckInfo_;
68 :
69 : CodeOffsetJump rejoinOffset_;
70 : Vector<CodeOffset, 4, SystemAllocPolicy> nextCodeOffsets_;
71 : Maybe<LiveRegisterSet> liveRegs_;
72 : Maybe<CodeOffset> stubJitCodeOffset_;
73 : uint32_t nextStubField_;
74 :
75 : #ifdef DEBUG
76 : bool calledPrepareVMCall_;
77 : #endif
78 : bool savedLiveRegs_;
79 :
80 29 : uintptr_t readStubWord(uint32_t offset, StubField::Type type) {
81 29 : MOZ_ASSERT((offset % sizeof(uintptr_t)) == 0);
82 29 : return writer_.readStubFieldForIon(nextStubField_++, type).asWord();
83 : }
84 0 : uint64_t readStubInt64(uint32_t offset, StubField::Type type) {
85 0 : MOZ_ASSERT((offset % sizeof(uintptr_t)) == 0);
86 0 : return writer_.readStubFieldForIon(nextStubField_++, type).asInt64();
87 : }
88 0 : int32_t int32StubField(uint32_t offset) {
89 0 : return readStubWord(offset, StubField::Type::RawWord);
90 : }
91 22 : Shape* shapeStubField(uint32_t offset) {
92 22 : return (Shape*)readStubWord(offset, StubField::Type::Shape);
93 : }
94 1 : JSObject* objectStubField(uint32_t offset) {
95 1 : return (JSObject*)readStubWord(offset, StubField::Type::JSObject);
96 : }
97 6 : JSString* stringStubField(uint32_t offset) {
98 6 : return (JSString*)readStubWord(offset, StubField::Type::String);
99 : }
100 0 : JS::Symbol* symbolStubField(uint32_t offset) {
101 0 : return (JS::Symbol*)readStubWord(offset, StubField::Type::Symbol);
102 : }
103 0 : ObjectGroup* groupStubField(uint32_t offset) {
104 0 : return (ObjectGroup*)readStubWord(offset, StubField::Type::ObjectGroup);
105 : }
106 0 : JSCompartment* compartmentStubField(uint32_t offset) {
107 0 : return (JSCompartment*)readStubWord(offset, StubField::Type::RawWord);
108 : }
109 0 : jsid idStubField(uint32_t offset) {
110 0 : return mozilla::BitwiseCast<jsid>(readStubWord(offset, StubField::Type::Id));
111 : }
112 : template <typename T>
113 0 : T rawWordStubField(uint32_t offset) {
114 : static_assert(sizeof(T) == sizeof(uintptr_t), "T must have word size");
115 0 : return (T)readStubWord(offset, StubField::Type::RawWord);
116 : }
117 : template <typename T>
118 : T rawInt64StubField(uint32_t offset) {
119 : static_assert(sizeof(T) == sizeof(int64_t), "T musthave int64 size");
120 : return (T)readStubInt64(offset, StubField::Type::RawInt64);
121 : }
122 :
123 0 : uint64_t* expandoGenerationStubFieldPtr(uint32_t offset) {
124 : DebugOnly<uint64_t> generation =
125 0 : readStubInt64(offset, StubField::Type::DOMExpandoGeneration);
126 0 : uint64_t* ptr = reinterpret_cast<uint64_t*>(stub_->stubDataStart() + offset);
127 0 : MOZ_ASSERT(*ptr == generation);
128 0 : return ptr;
129 : }
130 :
131 : void prepareVMCall(MacroAssembler& masm);
132 : MOZ_MUST_USE bool callVM(MacroAssembler& masm, const VMFunction& fun);
133 :
134 : MOZ_MUST_USE bool emitAddAndStoreSlotShared(CacheOp op);
135 :
136 0 : bool needsPostBarrier() const {
137 0 : return ic_->asSetPropertyIC()->needsPostBarrier();
138 : }
139 :
140 0 : void pushStubCodePointer() {
141 0 : stubJitCodeOffset_.emplace(masm.PushWithPatch(ImmPtr((void*)-1)));
142 0 : }
143 :
144 : #define DEFINE_OP(op) MOZ_MUST_USE bool emit##op();
145 : CACHE_IR_OPS(DEFINE_OP)
146 : #undef DEFINE_OP
147 : };
148 :
// AutoSaveLiveRegisters must be used when we make a call that can GC. The
// constructor ensures all live registers are stored on the stack (where the GC
// expects them) and the destructor restores these registers.
class MOZ_RAII AutoSaveLiveRegisters
{
    IonCacheIRCompiler& compiler_;

    // Non-copyable: the save/restore pair must run exactly once.
    AutoSaveLiveRegisters(const AutoSaveLiveRegisters&) = delete;
    void operator=(const AutoSaveLiveRegisters&) = delete;

  public:
    explicit AutoSaveLiveRegisters(IonCacheIRCompiler& compiler)
      : compiler_(compiler)
    {
        MOZ_ASSERT(compiler_.liveRegs_.isSome());
        compiler_.allocator.saveIonLiveRegisters(compiler_.masm,
                                                 compiler_.liveRegs_.ref(),
                                                 compiler_.ic_->scratchRegisterForEntryJump(),
                                                 compiler_.ionScript_);
        compiler_.savedLiveRegs_ = true;
    }
    ~AutoSaveLiveRegisters() {
        // The stub code pointer must have been pushed (via pushStubCodePointer)
        // while the registers were saved, so compile() can patch it later.
        MOZ_ASSERT(compiler_.stubJitCodeOffset_.isSome(), "Must have pushed JitCode* pointer");
        compiler_.allocator.restoreIonLiveRegisters(compiler_.masm, compiler_.liveRegs_.ref());
        MOZ_ASSERT(compiler_.masm.framePushed() == compiler_.ionScript_->frameSize());
    }
};
176 :
177 : } // namespace jit
178 : } // namespace js
179 :
// Ops shared with the Baseline IC compiler simply forward to the shared
// CacheIRCompiler implementation.
#define DEFINE_SHARED_OP(op) \
    bool IonCacheIRCompiler::emit##op() { return CacheIRCompiler::emit##op(); }
CACHE_IR_SHARED_OPS(DEFINE_SHARED_OP)
#undef DEFINE_SHARED_OP
184 :
// Spill all Ion live registers (and any non-input stack operands) to the
// stack so a GC triggered during a call can see them; the inverse is
// restoreIonLiveRegisters. |scratch| is used by storeRegsInMask; |ionScript|
// is only used to assert the resulting frame size.
void
CacheRegisterAllocator::saveIonLiveRegisters(MacroAssembler& masm, LiveRegisterSet liveRegs,
                                             Register scratch, IonScript* ionScript)
{
    // We have to push all registers in liveRegs on the stack. It's possible we
    // stored other values in our live registers and stored operands on the
    // stack (where our live registers should go), so this requires some careful
    // work. Try to keep it simple by taking one small step at a time.

    // Step 1. Discard any dead operands so we can reuse their registers.
    freeDeadOperandLocations(masm);

    // Step 2. Figure out the size of our live regs.
    size_t sizeOfLiveRegsInBytes =
        liveRegs.gprs().size() * sizeof(intptr_t) +
        liveRegs.fpus().getPushSizeInBytes();

    MOZ_ASSERT(sizeOfLiveRegsInBytes > 0);

    // Step 3. Ensure all non-input operands are on the stack.
    size_t numInputs = writer_.numInputOperands();
    for (size_t i = numInputs; i < operandLocations_.length(); i++) {
        OperandLocation& loc = operandLocations_[i];
        if (loc.isInRegister())
            spillOperandToStack(masm, &loc);
    }

    // Step 4. Restore the register state, but don't discard the stack as
    // non-input operands are stored there.
    restoreInputState(masm, /* shouldDiscardStack = */ false);

    // We just restored the input state, so no input operands should be stored
    // on the stack.
#ifdef DEBUG
    for (size_t i = 0; i < numInputs; i++) {
        const OperandLocation& loc = operandLocations_[i];
        MOZ_ASSERT(!loc.isOnStack());
    }
#endif

    // Step 5. At this point our register state is correct. Stack values,
    // however, may cover the space where we have to store the live registers.
    // Move them out of the way.

    bool hasOperandOnStack = false;
    for (size_t i = numInputs; i < operandLocations_.length(); i++) {
        OperandLocation& loc = operandLocations_[i];
        if (!loc.isOnStack())
            continue;

        hasOperandOnStack = true;

        size_t operandSize = loc.stackSizeInBytes();
        size_t operandStackPushed = loc.stackPushed();
        MOZ_ASSERT(operandSize > 0);
        MOZ_ASSERT(stackPushed_ >= operandStackPushed);
        MOZ_ASSERT(operandStackPushed >= operandSize);

        // If this operand doesn't cover the live register space, there's
        // nothing to do.
        if (operandStackPushed - operandSize >= sizeOfLiveRegsInBytes) {
            MOZ_ASSERT(stackPushed_ > sizeOfLiveRegsInBytes);
            continue;
        }

        // Reserve stack space for the live registers if needed.
        if (sizeOfLiveRegsInBytes > stackPushed_) {
            size_t extraBytes = sizeOfLiveRegsInBytes - stackPushed_;
            MOZ_ASSERT((extraBytes % sizeof(uintptr_t)) == 0);
            masm.subFromStackPtr(Imm32(extraBytes));
            stackPushed_ += extraBytes;
        }

        // Push the operand below the live register space.
        if (loc.kind() == OperandLocation::PayloadStack) {
            masm.push(Address(masm.getStackPointer(), stackPushed_ - operandStackPushed));
            stackPushed_ += operandSize;
            loc.setPayloadStack(stackPushed_, loc.payloadType());
            continue;
        }
        MOZ_ASSERT(loc.kind() == OperandLocation::ValueStack);
        masm.pushValue(Address(masm.getStackPointer(), stackPushed_ - operandStackPushed));
        stackPushed_ += operandSize;
        loc.setValueStack(stackPushed_);
    }

    // Step 6. If we have any operands on the stack, adjust their stackPushed
    // values to not include sizeOfLiveRegsInBytes (this simplifies code down
    // the line). Then push/store the live registers.
    if (hasOperandOnStack) {
        MOZ_ASSERT(stackPushed_ > sizeOfLiveRegsInBytes);
        stackPushed_ -= sizeOfLiveRegsInBytes;

        for (size_t i = numInputs; i < operandLocations_.length(); i++) {
            OperandLocation& loc = operandLocations_[i];
            if (loc.isOnStack())
                loc.adjustStackPushed(-int32_t(sizeOfLiveRegsInBytes));
        }

        size_t stackBottom = stackPushed_ + sizeOfLiveRegsInBytes;
        masm.storeRegsInMask(liveRegs, Address(masm.getStackPointer(), stackBottom), scratch);
        masm.setFramePushed(masm.framePushed() + sizeOfLiveRegsInBytes);
    } else {
        // If no operands are on the stack, discard the unused stack space.
        if (stackPushed_ > 0) {
            masm.addToStackPtr(Imm32(stackPushed_));
            stackPushed_ = 0;
        }
        masm.PushRegsInMask(liveRegs);
    }
    // Previously-freed stack slots are no longer reusable after the moves
    // above.
    freePayloadSlots_.clear();
    freeValueSlots_.clear();

    MOZ_ASSERT(masm.framePushed() == ionScript->frameSize() + sizeOfLiveRegsInBytes);

    // Step 7. All live registers and non-input operands are stored on the stack
    // now, so at this point all registers except for the input registers are
    // available.
    availableRegs_.set() = GeneralRegisterSet::Not(inputRegisterSet());
    availableRegsAfterSpill_.set() = GeneralRegisterSet();

    // Step 8. We restored our input state, so we have to fix up aliased input
    // registers again.
    fixupAliasedInputs(masm);
}
310 :
311 : void
312 0 : CacheRegisterAllocator::restoreIonLiveRegisters(MacroAssembler& masm, LiveRegisterSet liveRegs)
313 : {
314 0 : masm.PopRegsInMask(liveRegs);
315 :
316 0 : availableRegs_.set() = GeneralRegisterSet();
317 0 : availableRegsAfterSpill_.set() = GeneralRegisterSet::All();
318 0 : }
319 :
// Push the bookkeeping words of an IonICCallFrameLayout (patchable stub code
// pointer, frame descriptor, return address into Ion code) in preparation
// for a VM call made via callVM below.
void
IonCacheIRCompiler::prepareVMCall(MacroAssembler& masm)
{
    uint32_t descriptor = MakeFrameDescriptor(masm.framePushed(), JitFrame_IonJS,
                                              IonICCallFrameLayout::Size());
    pushStubCodePointer();
    masm.Push(Imm32(descriptor));
    masm.Push(ImmPtr(GetReturnAddressToIonCode(cx_)));

#ifdef DEBUG
    // Recorded so callVM can assert this ran first.
    calledPrepareVMCall_ = true;
#endif
}
333 :
// Emit a call to the VM wrapper for |fun| and tear the call frame down
// afterwards. prepareVMCall must have been called first. Returns false when
// no wrapper code could be obtained.
bool
IonCacheIRCompiler::callVM(MacroAssembler& masm, const VMFunction& fun)
{
    MOZ_ASSERT(calledPrepareVMCall_);

    JitCode* code = cx_->runtime()->jitRuntime()->getVMWrapper(fun);
    if (!code)
        return false;

    uint32_t frameSize = fun.explicitStackSlots() * sizeof(void*);
    uint32_t descriptor = MakeFrameDescriptor(frameSize, JitFrame_IonICCall,
                                              ExitFrameLayout::Size());
    masm.Push(Imm32(descriptor));
    masm.callJit(code);

    // Remove rest of the frame left on the stack. We remove the return address
    // which is implicitly popped when returning.
    int framePop = sizeof(ExitFrameLayout) - sizeof(void*);

    // Pop arguments from framePushed.
    masm.implicitPop(frameSize + framePop);
    masm.freeStack(IonICCallFrameLayout::Size());
    return true;
}
358 :
// Per-IC-kind setup: make the IC's output and temp registers available to
// the register allocator, record the live register set, and describe where
// each CacheIR input operand lives. Returns false on OOM.
bool
IonCacheIRCompiler::init()
{
    if (!allocator.init())
        return false;

    size_t numInputs = writer_.numInputOperands();

    AllocatableGeneralRegisterSet available;

    switch (ic_->kind()) {
      case CacheKind::GetProp:
      case CacheKind::GetElem: {
        IonGetPropertyIC* ic = ic_->asGetPropertyIC();
        TypedOrValueRegister output = ic->output();

        // The output register(s) can be reused while compiling the stub;
        // float outputs cannot serve as general-purpose registers.
        if (output.hasValue())
            available.add(output.valueReg());
        else if (!output.typedReg().isFloat())
            available.add(output.typedReg().gpr());

        if (ic->maybeTemp() != InvalidReg)
            available.add(ic->maybeTemp());

        liveRegs_.emplace(ic->liveRegs());
        outputUnchecked_.emplace(output);

        allowDoubleResult_.emplace(ic->allowDoubleResult());

        // GetProp has one input (value); GetElem also has an id.
        MOZ_ASSERT(numInputs == 1 || numInputs == 2);

        allocator.initInputLocation(0, ic->value());
        if (numInputs > 1)
            allocator.initInputLocation(1, ic->id());
        break;
      }
      case CacheKind::SetProp:
      case CacheKind::SetElem: {
        IonSetPropertyIC* ic = ic_->asSetPropertyIC();

        available.add(ic->temp());

        liveRegs_.emplace(ic->liveRegs());

        allocator.initInputLocation(0, ic->object(), JSVAL_TYPE_OBJECT);

        // SetProp: (object, rhs). SetElem: (object, id, rhs).
        if (ic->kind() == CacheKind::SetProp) {
            MOZ_ASSERT(numInputs == 2);
            allocator.initInputLocation(1, ic->rhs());
        } else {
            MOZ_ASSERT(numInputs == 3);
            allocator.initInputLocation(1, ic->id());
            allocator.initInputLocation(2, ic->rhs());
        }
        break;
      }
      case CacheKind::GetName: {
        IonGetNameIC* ic = ic_->asGetNameIC();
        ValueOperand output = ic->output();

        available.add(output);
        available.add(ic->temp());

        liveRegs_.emplace(ic->liveRegs());
        outputUnchecked_.emplace(output);

        MOZ_ASSERT(numInputs == 1);
        allocator.initInputLocation(0, ic->environment(), JSVAL_TYPE_OBJECT);
        break;
      }
      case CacheKind::BindName: {
        IonBindNameIC* ic = ic_->asBindNameIC();
        Register output = ic->output();

        available.add(output);
        available.add(ic->temp());

        liveRegs_.emplace(ic->liveRegs());
        outputUnchecked_.emplace(TypedOrValueRegister(MIRType::Object, AnyRegister(output)));

        MOZ_ASSERT(numInputs == 1);
        allocator.initInputLocation(0, ic->environment(), JSVAL_TYPE_OBJECT);
        break;
      }
      case CacheKind::In: {
        IonInIC* ic = ic_->asInIC();
        Register output = ic->output();

        available.add(output);

        liveRegs_.emplace(ic->liveRegs());
        outputUnchecked_.emplace(TypedOrValueRegister(MIRType::Boolean, AnyRegister(output)));

        MOZ_ASSERT(numInputs == 2);
        allocator.initInputLocation(0, ic->key());
        allocator.initInputLocation(1, TypedOrValueRegister(MIRType::Object,
                                                            AnyRegister(ic->object())));
        break;
      }
      case CacheKind::HasOwn: {
        IonHasOwnIC* ic = ic_->asHasOwnIC();
        Register output = ic->output();

        available.add(output);

        liveRegs_.emplace(ic->liveRegs());
        outputUnchecked_.emplace(TypedOrValueRegister(MIRType::Boolean, AnyRegister(output)));

        MOZ_ASSERT(numInputs == 2);
        allocator.initInputLocation(0, ic->id());
        allocator.initInputLocation(1, ic->value());
        break;
      }
      case CacheKind::Call:
      case CacheKind::Compare:
      case CacheKind::TypeOf:
      case CacheKind::GetPropSuper:
      case CacheKind::GetElemSuper:
        MOZ_CRASH("Unsupported IC");
    }

    if (liveRegs_)
        liveFloatRegs_ = LiveFloatRegisterSet(liveRegs_->fpus());

    allocator.initAvailableRegs(available);
    allocator.initAvailableRegsAfterSpill();
    return true;
}
487 :
// Compile the CacheIR for this stub to native code: emit each op, then the
// failure paths, link the resulting code, and patch the rejoin jump, the
// next-stub jumps, and the pushed stub code pointer (if any). Returns
// nullptr on failure.
JitCode*
IonCacheIRCompiler::compile()
{
    masm.setFramePushed(ionScript_->frameSize());
    if (cx_->runtime()->geckoProfiler().enabled())
        masm.enableProfilingInstrumentation();

    allocator.fixupAliasedInputs(masm);

    // Main loop: dispatch each CacheIR op to its emitter.
    do {
        switch (reader.readOp()) {
#define DEFINE_OP(op)                   \
          case CacheOp::op:             \
            if (!emit##op())            \
                return nullptr;         \
            break;
          CACHE_IR_OPS(DEFINE_OP)
#undef DEFINE_OP

          default:
            MOZ_CRASH("Invalid op");
        }

        allocator.nextOp();
    } while (reader.more());

    // All stub fields must have been consumed by the emitters.
    MOZ_ASSERT(nextStubField_ == writer_.numStubFields());

    masm.assumeUnreachable("Should have returned from IC");

    // Done emitting the main IC code. Now emit the failure paths.
    for (size_t i = 0; i < failurePaths.length(); i++) {
        if (!emitFailurePath(i))
            return nullptr;
        Register scratch = ic_->scratchRegisterForEntryJump();
        // Placeholder address (-1), patched below to the next stub's code.
        CodeOffset offset = masm.movWithPatch(ImmWord(-1), scratch);
        masm.jump(Address(scratch, 0));
        if (!nextCodeOffsets_.append(offset))
            return nullptr;
    }

    Linker linker(masm);
    AutoFlushICache afc("getStubCode");
    Rooted<JitCode*> newStubCode(cx_, linker.newCode<NoGC>(cx_, ION_CODE));
    if (!newStubCode) {
        cx_->recoverFromOutOfMemory();
        return nullptr;
    }

    // Patch the jump back into Ion code.
    rejoinOffset_.fixup(&masm);
    CodeLocationJump rejoinJump(newStubCode, rejoinOffset_);
    PatchJump(rejoinJump, ic_->rejoinLabel());

    // Patch each failure-path jump to point at the next stub in the chain.
    for (CodeOffset offset : nextCodeOffsets_) {
        Assembler::PatchDataWithValueCheck(CodeLocationLabel(newStubCode, offset),
                                           ImmPtr(stub_->nextCodeRawPtr()),
                                           ImmPtr((void*)-1));
    }
    // Patch the pushed stub code pointer (see pushStubCodePointer).
    if (stubJitCodeOffset_) {
        Assembler::PatchDataWithValueCheck(CodeLocationLabel(newStubCode, *stubJitCodeOffset_),
                                           ImmPtr(newStubCode.get()),
                                           ImmPtr((void*)-1));
    }

    return newStubCode;
}
554 :
555 : bool
556 22 : IonCacheIRCompiler::emitGuardShape()
557 : {
558 22 : Register obj = allocator.useRegister(masm, reader.objOperandId());
559 22 : Shape* shape = shapeStubField(reader.stubOffset());
560 :
561 : FailurePath* failure;
562 22 : if (!addFailurePath(&failure))
563 0 : return false;
564 :
565 22 : masm.branchTestObjShape(Assembler::NotEqual, obj, shape, failure->label());
566 22 : return true;
567 : }
568 :
569 : bool
570 0 : IonCacheIRCompiler::emitGuardGroup()
571 : {
572 0 : Register obj = allocator.useRegister(masm, reader.objOperandId());
573 0 : ObjectGroup* group = groupStubField(reader.stubOffset());
574 :
575 : FailurePath* failure;
576 0 : if (!addFailurePath(&failure))
577 0 : return false;
578 :
579 0 : masm.branchTestObjGroup(Assembler::NotEqual, obj, group, failure->label());
580 0 : return true;
581 : }
582 :
583 : bool
584 0 : IonCacheIRCompiler::emitGuardGroupHasUnanalyzedNewScript()
585 : {
586 0 : ObjectGroup* group = groupStubField(reader.stubOffset());
587 0 : AutoScratchRegister scratch1(allocator, masm);
588 0 : AutoScratchRegister scratch2(allocator, masm);
589 :
590 : FailurePath* failure;
591 0 : if (!addFailurePath(&failure))
592 0 : return false;
593 :
594 0 : masm.movePtr(ImmGCPtr(group), scratch1);
595 0 : masm.guardGroupHasUnanalyzedNewScript(scratch1, scratch2, failure->label());
596 0 : return true;
597 : }
598 :
599 : bool
600 0 : IonCacheIRCompiler::emitGuardProto()
601 : {
602 0 : Register obj = allocator.useRegister(masm, reader.objOperandId());
603 0 : JSObject* proto = objectStubField(reader.stubOffset());
604 :
605 0 : AutoScratchRegister scratch(allocator, masm);
606 :
607 : FailurePath* failure;
608 0 : if (!addFailurePath(&failure))
609 0 : return false;
610 :
611 0 : masm.loadObjProto(obj, scratch);
612 0 : masm.branchPtr(Assembler::NotEqual, scratch, ImmGCPtr(proto), failure->label());
613 0 : return true;
614 : }
615 :
616 : bool
617 0 : IonCacheIRCompiler::emitGuardCompartment()
618 : {
619 0 : Register obj = allocator.useRegister(masm, reader.objOperandId());
620 0 : objectStubField(reader.stubOffset()); // Read global wrapper.
621 0 : JSCompartment* compartment = compartmentStubField(reader.stubOffset());
622 :
623 0 : AutoScratchRegister scratch(allocator, masm);
624 :
625 : FailurePath* failure;
626 0 : if (!addFailurePath(&failure))
627 0 : return false;
628 :
629 0 : masm.loadPtr(Address(obj, JSObject::offsetOfGroup()), scratch);
630 0 : masm.loadPtr(Address(scratch, ObjectGroup::offsetOfCompartment()), scratch);
631 0 : masm.branchPtr(Assembler::NotEqual, scratch, ImmPtr(compartment), failure->label());
632 0 : return true;
633 : }
634 :
635 : bool
636 0 : IonCacheIRCompiler::emitGuardSpecificObject()
637 : {
638 0 : Register obj = allocator.useRegister(masm, reader.objOperandId());
639 0 : JSObject* expected = objectStubField(reader.stubOffset());
640 :
641 : FailurePath* failure;
642 0 : if (!addFailurePath(&failure))
643 0 : return false;
644 :
645 0 : masm.branchPtr(Assembler::NotEqual, obj, ImmGCPtr(expected), failure->label());
646 0 : return true;
647 : }
648 :
// Guard that the input string equals a specific atom. Fast paths in order:
// pointer equality succeeds; a different atom fails; a different length
// fails. Otherwise fall back to calling EqualStringsHelper out-of-line.
bool
IonCacheIRCompiler::emitGuardSpecificAtom()
{
    Register str = allocator.useRegister(masm, reader.stringOperandId());
    AutoScratchRegister scratch(allocator, masm);

    JSAtom* atom = &stringStubField(reader.stubOffset())->asAtom();

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    Label done;
    masm.branchPtr(Assembler::Equal, str, ImmGCPtr(atom), &done);

    // The pointers are not equal, so if the input string is also an atom it
    // must be a different string.
    masm.branchTest32(Assembler::NonZero, Address(str, JSString::offsetOfFlags()),
                      Imm32(JSString::ATOM_BIT), failure->label());

    // Check the length.
    masm.branch32(Assembler::NotEqual, Address(str, JSString::offsetOfLength()),
                  Imm32(atom->length()), failure->label());

    // We have a non-atomized string with the same length. Call a helper
    // function to do the comparison.
    LiveRegisterSet volatileRegs(GeneralRegisterSet::Volatile(), liveVolatileFloatRegs());
    masm.PushRegsInMask(volatileRegs);

    masm.setupUnalignedABICall(scratch);
    masm.movePtr(ImmGCPtr(atom), scratch);
    masm.passABIArg(scratch);
    masm.passABIArg(str);
    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, EqualStringsHelper));
    masm.mov(ReturnReg, scratch);

    // Restore the volatile registers but keep the helper's boolean result
    // live in |scratch|.
    LiveRegisterSet ignore;
    ignore.add(scratch);
    masm.PopRegsInMaskIgnore(volatileRegs, ignore);
    masm.branchIfFalseBool(scratch, failure->label());

    masm.bind(&done);
    return true;
}
693 :
694 : bool
695 0 : IonCacheIRCompiler::emitGuardSpecificSymbol()
696 : {
697 0 : Register sym = allocator.useRegister(masm, reader.symbolOperandId());
698 0 : JS::Symbol* expected = symbolStubField(reader.stubOffset());
699 :
700 : FailurePath* failure;
701 0 : if (!addFailurePath(&failure))
702 0 : return false;
703 :
704 0 : masm.branchPtr(Assembler::NotEqual, sym, ImmGCPtr(expected), failure->label());
705 0 : return true;
706 : }
707 :
708 : bool
709 0 : IonCacheIRCompiler::emitLoadFixedSlotResult()
710 : {
711 0 : AutoOutputRegister output(*this);
712 0 : Register obj = allocator.useRegister(masm, reader.objOperandId());
713 0 : int32_t offset = int32StubField(reader.stubOffset());
714 0 : masm.loadTypedOrValue(Address(obj, offset), output);
715 0 : return true;
716 : }
717 :
718 : bool
719 0 : IonCacheIRCompiler::emitLoadDynamicSlotResult()
720 : {
721 0 : AutoOutputRegister output(*this);
722 0 : Register obj = allocator.useRegister(masm, reader.objOperandId());
723 0 : int32_t offset = int32StubField(reader.stubOffset());
724 :
725 0 : AutoScratchRegisterMaybeOutput scratch(allocator, masm, output);
726 0 : masm.loadPtr(Address(obj, NativeObject::offsetOfSlots()), scratch);
727 0 : masm.loadTypedOrValue(Address(scratch, offset), output);
728 0 : return true;
729 : }
730 :
// Megamorphic property load: reserve a Value outparam slot on the stack,
// call GetNativeDataProperty out-of-line to fill it, pop the result into the
// output register, and take the failure path when the helper returns false.
bool
IonCacheIRCompiler::emitMegamorphicLoadSlotResult()
{
    AutoOutputRegister output(*this);

    Register obj = allocator.useRegister(masm, reader.objOperandId());
    PropertyName* name = stringStubField(reader.stubOffset())->asAtom().asPropertyName();
    bool handleMissing = reader.readBool();

    AutoScratchRegisterMaybeOutput scratch1(allocator, masm, output);
    AutoScratchRegister scratch2(allocator, masm);
    AutoScratchRegister scratch3(allocator, masm);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    // Push the outparam Value slot; scratch3 points at it.
    masm.Push(UndefinedValue());
    masm.moveStackPtrTo(scratch3.get());

    // Save volatile registers around the ABI call, except the scratches we
    // still need afterwards.
    LiveRegisterSet volatileRegs(GeneralRegisterSet::Volatile(), liveVolatileFloatRegs());
    volatileRegs.takeUnchecked(scratch1);
    volatileRegs.takeUnchecked(scratch2);
    volatileRegs.takeUnchecked(scratch3);
    masm.PushRegsInMask(volatileRegs);

    // GetNativeDataProperty<handleMissing>(cx, obj, name, vp)
    masm.setupUnalignedABICall(scratch1);
    masm.loadJSContext(scratch1);
    masm.passABIArg(scratch1);
    masm.passABIArg(obj);
    masm.movePtr(ImmGCPtr(name), scratch2);
    masm.passABIArg(scratch2);
    masm.passABIArg(scratch3);
    if (handleMissing)
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, (GetNativeDataProperty<true>)));
    else
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, (GetNativeDataProperty<false>)));
    masm.mov(ReturnReg, scratch2);
    masm.PopRegsInMask(volatileRegs);

    // Move the outparam Value into the output and pop the stack slot.
    masm.loadTypedOrValue(Address(masm.getStackPointer(), 0), output);
    masm.adjustStack(sizeof(Value));

    // The helper's boolean result decides success.
    masm.branchIfFalseBool(scratch2, failure->label());
    return true;
}
777 :
// Megamorphic property store: spill |val| to a stack Value slot, call
// SetNativeDataProperty out-of-line on that slot, reload |val|, and take the
// failure path when the helper returns false.
bool
IonCacheIRCompiler::emitMegamorphicStoreSlot()
{
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    PropertyName* name = stringStubField(reader.stubOffset())->asAtom().asPropertyName();
    ValueOperand val = allocator.useValueRegister(masm, reader.valOperandId());
    bool needsTypeBarrier = reader.readBool();

    AutoScratchRegister scratch1(allocator, masm);
    AutoScratchRegister scratch2(allocator, masm);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    // Store the value on the stack and point val.scratchReg() at it.
    masm.Push(val);
    masm.moveStackPtrTo(val.scratchReg());

    // Save volatile registers around the ABI call, except the registers we
    // still need afterwards.
    LiveRegisterSet volatileRegs(GeneralRegisterSet::Volatile(), liveVolatileFloatRegs());
    volatileRegs.takeUnchecked(scratch1);
    volatileRegs.takeUnchecked(scratch2);
    volatileRegs.takeUnchecked(val);
    masm.PushRegsInMask(volatileRegs);

    // SetNativeDataProperty<needsTypeBarrier>(cx, obj, name, val)
    masm.setupUnalignedABICall(scratch1);
    masm.loadJSContext(scratch1);
    masm.passABIArg(scratch1);
    masm.passABIArg(obj);
    masm.movePtr(ImmGCPtr(name), scratch2);
    masm.passABIArg(scratch2);
    masm.passABIArg(val.scratchReg());
    if (needsTypeBarrier)
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, (SetNativeDataProperty<true>)));
    else
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, (SetNativeDataProperty<false>)));
    masm.mov(ReturnReg, scratch1);
    masm.PopRegsInMask(volatileRegs);

    // Reload the value and pop the stack slot, then check the result.
    masm.loadValue(Address(masm.getStackPointer(), 0), val);
    masm.adjustStack(sizeof(Value));

    masm.branchIfFalseBool(scratch1, failure->label());
    return true;
}
822 :
// Guard by calling ObjectHasGetterSetter(cx, obj, shape) out-of-line; take
// the failure path when the helper returns false.
bool
IonCacheIRCompiler::emitGuardHasGetterSetter()
{
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    Shape* shape = shapeStubField(reader.stubOffset());

    AutoScratchRegister scratch1(allocator, masm);
    AutoScratchRegister scratch2(allocator, masm);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    // Save volatile registers around the ABI call, except the scratches we
    // still need afterwards.
    LiveRegisterSet volatileRegs(GeneralRegisterSet::Volatile(), liveVolatileFloatRegs());
    volatileRegs.takeUnchecked(scratch1);
    volatileRegs.takeUnchecked(scratch2);
    masm.PushRegsInMask(volatileRegs);

    // ObjectHasGetterSetter(cx, obj, shape)
    masm.setupUnalignedABICall(scratch1);
    masm.loadJSContext(scratch1);
    masm.passABIArg(scratch1);
    masm.passABIArg(obj);
    masm.movePtr(ImmGCPtr(shape), scratch2);
    masm.passABIArg(scratch2);
    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, ObjectHasGetterSetter));
    masm.mov(ReturnReg, scratch1);
    masm.PopRegsInMask(volatileRegs);

    masm.branchIfFalseBool(scratch1, failure->label());
    return true;
}
854 :
// Call a scripted getter: push an IonICCallFrameLayout, then an aligned
// JitFrameLayout (|this| plus undefined formals, argc = 0), jump into the
// getter's JIT code, and store the call result in the output register.
bool
IonCacheIRCompiler::emitCallScriptedGetterResult()
{
    AutoSaveLiveRegisters save(*this);
    AutoOutputRegister output(*this);

    Register obj = allocator.useRegister(masm, reader.objOperandId());
    JSFunction* target = &objectStubField(reader.stubOffset())->as<JSFunction>();
    AutoScratchRegister scratch(allocator, masm);

    allocator.discardStack(masm);

    uint32_t framePushedBefore = masm.framePushed();

    // Construct IonICCallFrameLayout.
    uint32_t descriptor = MakeFrameDescriptor(masm.framePushed(), JitFrame_IonJS,
                                              IonICCallFrameLayout::Size());
    pushStubCodePointer();
    masm.Push(Imm32(descriptor));
    masm.Push(ImmPtr(GetReturnAddressToIonCode(cx_)));

    // The JitFrameLayout pushed below will be aligned to JitStackAlignment,
    // so we just have to make sure the stack is aligned after we push the
    // |this| + argument Values.
    uint32_t argSize = (target->nargs() + 1) * sizeof(Value);
    uint32_t padding = ComputeByteAlignment(masm.framePushed() + argSize, JitStackAlignment);
    MOZ_ASSERT(padding % sizeof(uintptr_t) == 0);
    MOZ_ASSERT(padding < JitStackAlignment);
    masm.reserveStack(padding);

    // Push undefined for each formal, then |this| (argc below is 0).
    for (size_t i = 0; i < target->nargs(); i++)
        masm.Push(UndefinedValue());
    masm.Push(TypedOrValueRegister(MIRType::Object, AnyRegister(obj)));

    masm.movePtr(ImmGCPtr(target), scratch);

    descriptor = MakeFrameDescriptor(argSize + padding, JitFrame_IonICCall,
                                     JitFrameLayout::Size());
    masm.Push(Imm32(0)); // argc
    masm.Push(scratch);
    masm.Push(Imm32(descriptor));

    // Check stack alignment. Add sizeof(uintptr_t) for the return address.
    MOZ_ASSERT(((masm.framePushed() + sizeof(uintptr_t)) % JitStackAlignment) == 0);

    // The getter has JIT code now and we will only discard the getter's JIT
    // code when discarding all JIT code in the Zone, so we can assume it'll
    // still have JIT code.
    MOZ_ASSERT(target->hasJITCode());
    masm.loadPtr(Address(scratch, JSFunction::offsetOfNativeOrScript()), scratch);
    masm.loadBaselineOrIonRaw(scratch, scratch, nullptr);
    masm.callJit(scratch);
    masm.storeCallResultValue(output);

    // Tear down the frames pushed above.
    masm.freeStack(masm.framePushed() - framePushedBefore);
    return true;
}
912 :
bool
IonCacheIRCompiler::emitCallNativeGetterResult()
{
    // Call a native (C++) getter: build the |vp| array on the stack, enter a
    // fake exit frame (so the GC and stack walker can traverse past this
    // call), then invoke the native through the platform ABI.
    AutoSaveLiveRegisters save(*this);
    AutoOutputRegister output(*this);

    Register obj = allocator.useRegister(masm, reader.objOperandId());
    JSFunction* target = &objectStubField(reader.stubOffset())->as<JSFunction>();
    MOZ_ASSERT(target->isNative());

    AutoScratchRegister argJSContext(allocator, masm);
    AutoScratchRegister argUintN(allocator, masm);
    AutoScratchRegister argVp(allocator, masm);
    AutoScratchRegister scratch(allocator, masm);

    allocator.discardStack(masm);

    // Native functions have the signature:
    //  bool (*)(JSContext*, unsigned, Value* vp)
    // Where vp[0] is space for an outparam, vp[1] is |this|, and vp[2] onward
    // are the function arguments.

    // Construct vp array:
    // Push object value for |this|
    masm.Push(TypedOrValueRegister(MIRType::Object, AnyRegister(obj)));
    // Push callee/outparam.
    masm.Push(ObjectValue(*target));

    // Preload arguments into registers.
    masm.loadJSContext(argJSContext);
    masm.move32(Imm32(0), argUintN); // argc == 0 for a getter call.
    masm.moveStackPtrTo(argVp.get());

    // Push marking data for later use.
    masm.Push(argUintN);
    pushStubCodePointer();

    if (!masm.icBuildOOLFakeExitFrame(GetReturnAddressToIonCode(cx_), save))
        return false;
    masm.enterFakeExitFrame(argJSContext, scratch, IonOOLNativeExitFrameLayoutToken);

    // Construct and execute call.
    masm.setupUnalignedABICall(scratch);
    masm.passABIArg(argJSContext);
    masm.passABIArg(argUintN);
    masm.passABIArg(argVp);
    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, target->native()));

    // Test for failure.
    masm.branchIfFalseBool(ReturnReg, masm.exceptionLabel());

    // Load the outparam vp[0] into output register(s).
    Address outparam(masm.getStackPointer(), IonOOLNativeExitFrameLayout::offsetOfResult());
    masm.loadValue(outparam, output.valueReg());

    // Pop the exit frame plus the vp array (Size(0): zero extra arguments).
    masm.adjustStack(IonOOLNativeExitFrameLayout::Size(0));
    return true;
}
971 :
bool
IonCacheIRCompiler::emitCallProxyGetResult()
{
    // Perform a proxy [[Get]] with a constant id by calling ProxyGetProperty
    // through the ABI, under a fake exit frame. The handle arguments are
    // materialized as pointers into values pushed on the stack.
    AutoSaveLiveRegisters save(*this);
    AutoOutputRegister output(*this);

    Register obj = allocator.useRegister(masm, reader.objOperandId());
    jsid id = idStubField(reader.stubOffset());

    // ProxyGetProperty(JSContext* cx, HandleObject proxy, HandleId id,
    //                  MutableHandleValue vp)
    AutoScratchRegisterMaybeOutput argJSContext(allocator, masm, output);
    AutoScratchRegister argProxy(allocator, masm);
    AutoScratchRegister argId(allocator, masm);
    AutoScratchRegister argVp(allocator, masm);
    AutoScratchRegister scratch(allocator, masm);

    allocator.discardStack(masm);

    // Push stubCode for marking.
    pushStubCodePointer();

    // Push args on stack first so we can take pointers to make handles.
    masm.Push(UndefinedValue()); // vp outparam slot.
    masm.moveStackPtrTo(argVp.get());

    masm.Push(id, scratch);
    masm.moveStackPtrTo(argId.get());

    // Push the proxy. Also used as receiver.
    masm.Push(obj);
    masm.moveStackPtrTo(argProxy.get());

    masm.loadJSContext(argJSContext);

    if (!masm.icBuildOOLFakeExitFrame(GetReturnAddressToIonCode(cx_), save))
        return false;
    masm.enterFakeExitFrame(argJSContext, scratch, IonOOLProxyExitFrameLayoutToken);

    // Make the call.
    masm.setupUnalignedABICall(scratch);
    masm.passABIArg(argJSContext);
    masm.passABIArg(argProxy);
    masm.passABIArg(argId);
    masm.passABIArg(argVp);
    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, ProxyGetProperty));

    // Test for failure.
    masm.branchIfFalseBool(ReturnReg, masm.exceptionLabel());

    // Load the outparam vp[0] into output register(s).
    Address outparam(masm.getStackPointer(), IonOOLProxyExitFrameLayout::offsetOfResult());
    masm.loadValue(outparam, output.valueReg());

    // masm.leaveExitFrame & pop locals
    masm.adjustStack(IonOOLProxyExitFrameLayout::Size());
    return true;
}
1030 :
// VM-call signature and wrapper info for ProxyGetPropertyByValue, used by
// emitCallProxyGetByValueResult.
typedef bool (*ProxyGetPropertyByValueFn)(JSContext*, HandleObject, HandleValue, MutableHandleValue);
static const VMFunction ProxyGetPropertyByValueInfo =
    FunctionInfo<ProxyGetPropertyByValueFn>(ProxyGetPropertyByValue, "ProxyGetPropertyByValue");
1034 :
1035 : bool
1036 0 : IonCacheIRCompiler::emitCallProxyGetByValueResult()
1037 : {
1038 0 : AutoSaveLiveRegisters save(*this);
1039 0 : AutoOutputRegister output(*this);
1040 :
1041 0 : Register obj = allocator.useRegister(masm, reader.objOperandId());
1042 0 : ValueOperand idVal = allocator.useValueRegister(masm, reader.valOperandId());
1043 :
1044 0 : allocator.discardStack(masm);
1045 :
1046 0 : prepareVMCall(masm);
1047 :
1048 0 : masm.Push(idVal);
1049 0 : masm.Push(obj);
1050 :
1051 0 : if (!callVM(masm, ProxyGetPropertyByValueInfo))
1052 0 : return false;
1053 :
1054 0 : masm.storeCallResultValue(output);
1055 0 : return true;
1056 : }
1057 :
// VM-call signature and wrapper info for ProxyHasOwn, used by
// emitCallProxyHasOwnResult.
typedef bool (*ProxyHasOwnFn)(JSContext*, HandleObject, HandleValue, MutableHandleValue);
static const VMFunction ProxyHasOwnInfo = FunctionInfo<ProxyHasOwnFn>(ProxyHasOwn, "ProxyHasOwn");
1060 :
1061 : bool
1062 0 : IonCacheIRCompiler::emitCallProxyHasOwnResult()
1063 : {
1064 0 : AutoSaveLiveRegisters save(*this);
1065 0 : AutoOutputRegister output(*this);
1066 :
1067 0 : Register obj = allocator.useRegister(masm, reader.objOperandId());
1068 0 : ValueOperand idVal = allocator.useValueRegister(masm, reader.valOperandId());
1069 :
1070 0 : allocator.discardStack(masm);
1071 :
1072 0 : prepareVMCall(masm);
1073 :
1074 0 : masm.Push(idVal);
1075 0 : masm.Push(obj);
1076 :
1077 0 : if (!callVM(masm, ProxyHasOwnInfo))
1078 0 : return false;
1079 :
1080 0 : masm.storeCallResultValue(output);
1081 0 : return true;
1082 : }
1083 :
1084 :
1085 : bool
1086 0 : IonCacheIRCompiler::emitLoadUnboxedPropertyResult()
1087 : {
1088 0 : AutoOutputRegister output(*this);
1089 0 : Register obj = allocator.useRegister(masm, reader.objOperandId());
1090 :
1091 0 : JSValueType fieldType = reader.valueType();
1092 0 : int32_t fieldOffset = int32StubField(reader.stubOffset());
1093 0 : masm.loadUnboxedProperty(Address(obj, fieldOffset), fieldType, output);
1094 0 : return true;
1095 : }
1096 :
bool
IonCacheIRCompiler::emitGuardFrameHasNoArgumentsObject()
{
    // This op inspects the Baseline frame; Ion ICs never compile it.
    MOZ_CRASH("Baseline-specific op");
}
1102 :
bool
IonCacheIRCompiler::emitLoadFrameCalleeResult()
{
    // This op inspects the Baseline frame; Ion ICs never compile it.
    MOZ_CRASH("Baseline-specific op");
}
1108 :
bool
IonCacheIRCompiler::emitLoadFrameNumActualArgsResult()
{
    // This op inspects the Baseline frame; Ion ICs never compile it.
    MOZ_CRASH("Baseline-specific op");
}
1114 :
bool
IonCacheIRCompiler::emitLoadFrameArgumentResult()
{
    // This op inspects the Baseline frame; Ion ICs never compile it.
    MOZ_CRASH("Baseline-specific op");
}
1120 :
1121 : bool
1122 0 : IonCacheIRCompiler::emitLoadEnvironmentFixedSlotResult()
1123 : {
1124 0 : AutoOutputRegister output(*this);
1125 0 : Register obj = allocator.useRegister(masm, reader.objOperandId());
1126 0 : int32_t offset = int32StubField(reader.stubOffset());
1127 :
1128 : FailurePath* failure;
1129 0 : if (!addFailurePath(&failure))
1130 0 : return false;
1131 :
1132 : // Check for uninitialized lexicals.
1133 0 : Address slot(obj, offset);
1134 0 : masm.branchTestMagic(Assembler::Equal, slot, failure->label());
1135 :
1136 : // Load the value.
1137 0 : masm.loadTypedOrValue(slot, output);
1138 0 : return true;
1139 : }
1140 :
1141 : bool
1142 0 : IonCacheIRCompiler::emitLoadEnvironmentDynamicSlotResult()
1143 : {
1144 0 : AutoOutputRegister output(*this);
1145 0 : Register obj = allocator.useRegister(masm, reader.objOperandId());
1146 0 : int32_t offset = int32StubField(reader.stubOffset());
1147 0 : AutoScratchRegisterMaybeOutput scratch(allocator, masm, output);
1148 :
1149 : FailurePath* failure;
1150 0 : if (!addFailurePath(&failure))
1151 0 : return false;
1152 :
1153 0 : masm.loadPtr(Address(obj, NativeObject::offsetOfSlots()), scratch);
1154 :
1155 : // Check for uninitialized lexicals.
1156 0 : Address slot(scratch, offset);
1157 0 : masm.branchTestMagic(Assembler::Equal, slot, failure->label());
1158 :
1159 : // Load the value.
1160 0 : masm.loadTypedOrValue(slot, output);
1161 0 : return true;
1162 : }
1163 :
1164 :
bool
IonCacheIRCompiler::emitLoadStringResult()
{
    // Only the Baseline CacheIR compiler emits this op.
    MOZ_CRASH("not used in ion");
}
1170 :
// VM-call signature and wrapper info for StringSplitHelper, used by
// emitCallStringSplitResult.
typedef bool (*StringSplitHelperFn)(JSContext*, HandleString, HandleString, HandleObjectGroup,
                                    uint32_t limit, MutableHandleValue);
static const VMFunction StringSplitHelperInfo =
    FunctionInfo<StringSplitHelperFn>(StringSplitHelper, "StringSplitHelper");
1175 :
1176 : bool
1177 0 : IonCacheIRCompiler::emitCallStringSplitResult()
1178 : {
1179 0 : AutoSaveLiveRegisters save(*this);
1180 0 : AutoOutputRegister output(*this);
1181 :
1182 0 : Register str = allocator.useRegister(masm, reader.stringOperandId());
1183 0 : Register sep = allocator.useRegister(masm, reader.stringOperandId());
1184 0 : ObjectGroup* group = groupStubField(reader.stubOffset());
1185 :
1186 0 : allocator.discardStack(masm);
1187 :
1188 0 : prepareVMCall(masm);
1189 :
1190 0 : masm.Push(str);
1191 0 : masm.Push(sep);
1192 0 : masm.Push(ImmGCPtr(group));
1193 0 : masm.Push(Imm32(INT32_MAX));
1194 :
1195 0 : if (!callVM(masm, StringSplitHelperInfo))
1196 0 : return false;
1197 :
1198 0 : masm.storeCallResultValue(output);
1199 0 : return true;
1200 : }
1201 :
1202 : static bool
1203 0 : GroupHasPropertyTypes(ObjectGroup* group, jsid* id, Value* v)
1204 : {
1205 0 : if (group->unknownProperties())
1206 0 : return true;
1207 0 : HeapTypeSet* propTypes = group->maybeGetProperty(*id);
1208 0 : if (!propTypes)
1209 0 : return true;
1210 0 : if (!propTypes->nonConstantProperty())
1211 0 : return false;
1212 0 : return propTypes->hasType(TypeSet::GetValueType(*v));
1213 : }
1214 :
static void
EmitCheckPropertyTypes(MacroAssembler& masm, const PropertyTypeCheckInfo* typeCheckInfo,
                       Register obj, const ConstantOrRegister& val,
                       const LiveRegisterSet& liveRegs, Label* failures)
{
    // Emit code to check |val| is part of the property's HeapTypeSet.
    //
    // Fast path: a compile-time check (for constants / typed registers) or an
    // inline guardTypeSet. Slow path: an ABI call to GroupHasPropertyTypes to
    // re-check the (possibly updated) type set; only if that also fails do we
    // jump to |failures|. If no check is needed, this emits nothing.

    if (!typeCheckInfo->isSet())
        return;

    ObjectGroup* group = typeCheckInfo->group();
    if (group->unknownProperties())
        return;

    jsid id = typeCheckInfo->id();
    HeapTypeSet* propTypes = group->maybeGetProperty(id);
    if (propTypes && propTypes->unknown())
        return;

    // Use the object register as scratch, as we don't need it here.
    masm.Push(obj);
    Register scratch1 = obj;

    bool checkTypeSet = true;
    Label failedFastPath;

    // A constant property can never satisfy a plain store barrier.
    if (propTypes && !propTypes->nonConstantProperty())
        masm.jump(&failedFastPath);

    if (val.constant()) {
        // If the input is a constant, then don't bother if the barrier will always fail.
        if (!propTypes || !propTypes->hasType(TypeSet::GetValueType(val.value())))
            masm.jump(&failedFastPath);
        checkTypeSet = false;
    } else {
        // We can do the same trick as above for primitive types of specialized
        // registers.
        TypedOrValueRegister reg = val.reg();
        if (reg.hasTyped() && reg.type() != MIRType::Object) {
            JSValueType valType = ValueTypeFromMIRType(reg.type());
            if (!propTypes || !propTypes->hasType(TypeSet::PrimitiveType(valType)))
                masm.jump(&failedFastPath);
            checkTypeSet = false;
        }
    }

    Label done;
    if (checkTypeSet) {
        TypedOrValueRegister valReg = val.reg();
        if (propTypes) {
            // guardTypeSet can read from type sets without triggering read barriers.
            TypeSet::readBarrier(propTypes);
            masm.guardTypeSet(valReg, propTypes, BarrierKind::TypeSet, scratch1, &failedFastPath);
            masm.jump(&done);
        } else {
            masm.jump(&failedFastPath);
        }
    }

    if (failedFastPath.used()) {
        // The inline type check failed. Do a callWithABI to check the current
        // TypeSet in case the type was added after we generated this stub.
        masm.bind(&failedFastPath);

        AllocatableRegisterSet regs(GeneralRegisterSet::Volatile(), liveRegs.fpus());
        LiveRegisterSet save(regs.asLiveSet());
        masm.PushRegsInMask(save);

        regs.takeUnchecked(scratch1);

        // Push |val| first to make sure everything is fine if |val| aliases
        // scratch2.
        Register scratch2 = regs.takeAnyGeneral();
        masm.Push(val);
        masm.moveStackPtrTo(scratch2);

        Register scratch3 = regs.takeAnyGeneral();
        masm.Push(id, scratch3);
        masm.moveStackPtrTo(scratch3);

        masm.setupUnalignedABICall(scratch1);
        masm.movePtr(ImmGCPtr(group), scratch1);
        masm.passABIArg(scratch1);
        masm.passABIArg(scratch3);
        masm.passABIArg(scratch2);
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, GroupHasPropertyTypes));
        masm.mov(ReturnReg, scratch1);

        // Pop the Value and jsid pushed above for the handles.
        masm.adjustStack(sizeof(Value) + sizeof(jsid));

        LiveRegisterSet ignore;
        ignore.add(scratch1);
        masm.PopRegsInMaskIgnore(save, ignore);

        masm.branchIfTrueBool(scratch1, &done);
        // NOTE(review): lowercase pop here vs. capital Pop below — presumably
        // deliberate so framePushed bookkeeping is decremented only once even
        // though there are two textual pops (one per dynamic path). Confirm
        // against MacroAssembler's Push/Pop framePushed semantics.
        masm.pop(obj);
        masm.jump(failures);
    }

    masm.bind(&done);
    masm.Pop(obj);
}
1317 :
bool
IonCacheIRCompiler::emitStoreFixedSlot()
{
    // Store a value into a fixed slot of a native object, with a property
    // type-set check (SetProp/SetElem stubs only) and GC pre/post barriers.
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    int32_t offset = int32StubField(reader.stubOffset());
    ConstantOrRegister val = allocator.useConstantOrRegister(masm, reader.valOperandId());

    // Scratch register only needed for the post barrier.
    Maybe<AutoScratchRegister> scratch;
    if (needsPostBarrier())
        scratch.emplace(allocator, masm);

    if (typeCheckInfo_->isSet()) {
        FailurePath* failure;
        if (!addFailurePath(&failure))
            return false;

        EmitCheckPropertyTypes(masm, typeCheckInfo_, obj, val, *liveRegs_, failure->label());
    }

    Address slot(obj, offset);
    // Pre-barrier on the old slot value, then overwrite it.
    EmitPreBarrier(masm, slot, MIRType::Value);
    masm.storeConstantOrRegister(val, slot);
    if (needsPostBarrier())
        emitPostBarrierSlot(obj, val, scratch.ref());
    return true;
}
1344 :
bool
IonCacheIRCompiler::emitStoreDynamicSlot()
{
    // Store a value into a dynamic (out-of-line) slot of a native object,
    // with a property type-set check and GC pre/post barriers.
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    int32_t offset = int32StubField(reader.stubOffset());
    ConstantOrRegister val = allocator.useConstantOrRegister(masm, reader.valOperandId());
    AutoScratchRegister scratch(allocator, masm);

    if (typeCheckInfo_->isSet()) {
        FailurePath* failure;
        if (!addFailurePath(&failure))
            return false;

        EmitCheckPropertyTypes(masm, typeCheckInfo_, obj, val, *liveRegs_, failure->label());
    }

    // Load the dynamic-slots pointer, then store through it.
    masm.loadPtr(Address(obj, NativeObject::offsetOfSlots()), scratch);
    Address slot(scratch, offset);
    EmitPreBarrier(masm, slot, MIRType::Value);
    masm.storeConstantOrRegister(val, slot);
    if (needsPostBarrier())
        emitPostBarrierSlot(obj, val, scratch);
    return true;
}
1369 :
bool
IonCacheIRCompiler::emitAddAndStoreSlotShared(CacheOp op)
{
    // Shared implementation for adding a new property and storing its initial
    // value. Handles three variants: store into a fixed slot, store into an
    // existing dynamic slot, or grow the dynamic slots first and then store.
    // Also updates the object's shape and (optionally) its group.
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    int32_t offset = int32StubField(reader.stubOffset());
    ConstantOrRegister val = allocator.useConstantOrRegister(masm, reader.valOperandId());

    AutoScratchRegister scratch1(allocator, masm);

    // Second scratch only needed to pass the slot count to growSlots.
    Maybe<AutoScratchRegister> scratch2;
    if (op == CacheOp::AllocateAndStoreDynamicSlot)
        scratch2.emplace(allocator, masm);

    bool changeGroup = reader.readBool();
    ObjectGroup* newGroup = groupStubField(reader.stubOffset());
    Shape* newShape = shapeStubField(reader.stubOffset());

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    EmitCheckPropertyTypes(masm, typeCheckInfo_, obj, val, *liveRegs_, failure->label());

    if (op == CacheOp::AllocateAndStoreDynamicSlot) {
        // We have to (re)allocate dynamic slots. Do this first, as it's the
        // only fallible operation here. Note that growSlotsDontReportOOM is
        // fallible but does not GC.
        int32_t numNewSlots = int32StubField(reader.stubOffset());
        MOZ_ASSERT(numNewSlots > 0);

        LiveRegisterSet save(GeneralRegisterSet::Volatile(), liveVolatileFloatRegs());
        masm.PushRegsInMask(save);

        masm.setupUnalignedABICall(scratch1);
        masm.loadJSContext(scratch1);
        masm.passABIArg(scratch1);
        masm.passABIArg(obj);
        masm.move32(Imm32(numNewSlots), scratch2.ref());
        masm.passABIArg(scratch2.ref());
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, NativeObject::growSlotsDontReportOOM));
        masm.mov(ReturnReg, scratch1);

        LiveRegisterSet ignore;
        ignore.add(scratch1);
        masm.PopRegsInMaskIgnore(save, ignore);

        // growSlotsDontReportOOM returned false: OOM, fail the stub.
        masm.branchIfFalseBool(scratch1, failure->label());
    }

    if (changeGroup) {
        // Changing object's group from a partially to fully initialized group,
        // per the acquired properties analysis. Only change the group if the
        // old group still has a newScript. This only applies to PlainObjects.
        Label noGroupChange;
        masm.loadPtr(Address(obj, JSObject::offsetOfGroup()), scratch1);
        masm.branchPtr(Assembler::Equal,
                       Address(scratch1, ObjectGroup::offsetOfAddendum()),
                       ImmWord(0),
                       &noGroupChange);

        Address groupAddr(obj, JSObject::offsetOfGroup());
        EmitPreBarrier(masm, groupAddr, MIRType::ObjectGroup);
        masm.storePtr(ImmGCPtr(newGroup), groupAddr);

        masm.bind(&noGroupChange);
    }

    // Update the object's shape.
    Address shapeAddr(obj, ShapedObject::offsetOfShape());
    EmitPreBarrier(masm, shapeAddr, MIRType::Shape);
    masm.storePtr(ImmGCPtr(newShape), shapeAddr);

    // Perform the store. No pre-barrier required since this is a new
    // initialization.
    if (op == CacheOp::AddAndStoreFixedSlot) {
        Address slot(obj, offset);
        masm.storeConstantOrRegister(val, slot);
    } else {
        MOZ_ASSERT(op == CacheOp::AddAndStoreDynamicSlot ||
                   op == CacheOp::AllocateAndStoreDynamicSlot);
        masm.loadPtr(Address(obj, NativeObject::offsetOfSlots()), scratch1);
        Address slot(scratch1, offset);
        masm.storeConstantOrRegister(val, slot);
    }

    if (needsPostBarrier())
        emitPostBarrierSlot(obj, val, scratch1);

    return true;
}
1460 :
1461 : bool
1462 0 : IonCacheIRCompiler::emitAddAndStoreFixedSlot()
1463 : {
1464 0 : return emitAddAndStoreSlotShared(CacheOp::AddAndStoreFixedSlot);
1465 : }
1466 :
1467 : bool
1468 0 : IonCacheIRCompiler::emitAddAndStoreDynamicSlot()
1469 : {
1470 0 : return emitAddAndStoreSlotShared(CacheOp::AddAndStoreDynamicSlot);
1471 : }
1472 :
1473 : bool
1474 0 : IonCacheIRCompiler::emitAllocateAndStoreDynamicSlot()
1475 : {
1476 0 : return emitAddAndStoreSlotShared(CacheOp::AllocateAndStoreDynamicSlot);
1477 : }
1478 :
bool
IonCacheIRCompiler::emitStoreUnboxedProperty()
{
    // Store a value into an unboxed object's field at a constant offset,
    // with a type-set check for object-typed fields and unboxed GC barriers.
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    JSValueType fieldType = reader.valueType();
    int32_t offset = int32StubField(reader.stubOffset());
    ConstantOrRegister val = allocator.useConstantOrRegister(masm, reader.valOperandId());

    // Scratch register only needed for the post barrier.
    Maybe<AutoScratchRegister> scratch;
    if (needsPostBarrier() && UnboxedTypeNeedsPostBarrier(fieldType))
        scratch.emplace(allocator, masm);

    // Only object-typed fields carry a HeapTypeSet to check.
    if (fieldType == JSVAL_TYPE_OBJECT && typeCheckInfo_->isSet()) {
        FailurePath* failure;
        if (!addFailurePath(&failure))
            return false;
        EmitCheckPropertyTypes(masm, typeCheckInfo_, obj, val, *liveRegs_, failure->label());
    }

    // Note that the storeUnboxedProperty call here is infallible, as the
    // IR emitter is responsible for guarding on |val|'s type.
    Address fieldAddr(obj, offset);
    EmitICUnboxedPreBarrier(masm, fieldAddr, fieldType);
    masm.storeUnboxedProperty(fieldAddr, fieldType, val, /* failure = */ nullptr);
    if (needsPostBarrier() && UnboxedTypeNeedsPostBarrier(fieldType))
        emitPostBarrierSlot(obj, val, scratch.ref());
    return true;
}
1507 :
bool
IonCacheIRCompiler::emitStoreTypedObjectReferenceProperty()
{
    // Store a reference-typed value (any/object/string) into a typed object's
    // data at a constant offset, with a type-set check and post barrier where
    // the reference kind requires them.
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    int32_t offset = int32StubField(reader.stubOffset());
    TypedThingLayout layout = reader.typedThingLayout();
    ReferenceTypeDescr::Type type = reader.referenceTypeDescrType();

    ValueOperand val = allocator.useValueRegister(masm, reader.valOperandId());

    AutoScratchRegister scratch1(allocator, masm);
    AutoScratchRegister scratch2(allocator, masm);

    // We don't need to check property types if the property is always a
    // string.
    if (type != ReferenceTypeDescr::TYPE_STRING) {
        FailurePath* failure;
        if (!addFailurePath(&failure))
            return false;
        EmitCheckPropertyTypes(masm, typeCheckInfo_, obj, TypedOrValueRegister(val),
                               *liveRegs_, failure->label());
    }

    // Compute the address being written to.
    LoadTypedThingData(masm, layout, obj, scratch1);
    Address dest(scratch1, offset);

    emitStoreTypedObjectReferenceProp(val, type, dest, scratch2);

    // Strings don't require a post barrier here; other reference kinds do.
    if (needsPostBarrier() && type != ReferenceTypeDescr::TYPE_STRING)
        emitPostBarrierSlot(obj, val, scratch1);
    return true;
}
1541 :
1542 : bool
1543 0 : IonCacheIRCompiler::emitStoreTypedObjectScalarProperty()
1544 : {
1545 0 : Register obj = allocator.useRegister(masm, reader.objOperandId());
1546 0 : int32_t offset = int32StubField(reader.stubOffset());
1547 0 : TypedThingLayout layout = reader.typedThingLayout();
1548 0 : Scalar::Type type = reader.scalarType();
1549 0 : ValueOperand val = allocator.useValueRegister(masm, reader.valOperandId());
1550 0 : AutoScratchRegister scratch1(allocator, masm);
1551 0 : AutoScratchRegister scratch2(allocator, masm);
1552 :
1553 : FailurePath* failure;
1554 0 : if (!addFailurePath(&failure))
1555 0 : return false;
1556 :
1557 : // Compute the address being written to.
1558 0 : LoadTypedThingData(masm, layout, obj, scratch1);
1559 0 : Address dest(scratch1, offset);
1560 :
1561 0 : StoreToTypedArray(cx_, masm, type, val, dest, scratch2, failure->label());
1562 0 : return true;
1563 : }
1564 :
bool
IonCacheIRCompiler::emitStoreDenseElement()
{
    // Store a value into an existing (initialized, non-hole) dense element,
    // with a type-set check, bounds check, hole check, and GC barriers.
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    Register index = allocator.useRegister(masm, reader.int32OperandId());
    ConstantOrRegister val = allocator.useConstantOrRegister(masm, reader.valOperandId());

    AutoScratchRegister scratch(allocator, masm);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    EmitCheckPropertyTypes(masm, typeCheckInfo_, obj, val, *liveRegs_, failure->label());

    // Load obj->elements in scratch.
    masm.loadPtr(Address(obj, NativeObject::offsetOfElements()), scratch);

    // Bounds check.
    Address initLength(scratch, ObjectElements::offsetOfInitializedLength());
    masm.branch32(Assembler::BelowOrEqual, initLength, index, failure->label());

    // Hole check.
    BaseObjectElementIndex element(scratch, index);
    masm.branchTestMagic(Assembler::Equal, element, failure->label());

    // Pre-barrier the old element, store, then post-barrier if needed.
    EmitPreBarrier(masm, element, MIRType::Value);
    EmitIonStoreDenseElement(masm, val, scratch, element);
    if (needsPostBarrier())
        emitPostBarrierElement(obj, val, scratch, index);
    return true;
}
1597 :
bool
IonCacheIRCompiler::emitStoreDenseElementHole()
{
    // Store a dense element, also handling the append case
    // (index == initializedLength): grow the element capacity via an ABI
    // call if needed, bump initializedLength (and length), then store.
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    Register index = allocator.useRegister(masm, reader.int32OperandId());
    ConstantOrRegister val = allocator.useConstantOrRegister(masm, reader.valOperandId());

    // handleAdd boolean is only relevant for Baseline. Ion ICs can always
    // handle adds as we don't have to set any flags on the fallback stub to
    // track this.
    reader.readBool();

    AutoScratchRegister scratch(allocator, masm);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    EmitCheckPropertyTypes(masm, typeCheckInfo_, obj, val, *liveRegs_, failure->label());

    // Load obj->elements in scratch.
    masm.loadPtr(Address(obj, NativeObject::offsetOfElements()), scratch);

    Address initLength(scratch, ObjectElements::offsetOfInitializedLength());
    BaseObjectElementIndex element(scratch, index);

    // index < initLength: plain in-bounds store.
    // index == initLength: append. Anything else: fail the stub.
    Label inBounds, doStore;
    masm.branch32(Assembler::Above, initLength, index, &inBounds);
    masm.branch32(Assembler::NotEqual, initLength, index, failure->label());

    // If index < capacity, we can add a dense element inline. If not we
    // need to allocate more elements.
    Label capacityOk;
    Address capacity(scratch, ObjectElements::offsetOfCapacity());
    masm.branch32(Assembler::Above, capacity, index, &capacityOk);

    // Check for non-writable array length. We only have to do this if
    // index >= capacity.
    Address elementsFlags(scratch, ObjectElements::offsetOfFlags());
    masm.branchTest32(Assembler::NonZero, elementsFlags,
                      Imm32(ObjectElements::NONWRITABLE_ARRAY_LENGTH),
                      failure->label());

    // Call NativeObject::addDenseElementDontReportOOM(cx, obj) to grow the
    // elements, preserving live volatile registers around the call.
    LiveRegisterSet save(GeneralRegisterSet::Volatile(), liveVolatileFloatRegs());
    save.takeUnchecked(scratch);
    masm.PushRegsInMask(save);

    masm.setupUnalignedABICall(scratch);
    masm.loadJSContext(scratch);
    masm.passABIArg(scratch);
    masm.passABIArg(obj);
    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, NativeObject::addDenseElementDontReportOOM));
    masm.mov(ReturnReg, scratch);

    masm.PopRegsInMask(save);
    // Allocation failed (OOM): fail the stub.
    masm.branchIfFalseBool(scratch, failure->label());

    // Load the reallocated elements pointer.
    masm.loadPtr(Address(obj, NativeObject::offsetOfElements()), scratch);

    masm.bind(&capacityOk);

    // Increment initLength.
    masm.add32(Imm32(1), initLength);

    // If length is now <= index, increment length too.
    Label skipIncrementLength;
    Address length(scratch, ObjectElements::offsetOfLength());
    masm.branch32(Assembler::Above, length, index, &skipIncrementLength);
    masm.add32(Imm32(1), length);
    masm.bind(&skipIncrementLength);

    // Skip EmitPreBarrier as the memory is uninitialized.
    masm.jump(&doStore);

    masm.bind(&inBounds);

    EmitPreBarrier(masm, element, MIRType::Value);

    masm.bind(&doStore);
    EmitIonStoreDenseElement(masm, val, scratch, element);
    if (needsPostBarrier())
        emitPostBarrierElement(obj, val, scratch, index);
    return true;
}
1683 :
bool
IonCacheIRCompiler::emitStoreTypedElement()
{
    // Store a value into a typed array / typed object element, converting the
    // input to the element's scalar type. If |handleOOB| is set, stores past
    // the length are silently ignored instead of failing the stub.
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    Register index = allocator.useRegister(masm, reader.int32OperandId());
    ConstantOrRegister val = allocator.useConstantOrRegister(masm, reader.valOperandId());

    TypedThingLayout layout = reader.typedThingLayout();
    Scalar::Type arrayType = reader.scalarType();
    bool handleOOB = reader.readBool();

    AutoScratchRegister scratch1(allocator, masm);

    // Integer element types need a scratch register to hold the converted
    // value; float stores go through the float temps instead.
    Maybe<AutoScratchRegister> scratch2;
    if (arrayType != Scalar::Float32 && arrayType != Scalar::Float64)
        scratch2.emplace(allocator, masm);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    // Bounds check.
    Label done;
    LoadTypedThingLength(masm, layout, obj, scratch1);
    masm.branch32(Assembler::BelowOrEqual, scratch1, index, handleOOB ? &done : failure->label());

    // Load the elements vector.
    LoadTypedThingData(masm, layout, obj, scratch1);

    BaseIndex dest(scratch1, index, ScaleFromElemWidth(Scalar::byteSize(arrayType)));

    // Float temps were reserved by the IC when it was allocated.
    FloatRegister maybeTempDouble = ic_->asSetPropertyIC()->maybeTempDouble();
    FloatRegister maybeTempFloat32 = ic_->asSetPropertyIC()->maybeTempFloat32();
    MOZ_ASSERT(maybeTempDouble != InvalidFloatReg);
    MOZ_ASSERT_IF(jit::hasUnaliasedDouble(), maybeTempFloat32 != InvalidFloatReg);

    if (arrayType == Scalar::Float32) {
        FloatRegister tempFloat = hasUnaliasedDouble() ? maybeTempFloat32 : maybeTempDouble;
        if (!masm.convertConstantOrRegisterToFloat(cx_, val, tempFloat, failure->label()))
            return false;
        masm.storeToTypedFloatArray(arrayType, tempFloat, dest);
    } else if (arrayType == Scalar::Float64) {
        if (!masm.convertConstantOrRegisterToDouble(cx_, val, maybeTempDouble, failure->label()))
            return false;
        masm.storeToTypedFloatArray(arrayType, maybeTempDouble, dest);
    } else {
        // Integer types: clamp (Uint8Clamped) or truncate to int32, then store.
        Register valueToStore = scratch2.ref();
        if (arrayType == Scalar::Uint8Clamped) {
            if (!masm.clampConstantOrRegisterToUint8(cx_, val, maybeTempDouble, valueToStore,
                                                     failure->label()))
            {
                return false;
            }
        } else {
            if (!masm.truncateConstantOrRegisterToInt32(cx_, val, maybeTempDouble, valueToStore,
                                                        failure->label()))
            {
                return false;
            }
        }
        masm.storeToTypedIntArray(arrayType, valueToStore, dest);
    }

    masm.bind(&done);
    return true;
}
1750 :
// Unboxed-array element stores are only generated for Baseline ICs; the Ion
// CacheIR compiler must never see this op (see comment below).
bool
IonCacheIRCompiler::emitStoreUnboxedArrayElement()
{
    // --unboxed-arrays is currently untested and broken.
    MOZ_CRASH("Baseline-specific op");
}
1757 :
// As above: the hole-storing variant for unboxed arrays is Baseline-only,
// so reaching it from Ion is a compiler bug.
bool
IonCacheIRCompiler::emitStoreUnboxedArrayElementHole()
{
    // --unboxed-arrays is currently untested and broken.
    MOZ_CRASH("Baseline-specific op");
}
1764 :
// Call a native (JSNative) setter from Ion IC code. Builds the vp array
// [callee, |this|, value] on the stack, enters an IonOOLNative fake exit
// frame, and invokes target->native() through an unaligned ABI call. A false
// return from the native jumps to the masm exception label.
bool
IonCacheIRCompiler::emitCallNativeSetter()
{
    // Live registers must be saved/restored around the call; this RAII helper
    // also records that fact for emitReturnFromIC.
    AutoSaveLiveRegisters save(*this);

    Register obj = allocator.useRegister(masm, reader.objOperandId());
    JSFunction* target = &objectStubField(reader.stubOffset())->as<JSFunction>();
    MOZ_ASSERT(target->isNative());
    ConstantOrRegister val = allocator.useConstantOrRegister(masm, reader.valOperandId());

    AutoScratchRegister argJSContext(allocator, masm);
    AutoScratchRegister argVp(allocator, masm);
    AutoScratchRegister argUintN(allocator, masm);
    AutoScratchRegister scratch(allocator, masm);

    allocator.discardStack(masm);

    // Set up the call:
    //  bool (*)(JSContext*, unsigned, Value* vp)
    // vp[0] is callee/outparam
    // vp[1] is |this|
    // vp[2] is the value

    // Build vp and move the base into argVpReg. Values are pushed in reverse
    // so vp[0] ends up at the lowest address.
    masm.Push(val);
    masm.Push(TypedOrValueRegister(MIRType::Object, AnyRegister(obj)));
    masm.Push(ObjectValue(*target));
    masm.moveStackPtrTo(argVp.get());

    // Preload other regs.
    masm.loadJSContext(argJSContext);
    masm.move32(Imm32(1), argUintN);

    // Push marking data for later use.
    masm.Push(argUintN);
    pushStubCodePointer();

    if (!masm.icBuildOOLFakeExitFrame(GetReturnAddressToIonCode(cx_), save))
        return false;
    masm.enterFakeExitFrame(argJSContext, scratch, IonOOLNativeExitFrameLayoutToken);

    // Make the call.
    masm.setupUnalignedABICall(scratch);
    masm.passABIArg(argJSContext);
    masm.passABIArg(argUintN);
    masm.passABIArg(argVp);
    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, target->native()));

    // Test for failure.
    masm.branchIfFalseBool(ReturnReg, masm.exceptionLabel());

    // Pop the exit frame plus the vp array for one argument.
    masm.adjustStack(IonOOLNativeExitFrameLayout::Size(1));
    return true;
}
1819 :
// Call a scripted (JIT-compiled) setter from Ion IC code. Constructs an
// IonICCallFrameLayout plus an aligned JitFrameLayout with |this| and the
// value (padding missing formals with undefined), then calls the setter's
// Baseline/Ion entry point.
bool
IonCacheIRCompiler::emitCallScriptedSetter()
{
    AutoSaveLiveRegisters save(*this);

    Register obj = allocator.useRegister(masm, reader.objOperandId());
    JSFunction* target = &objectStubField(reader.stubOffset())->as<JSFunction>();
    ConstantOrRegister val = allocator.useConstantOrRegister(masm, reader.valOperandId());

    AutoScratchRegister scratch(allocator, masm);

    allocator.discardStack(masm);

    // Remember the depth so everything pushed below can be freed in one go.
    uint32_t framePushedBefore = masm.framePushed();

    // Construct IonICCallFrameLayout.
    uint32_t descriptor = MakeFrameDescriptor(masm.framePushed(), JitFrame_IonJS,
                                              IonICCallFrameLayout::Size());
    pushStubCodePointer();
    masm.Push(Imm32(descriptor));
    masm.Push(ImmPtr(GetReturnAddressToIonCode(cx_)));

    // The JitFrameLayout pushed below will be aligned to JitStackAlignment,
    // so we just have to make sure the stack is aligned after we push the
    // |this| + argument Values.
    size_t numArgs = Max<size_t>(1, target->nargs());
    uint32_t argSize = (numArgs + 1) * sizeof(Value);
    uint32_t padding = ComputeByteAlignment(masm.framePushed() + argSize, JitStackAlignment);
    MOZ_ASSERT(padding % sizeof(uintptr_t) == 0);
    MOZ_ASSERT(padding < JitStackAlignment);
    masm.reserveStack(padding);

    // A setter is called with one actual argument; pad out any extra
    // declared formals with undefined.
    for (size_t i = 1; i < target->nargs(); i++)
        masm.Push(UndefinedValue());
    masm.Push(val);
    masm.Push(TypedOrValueRegister(MIRType::Object, AnyRegister(obj)));

    masm.movePtr(ImmGCPtr(target), scratch);

    descriptor = MakeFrameDescriptor(argSize + padding, JitFrame_IonICCall,
                                     JitFrameLayout::Size());
    masm.Push(Imm32(1)); // argc
    masm.Push(scratch);
    masm.Push(Imm32(descriptor));

    // Check stack alignment. Add sizeof(uintptr_t) for the return address.
    MOZ_ASSERT(((masm.framePushed() + sizeof(uintptr_t)) % JitStackAlignment) == 0);

    // The setter has JIT code now and we will only discard the setter's JIT
    // code when discarding all JIT code in the Zone, so we can assume it'll
    // still have JIT code.
    MOZ_ASSERT(target->hasJITCode());
    masm.loadPtr(Address(scratch, JSFunction::offsetOfNativeOrScript()), scratch);
    masm.loadBaselineOrIonRaw(scratch, scratch, nullptr);
    masm.callJit(scratch);

    masm.freeStack(masm.framePushed() - framePushedBefore);
    return true;
}
1879 :
// VM wrapper used by emitCallSetArrayLength to perform the (possibly strict)
// array |.length| assignment in C++.
typedef bool (*SetArrayLengthFn)(JSContext*, HandleObject, HandleValue, bool);
static const VMFunction SetArrayLengthInfo =
    FunctionInfo<SetArrayLengthFn>(SetArrayLength, "SetArrayLength");
1883 :
// Set an array's |.length| by calling into the VM (SetArrayLengthInfo).
bool
IonCacheIRCompiler::emitCallSetArrayLength()
{
    AutoSaveLiveRegisters save(*this);

    Register obj = allocator.useRegister(masm, reader.objOperandId());
    bool strict = reader.readBool();
    ConstantOrRegister val = allocator.useConstantOrRegister(masm, reader.valOperandId());

    allocator.discardStack(masm);
    prepareVMCall(masm);

    // Arguments are pushed in reverse order of the VMFunction signature
    // (JSContext* is implicit).
    masm.Push(Imm32(strict));
    masm.Push(val);
    masm.Push(obj);

    return callVM(masm, SetArrayLengthInfo);
}
1902 :
// VM wrapper used by emitCallProxySet for proxy [[Set]] with a known jsid.
typedef bool (*ProxySetPropertyFn)(JSContext*, HandleObject, HandleId, HandleValue, bool);
static const VMFunction ProxySetPropertyInfo =
    FunctionInfo<ProxySetPropertyFn>(ProxySetProperty, "ProxySetProperty");
1906 :
// Invoke a proxy's [[Set]] trap through the VM, with the property id baked
// into the stub data.
bool
IonCacheIRCompiler::emitCallProxySet()
{
    AutoSaveLiveRegisters save(*this);

    Register obj = allocator.useRegister(masm, reader.objOperandId());
    ConstantOrRegister val = allocator.useConstantOrRegister(masm, reader.valOperandId());
    jsid id = idStubField(reader.stubOffset());
    bool strict = reader.readBool();

    AutoScratchRegister scratch(allocator, masm);

    allocator.discardStack(masm);
    prepareVMCall(masm);

    // Arguments pushed in reverse order of the VMFunction signature; the id
    // needs a scratch register to be materialized before the push.
    masm.Push(Imm32(strict));
    masm.Push(val);
    masm.Push(id, scratch);
    masm.Push(obj);

    return callVM(masm, ProxySetPropertyInfo);
}
1929 :
// VM wrapper used by emitCallProxySetByValue when the property key is a
// runtime Value rather than a constant jsid.
typedef bool (*ProxySetPropertyByValueFn)(JSContext*, HandleObject, HandleValue, HandleValue, bool);
static const VMFunction ProxySetPropertyByValueInfo =
    FunctionInfo<ProxySetPropertyByValueFn>(ProxySetPropertyByValue, "ProxySetPropertyByValue");
1933 :
// Invoke a proxy's [[Set]] trap through the VM with a dynamic (Value) key.
bool
IonCacheIRCompiler::emitCallProxySetByValue()
{
    AutoSaveLiveRegisters save(*this);

    Register obj = allocator.useRegister(masm, reader.objOperandId());
    ConstantOrRegister idVal = allocator.useConstantOrRegister(masm, reader.valOperandId());
    ConstantOrRegister val = allocator.useConstantOrRegister(masm, reader.valOperandId());
    bool strict = reader.readBool();

    allocator.discardStack(masm);
    prepareVMCall(masm);

    // Arguments pushed in reverse order of the VMFunction signature.
    masm.Push(Imm32(strict));
    masm.Push(val);
    masm.Push(idVal);
    masm.Push(obj);

    return callVM(masm, ProxySetPropertyByValueInfo);
}
1954 :
// Load a field of a typed object into the IC's output register. The field
// offset comes from the stub data; the layout and type descriptor are
// encoded directly in the CacheIR stream.
bool
IonCacheIRCompiler::emitLoadTypedObjectResult()
{
    AutoOutputRegister output(*this);
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    AutoScratchRegister scratch1(allocator, masm);
    AutoScratchRegister scratch2(allocator, masm);

    TypedThingLayout layout = reader.typedThingLayout();
    uint32_t typeDescr = reader.typeDescrKey();
    uint32_t fieldOffset = int32StubField(reader.stubOffset());

    // Get the object's data pointer.
    LoadTypedThingData(masm, layout, obj, scratch1);

    // The shared helper performs the typed load/boxing into |output|.
    Address fieldAddr(scratch1, fieldOffset);
    emitLoadTypedObjectResultShared(fieldAddr, scratch2, layout, typeDescr, output);
    return true;
}
1974 :
// Ion ICs don't have separate type-monitor stubs, so a monitored result is
// emitted exactly like a plain return.
bool
IonCacheIRCompiler::emitTypeMonitorResult()
{
    return emitReturnFromIC();
}
1980 :
// Emit the IC's success exit: a patchable jump whose offset is recorded in
// rejoinOffset_ so it can later be patched to the IC's rejoin point.
bool
IonCacheIRCompiler::emitReturnFromIC()
{
    // If no VM/setter call saved the live registers, the input state was
    // never spilled, so restore the operand registers explicitly.
    if (!savedLiveRegs_)
        allocator.restoreInputState(masm);

    RepatchLabel rejoin;
    rejoinOffset_ = masm.jumpWithPatch(&rejoin);
    masm.bind(&rejoin);
    return true;
}
1992 :
1993 : bool
1994 1 : IonCacheIRCompiler::emitLoadObject()
1995 : {
1996 1 : Register reg = allocator.defineRegister(masm, reader.objOperandId());
1997 1 : JSObject* obj = objectStubField(reader.stubOffset());
1998 1 : masm.movePtr(ImmGCPtr(obj), reg);
1999 1 : return true;
2000 : }
2001 :
// LoadStackValue reads a value from the Baseline frame's expression stack,
// which has no Ion equivalent, so this op must never be compiled here.
bool
IonCacheIRCompiler::emitLoadStackValue()
{
    MOZ_ASSERT_UNREACHABLE("emitLoadStackValue not supported for IonCaches.");
    return false;
}
2008 :
// Guard that a DOM expando Value is either missing (undefined) or is an
// object whose shape matches the shape recorded in the stub data; otherwise
// jump to the failure path.
bool
IonCacheIRCompiler::emitGuardDOMExpandoMissingOrGuardShape()
{
    ValueOperand val = allocator.useValueRegister(masm, reader.valOperandId());
    Shape* shape = shapeStubField(reader.stubOffset());

    AutoScratchRegister objScratch(allocator, masm);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    // No expando at all: the guard trivially succeeds.
    Label done;
    masm.branchTestUndefined(Assembler::Equal, val, &done);

    // Otherwise the Value must hold an object with the expected shape.
    masm.debugAssertIsObject(val);
    masm.unboxObject(val, objScratch);
    masm.branchTestObjShape(Assembler::NotEqual, objScratch, shape, failure->label());

    masm.bind(&done);
    return true;
}
2031 :
// Load a DOM proxy's expando object while guarding that (1) the proxy still
// uses the recorded ExpandoAndGeneration and (2) its generation counter still
// matches the value captured in the stub data. On success the expando Value
// is left in the output register.
bool
IonCacheIRCompiler::emitLoadDOMExpandoValueGuardGeneration()
{
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    ExpandoAndGeneration* expandoAndGeneration =
        rawWordStubField<ExpandoAndGeneration*>(reader.stubOffset());
    uint64_t* generationFieldPtr = expandoGenerationStubFieldPtr(reader.stubOffset());

    AutoScratchRegister scratch1(allocator, masm);
    AutoScratchRegister scratch2(allocator, masm);
    ValueOperand output = allocator.defineValueRegister(masm, reader.valOperandId());

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    masm.loadPtr(Address(obj, ProxyObject::offsetOfReservedSlots()), scratch1);
    Address expandoAddr(scratch1, detail::ProxyReservedSlots::offsetOfPrivateSlot());

    // Guard the ExpandoAndGeneration* matches the proxy's ExpandoAndGeneration.
    masm.loadValue(expandoAddr, output);
    masm.branchTestValue(Assembler::NotEqual, output, PrivateValue(expandoAndGeneration),
                         failure->label());

    // Guard expandoAndGeneration->generation matches the expected generation.
    // The output register doubles as a scratch here; it is overwritten below.
    masm.movePtr(ImmPtr(expandoAndGeneration), output.scratchReg());
    masm.movePtr(ImmPtr(generationFieldPtr), scratch1);
    masm.branch64(Assembler::NotEqual,
                  Address(output.scratchReg(), ExpandoAndGeneration::offsetOfGeneration()),
                  Address(scratch1, 0),
                  scratch2,
                  failure->label());

    // Load expandoAndGeneration->expando into the output Value register.
    masm.loadValue(Address(output.scratchReg(), ExpandoAndGeneration::offsetOfExpando()), output);
    return true;
}
2069 :
// Compile the CacheIR in |writer| to native code and attach the resulting
// stub to this IC. Sets *attached to true on success (or when an existing
// stub already covers the case and was updated); silently returns on OOM or
// failure, since this path must not report exceptions.
void
IonIC::attachCacheIRStub(JSContext* cx, const CacheIRWriter& writer, CacheKind kind,
                         IonScript* ionScript, bool* attached,
                         const PropertyTypeCheckInfo* typeCheckInfo)
{
    // We shouldn't GC or report OOM (or any other exception) here.
    AutoAssertNoPendingException aanpe(cx);
    JS::AutoCheckCannotGC nogc;

    MOZ_ASSERT(!*attached);

    // SetProp/SetElem stubs must have non-null typeCheckInfo.
    MOZ_ASSERT(!!typeCheckInfo == (kind == CacheKind::SetProp || kind == CacheKind::SetElem));

    // Do nothing if the IR generator failed or triggered a GC that invalidated
    // the script.
    if (writer.failed() || ionScript->invalidated())
        return;

    JitZone* jitZone = cx->zone()->jitZone();
    uint32_t stubDataOffset = sizeof(IonICStub);

    // Try to reuse a previously-allocated CacheIRStubInfo.
    CacheIRStubKey::Lookup lookup(kind, ICStubEngine::IonIC,
                                  writer.codeStart(), writer.codeLength());
    CacheIRStubInfo* stubInfo = jitZone->getIonCacheIRStubInfo(lookup);
    if (!stubInfo) {
        // Allocate the shared CacheIRStubInfo. Note that the
        // putIonCacheIRStubInfo call below will transfer ownership to
        // the stub info HashSet, so we don't have to worry about freeing
        // it below.

        // For Ion ICs, we don't track/use the makesGCCalls flag, so just pass true.
        bool makesGCCalls = true;
        stubInfo = CacheIRStubInfo::New(kind, ICStubEngine::IonIC, makesGCCalls,
                                        stubDataOffset, writer);
        if (!stubInfo)
            return;

        CacheIRStubKey key(stubInfo);
        if (!jitZone->putIonCacheIRStubInfo(lookup, key))
            return;
    }

    MOZ_ASSERT(stubInfo);

    // Ensure we don't attach duplicate stubs. This can happen if a stub failed
    // for some reason and the IR generator doesn't check for exactly the same
    // conditions.
    for (IonICStub* stub = firstStub_; stub; stub = stub->next()) {
        if (stub->stubInfo() != stubInfo)
            continue;
        bool updated = false;
        if (!writer.stubDataEqualsMaybeUpdate(stub->stubDataStart(), &updated))
            continue;
        if (updated || (typeCheckInfo && typeCheckInfo->needsTypeBarrier())) {
            // We updated a stub or have a stub that requires property type
            // checks. In this case the stub will likely handle more cases in
            // the future and we shouldn't deoptimize.
            *attached = true;
        }
        return;
    }

    // The stub data is stored inline, immediately after the IonICStub header.
    size_t bytesNeeded = stubInfo->stubDataOffset() + stubInfo->stubDataSize();

    // Allocate the IonICStub in the optimized stub space. Ion stubs and
    // CacheIRStubInfo instances for Ion stubs can be purged on GC. That's okay
    // because the stub code is rooted separately when we make a VM call, and
    // stub code should never access the IonICStub after making a VM call. The
    // IonICStub::poison method poisons the stub to catch bugs in this area.
    ICStubSpace* stubSpace = cx->zone()->jitZone()->optimizedStubSpace();
    void* newStubMem = stubSpace->alloc(bytesNeeded);
    if (!newStubMem)
        return;

    IonICStub* newStub = new(newStubMem) IonICStub(fallbackLabel_.raw(), stubInfo);
    writer.copyStubData(newStub->stubDataStart());

    JitContext jctx(cx, nullptr);
    IonCacheIRCompiler compiler(cx, writer, this, ionScript, newStub, typeCheckInfo);
    if (!compiler.init())
        return;

    JitCode* code = compiler.compile();
    if (!code)
        return;

    attachStub(newStub, code);
    *attached = true;
}
|