Line data Source code
1 : /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 : * vim: set ts=8 sts=4 et sw=4 tw=99:
3 : * This Source Code Form is subject to the terms of the Mozilla Public
4 : * License, v. 2.0. If a copy of the MPL was not distributed with this
5 : * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
6 :
7 : #include "jit/CacheIRCompiler.h"
8 :
9 : #include "jit/IonIC.h"
10 : #include "jit/SharedICHelpers.h"
11 :
12 : #include "jscompartmentinlines.h"
13 :
14 : #include "jit/MacroAssembler-inl.h"
15 :
16 : using namespace js;
17 : using namespace js::jit;
18 :
19 : using mozilla::Maybe;
20 :
21 : ValueOperand
22 356 : CacheRegisterAllocator::useValueRegister(MacroAssembler& masm, ValOperandId op)
23 : {
       : // Ensure operand |op| lives in a boxed ValueOperand register, emitting
       : // whatever load/pop/box code is needed for its current location, and
       : // update the tracked location. The register is added to currentOpRegs_
       : // so it will not be handed out again for the current instruction.
24 356 : OperandLocation& loc = operandLocations_[op.id()];
25 :
26 356 : switch (loc.kind()) {
27 : case OperandLocation::ValueReg:
28 353 : currentOpRegs_.add(loc.valueReg());
29 353 : return loc.valueReg();
30 :
31 : case OperandLocation::ValueStack: {
       : // Boxed value spilled to the native stack: pop/load it back.
32 0 : ValueOperand reg = allocateValueRegister(masm);
33 0 : popValue(masm, &loc, reg);
34 0 : return reg;
35 : }
36 :
37 : case OperandLocation::BaselineFrame: {
       : // Value still lives in its Baseline frame slot: load it.
38 2 : ValueOperand reg = allocateValueRegister(masm);
39 2 : Address addr = addressOf(masm, loc.baselineFrameSlot());
40 2 : masm.loadValue(addr, reg);
41 2 : loc.setValueReg(reg);
42 2 : return reg;
43 : }
44 :
45 : case OperandLocation::Constant: {
       : // Materialize the constant Value into a fresh register.
46 0 : ValueOperand reg = allocateValueRegister(masm);
47 0 : masm.moveValue(loc.constant(), reg);
48 0 : loc.setValueReg(reg);
49 0 : return reg;
50 : }
51 :
52 : case OperandLocation::PayloadReg: {
53 : // Temporarily add the payload register to currentOpRegs_ so
54 : // allocateValueRegister will stay away from it.
55 1 : currentOpRegs_.add(loc.payloadReg());
56 1 : ValueOperand reg = allocateValueRegister(masm);
57 1 : masm.tagValue(loc.payloadType(), loc.payloadReg(), reg);
       : // The payload register is free again now that the value is boxed.
58 1 : currentOpRegs_.take(loc.payloadReg());
59 1 : availableRegs_.add(loc.payloadReg());
60 1 : loc.setValueReg(reg);
61 1 : return reg;
62 : }
63 :
64 : case OperandLocation::PayloadStack: {
       : // Unboxed payload on the stack: pop it, then re-tag into a Value.
65 0 : ValueOperand reg = allocateValueRegister(masm);
66 0 : popPayload(masm, &loc, reg.scratchReg());
67 0 : masm.tagValue(loc.payloadType(), reg.scratchReg(), reg);
68 0 : loc.setValueReg(reg);
69 0 : return reg;
70 : }
71 :
72 : case OperandLocation::DoubleReg: {
       : // Box the double from its FP register into a Value register.
73 0 : ValueOperand reg = allocateValueRegister(masm);
74 0 : masm.boxDouble(loc.doubleReg(), reg);
75 0 : loc.setValueReg(reg);
76 0 : return reg;
77 : }
78 :
79 : case OperandLocation::Uninitialized:
80 0 : break;
81 : }
82 :
83 0 : MOZ_CRASH();
84 : }
85 :
86 : ValueOperand
87 58 : CacheRegisterAllocator::useFixedValueRegister(MacroAssembler& masm, ValOperandId valId,
88 : ValueOperand reg)
89 : {
       : // Like useValueRegister, but the caller dictates the destination
       : // ValueOperand |reg|. The fixed register is claimed first (possibly
       : // evicting its current occupant), then the operand is moved/loaded/
       : // boxed into it and the location record is updated.
90 58 : allocateFixedValueRegister(masm, reg);
91 :
92 58 : OperandLocation& loc = operandLocations_[valId.id()];
93 58 : switch (loc.kind()) {
94 : case OperandLocation::ValueReg:
95 36 : masm.moveValue(loc.valueReg(), reg);
96 36 : MOZ_ASSERT(!currentOpRegs_.aliases(loc.valueReg()), "Register shouldn't be in use");
       : // The old register is no longer needed once the value is copied.
97 36 : availableRegs_.add(loc.valueReg());
98 36 : break;
99 : case OperandLocation::ValueStack:
100 0 : popValue(masm, &loc, reg);
101 0 : break;
102 : case OperandLocation::BaselineFrame: {
103 22 : Address addr = addressOf(masm, loc.baselineFrameSlot());
104 22 : masm.loadValue(addr, reg);
105 22 : break;
106 : }
107 : case OperandLocation::Constant:
108 0 : masm.moveValue(loc.constant(), reg);
109 0 : break;
110 : case OperandLocation::PayloadReg:
111 0 : masm.tagValue(loc.payloadType(), loc.payloadReg(), reg);
112 0 : MOZ_ASSERT(!currentOpRegs_.has(loc.payloadReg()), "Register shouldn't be in use");
113 0 : availableRegs_.add(loc.payloadReg());
114 0 : break;
115 : case OperandLocation::PayloadStack:
116 0 : popPayload(masm, &loc, reg.scratchReg());
117 0 : masm.tagValue(loc.payloadType(), reg.scratchReg(), reg);
118 0 : break;
119 : case OperandLocation::DoubleReg:
120 0 : masm.boxDouble(loc.doubleReg(), reg);
121 0 : break;
122 : case OperandLocation::Uninitialized:
123 0 : MOZ_CRASH();
124 : }
125 :
126 58 : loc.setValueReg(reg);
127 58 : return reg;
128 : }
129 :
130 : Register
131 906 : CacheRegisterAllocator::useRegister(MacroAssembler& masm, TypedOperandId typedId)
132 : {
       : // Ensure a typed (unboxed) operand lives in a plain GPR, unboxing or
       : // loading it as needed, and update the tracked location. The register
       : // is reserved for the current instruction via currentOpRegs_.
133 906 : OperandLocation& loc = operandLocations_[typedId.id()];
134 906 : switch (loc.kind()) {
135 : case OperandLocation::PayloadReg:
136 617 : currentOpRegs_.add(loc.payloadReg());
137 617 : return loc.payloadReg();
138 :
139 : case OperandLocation::ValueReg: {
140 : // It's possible the value is still boxed: as an optimization, we unbox
141 : // the first time we use a value as object.
142 280 : ValueOperand val = loc.valueReg();
       : // Reuse the scratch register of the boxed value as the payload
       : // register; the remaining pieces of |val| become available.
143 280 : availableRegs_.add(val);
144 280 : Register reg = val.scratchReg();
145 280 : availableRegs_.take(reg);
146 280 : masm.unboxObject(val, reg);
147 280 : loc.setPayloadReg(reg, typedId.type());
148 280 : currentOpRegs_.add(reg);
149 280 : return reg;
150 : }
151 :
152 : case OperandLocation::PayloadStack: {
153 9 : Register reg = allocateRegister(masm);
154 9 : popPayload(masm, &loc, reg);
155 9 : return reg;
156 : }
157 :
158 : case OperandLocation::ValueStack: {
159 : // The value is on the stack, but boxed. If it's on top of the stack we
160 : // unbox it and then remove it from the stack, else we just unbox.
161 0 : Register reg = allocateRegister(masm);
162 0 : if (loc.valueStack() == stackPushed_) {
163 0 : masm.unboxObject(Address(masm.getStackPointer(), 0), reg);
164 0 : masm.addToStackPtr(Imm32(sizeof(js::Value)));
165 0 : MOZ_ASSERT(stackPushed_ >= sizeof(js::Value));
166 0 : stackPushed_ -= sizeof(js::Value);
167 : } else {
168 0 : MOZ_ASSERT(loc.valueStack() < stackPushed_);
169 0 : masm.unboxObject(Address(masm.getStackPointer(), stackPushed_ - loc.valueStack()),
170 0 : reg);
171 : }
172 0 : loc.setPayloadReg(reg, typedId.type());
173 0 : return reg;
174 : }
175 :
176 : case OperandLocation::BaselineFrame: {
       : // Unbox straight out of the Baseline frame slot.
177 0 : Register reg = allocateRegister(masm);
178 0 : Address addr = addressOf(masm, loc.baselineFrameSlot());
179 0 : masm.unboxNonDouble(addr, reg);
180 0 : loc.setPayloadReg(reg, typedId.type());
181 0 : return reg;
182 : };
183 :
184 : case OperandLocation::Constant: {
       : // Only GC-thing constants (string/symbol) can be materialized as a
       : // raw payload pointer here.
185 0 : Value v = loc.constant();
186 0 : Register reg = allocateRegister(masm);
187 0 : if (v.isString())
188 0 : masm.movePtr(ImmGCPtr(v.toString()), reg);
189 0 : else if (v.isSymbol())
190 0 : masm.movePtr(ImmGCPtr(v.toSymbol()), reg);
191 : else
192 0 : MOZ_CRASH("Unexpected Value");
193 0 : loc.setPayloadReg(reg, v.extractNonDoubleType());
194 0 : return reg;
195 : }
196 :
197 : case OperandLocation::DoubleReg:
198 : case OperandLocation::Uninitialized:
199 0 : break;
200 : }
201 :
202 0 : MOZ_CRASH();
203 : }
204 :
205 : ConstantOrRegister
206 0 : CacheRegisterAllocator::useConstantOrRegister(MacroAssembler& masm, ValOperandId val)
207 : {
       : // Return the operand in the cheapest usable form: the constant itself
       : // if it is one, a typed register for unboxed payloads, or a boxed
       : // ValueOperand otherwise. Delegates to useRegister/useValueRegister.
208 0 : OperandLocation& loc = operandLocations_[val.id()];
209 0 : switch (loc.kind()) {
210 : case OperandLocation::Constant:
211 0 : return loc.constant();
212 :
213 : case OperandLocation::PayloadReg:
214 : case OperandLocation::PayloadStack: {
215 0 : JSValueType payloadType = loc.payloadType();
216 0 : Register reg = useRegister(masm, TypedOperandId(val, payloadType));
217 0 : return TypedOrValueRegister(MIRTypeFromValueType(payloadType), AnyRegister(reg));
218 : }
219 :
220 : case OperandLocation::ValueReg:
221 : case OperandLocation::ValueStack:
222 : case OperandLocation::BaselineFrame:
223 0 : return TypedOrValueRegister(useValueRegister(masm, val));
224 :
225 : case OperandLocation::DoubleReg:
226 0 : return TypedOrValueRegister(MIRType::Double, AnyRegister(loc.doubleReg()));
227 :
228 : case OperandLocation::Uninitialized:
229 0 : break;
230 : }
231 :
232 0 : MOZ_CRASH();
233 : }
234 :
235 : Register
236 181 : CacheRegisterAllocator::defineRegister(MacroAssembler& masm, TypedOperandId typedId)
237 : {
       : // Allocate a fresh GPR for a brand-new (not yet initialized) typed
       : // operand and record it as that operand's payload register.
238 181 : OperandLocation& loc = operandLocations_[typedId.id()];
239 181 : MOZ_ASSERT(loc.kind() == OperandLocation::Uninitialized);
240 :
241 181 : Register reg = allocateRegister(masm);
242 181 : loc.setPayloadReg(reg, typedId.type());
243 181 : return reg;
244 : }
245 :
246 : ValueOperand
247 5 : CacheRegisterAllocator::defineValueRegister(MacroAssembler& masm, ValOperandId val)
248 : {
       : // Allocate a fresh ValueOperand for a brand-new (not yet initialized)
       : // boxed operand and record it as that operand's value register.
249 5 : OperandLocation& loc = operandLocations_[val.id()];
250 5 : MOZ_ASSERT(loc.kind() == OperandLocation::Uninitialized);
251 :
252 5 : ValueOperand reg = allocateValueRegister(masm);
253 5 : loc.setValueReg(reg);
254 5 : return reg;
255 : }
256 :
257 : void
258 306 : CacheRegisterAllocator::freeDeadOperandLocations(MacroAssembler& masm)
259 : {
260 : // See if any operands are dead so we can reuse their registers. Note that
261 : // we skip the input operands, as those are also used by failure paths, and
262 : // we currently don't track those uses.
263 565 : for (size_t i = writer_.numInputOperands(); i < operandLocations_.length(); i++) {
264 259 : if (!writer_.operandIsDead(i, currentInstruction_))
265 82 : continue;
266 :
267 177 : OperandLocation& loc = operandLocations_[i];
268 177 : switch (loc.kind()) {
269 : case OperandLocation::PayloadReg:
270 119 : availableRegs_.add(loc.payloadReg());
271 119 : break;
272 : case OperandLocation::ValueReg:
273 0 : availableRegs_.add(loc.valueReg());
274 0 : break;
275 : case OperandLocation::PayloadStack:
       : // Stack slots are recycled via the free-slot lists; appends can
       : // OOM, which is routed through masm.propagateOOM.
276 0 : masm.propagateOOM(freePayloadSlots_.append(loc.payloadStack()));
277 0 : break;
278 : case OperandLocation::ValueStack:
279 0 : masm.propagateOOM(freeValueSlots_.append(loc.valueStack()));
280 0 : break;
281 : case OperandLocation::Uninitialized:
282 : case OperandLocation::BaselineFrame:
283 : case OperandLocation::Constant:
284 : case OperandLocation::DoubleReg:
       : // These hold no register or stack resource; nothing to release.
285 58 : break;
286 : }
287 177 : loc.setUninitialized();
288 : }
289 306 : }
290 :
291 : void
292 939 : CacheRegisterAllocator::discardStack(MacroAssembler& masm)
293 : {
294 : // This should only be called when we are no longer using the operands,
295 : // as we're discarding everything from the native stack. Set all operand
296 : // locations to Uninitialized to catch bugs.
297 3327 : for (size_t i = 0; i < operandLocations_.length(); i++)
298 2388 : operandLocations_[i].setUninitialized();
299 :
       : // Pop any spill space in one adjustment of the stack pointer.
300 939 : if (stackPushed_ > 0) {
301 31 : masm.addToStackPtr(Imm32(stackPushed_));
302 31 : stackPushed_ = 0;
303 : }
304 939 : freePayloadSlots_.clear();
305 939 : freeValueSlots_.clear();
306 939 : }
307 :
308 : Register
309 862 : CacheRegisterAllocator::allocateRegister(MacroAssembler& masm)
310 : {
       : // Obtain a free GPR, escalating through three strategies:
       : // (1) free dead operands, (2) spill an operand not used by the
       : // current instruction, (3) push a reserved "after-spill" register.
311 862 : if (availableRegs_.empty())
312 11 : freeDeadOperandLocations(masm);
313 :
314 862 : if (availableRegs_.empty()) {
315 : // Still no registers available, try to spill unused operands to
316 : // the stack.
317 21 : for (size_t i = 0; i < operandLocations_.length(); i++) {
318 20 : OperandLocation& loc = operandLocations_[i];
319 20 : if (loc.kind() == OperandLocation::PayloadReg) {
320 18 : Register reg = loc.payloadReg();
321 18 : if (currentOpRegs_.has(reg))
322 9 : continue;
323 :
324 9 : spillOperandToStack(masm, &loc);
325 9 : availableRegs_.add(reg);
326 9 : break; // We got a register, so break out of the loop.
327 : }
328 2 : if (loc.kind() == OperandLocation::ValueReg) {
329 1 : ValueOperand reg = loc.valueReg();
330 1 : if (currentOpRegs_.aliases(reg))
331 0 : continue;
332 :
333 1 : spillOperandToStack(masm, &loc);
334 1 : availableRegs_.add(reg);
335 1 : break; // Break out of the loop.
336 : }
337 : }
338 : }
339 :
340 862 : if (availableRegs_.empty() && !availableRegsAfterSpill_.empty()) {
       : // Last resort: push a register that must be restored later (see
       : // spilledRegs_ handling in restoreInputState).
341 1 : Register reg = availableRegsAfterSpill_.takeAny();
342 1 : masm.push(reg);
343 1 : stackPushed_ += sizeof(uintptr_t);
344 :
345 1 : masm.propagateOOM(spilledRegs_.append(SpilledRegister(reg, stackPushed_)));
346 :
347 1 : availableRegs_.add(reg);
348 : }
349 :
350 : // At this point, there must be a free register.
351 862 : MOZ_RELEASE_ASSERT(!availableRegs_.empty());
352 :
353 862 : Register reg = availableRegs_.takeAny();
354 862 : currentOpRegs_.add(reg);
355 862 : return reg;
356 : }
357 :
358 : void
359 295 : CacheRegisterAllocator::allocateFixedRegister(MacroAssembler& masm, Register reg)
360 : {
       : // Claim the specific register |reg| for the current instruction,
       : // evicting whichever operand currently occupies it.
361 : // Fixed registers should be allocated first, to ensure they're
362 : // still available.
363 295 : MOZ_ASSERT(!currentOpRegs_.has(reg), "Register is in use");
364 :
365 295 : freeDeadOperandLocations(masm);
366 :
367 295 : if (availableRegs_.has(reg)) {
368 25 : availableRegs_.take(reg);
369 25 : currentOpRegs_.add(reg);
370 25 : return;
371 : }
372 :
373 : // The register must be used by some operand. Spill it to the stack.
374 346 : for (size_t i = 0; i < operandLocations_.length(); i++) {
375 346 : OperandLocation& loc = operandLocations_[i];
376 346 : if (loc.kind() == OperandLocation::PayloadReg) {
377 244 : if (loc.payloadReg() != reg)
378 73 : continue;
379 :
380 171 : spillOperandToStackOrRegister(masm, &loc);
381 171 : currentOpRegs_.add(reg);
382 171 : return;
383 : }
384 102 : if (loc.kind() == OperandLocation::ValueReg) {
385 100 : if (!loc.valueReg().aliases(reg))
386 1 : continue;
387 :
       : // Spilling frees the whole ValueOperand; put its pieces back in
       : // the available set, then re-claim only |reg| itself.
388 99 : ValueOperand valueReg = loc.valueReg();
389 99 : spillOperandToStackOrRegister(masm, &loc);
390 :
391 99 : availableRegs_.add(valueReg);
392 99 : availableRegs_.take(reg);
393 99 : currentOpRegs_.add(reg);
394 99 : return;
395 : }
396 : }
397 :
398 0 : MOZ_CRASH("Invalid register");
399 : }
400 :
401 : void
402 236 : CacheRegisterAllocator::allocateFixedValueRegister(MacroAssembler& masm, ValueOperand reg)
403 : {
       : // A ValueOperand is two GPRs on NUNBOX32 platforms and one elsewhere;
       : // claim every piece as a fixed register.
404 : #ifdef JS_NUNBOX32
405 : allocateFixedRegister(masm, reg.payloadReg());
406 : allocateFixedRegister(masm, reg.typeReg());
407 : #else
408 236 : allocateFixedRegister(masm, reg.valueReg());
409 : #endif
410 236 : }
411 :
412 : ValueOperand
413 8 : CacheRegisterAllocator::allocateValueRegister(MacroAssembler& masm)
414 : {
       : // Allocate a fresh ValueOperand: two GPRs on NUNBOX32, one otherwise.
415 : #ifdef JS_NUNBOX32
416 : Register reg1 = allocateRegister(masm);
417 : Register reg2 = allocateRegister(masm);
418 : return ValueOperand(reg1, reg2);
419 : #else
420 8 : Register reg = allocateRegister(masm);
421 8 : return ValueOperand(reg);
422 : #endif
423 : }
424 :
425 : bool
426 266 : CacheRegisterAllocator::init()
427 : {
       : // Size the location-tracking vectors from the CacheIR writer's operand
       : // counts. Returns false on OOM.
428 266 : if (!origInputLocations_.resize(writer_.numInputOperands()))
429 0 : return false;
430 266 : if (!operandLocations_.resize(writer_.numOperandIds()))
431 0 : return false;
432 266 : return true;
433 : }
434 :
435 : void
436 21 : CacheRegisterAllocator::initAvailableRegsAfterSpill()
437 : {
438 : // Registers not in availableRegs_ and not used by input operands are
439 : // available after being spilled.
       : // Computed as: ~availableRegs ∩ ~inputRegisterSet().
440 42 : availableRegsAfterSpill_.set() =
441 42 : GeneralRegisterSet::Intersect(GeneralRegisterSet::Not(availableRegs_.set()),
442 42 : GeneralRegisterSet::Not(inputRegisterSet()));
443 21 : }
444 :
445 : void
446 21 : CacheRegisterAllocator::fixupAliasedInputs(MacroAssembler& masm)
447 : {
448 : // If IC inputs alias each other, make sure they are stored in different
449 : // locations so we don't have to deal with this complexity in the rest of
450 : // the allocator.
451 : //
452 : // Note that this can happen in IonMonkey with something like |o.foo = o|
453 : // or |o[i] = i|.
454 :
455 21 : size_t numInputs = writer_.numInputOperands();
456 21 : MOZ_ASSERT(origInputLocations_.length() == numInputs);
457 :
       : // Compare each input register location against all earlier inputs.
458 37 : for (size_t i = 1; i < numInputs; i++) {
459 16 : OperandLocation& loc1 = operandLocations_[i];
460 16 : if (!loc1.isInRegister())
461 0 : continue;
462 :
463 32 : for (size_t j = 0; j < i; j++) {
464 16 : OperandLocation& loc2 = operandLocations_[j];
465 16 : if (!loc1.aliasesReg(loc2))
466 16 : continue;
467 :
468 : // loc1 and loc2 alias so we spill one of them. If one is a
469 : // ValueReg and the other is a PayloadReg, we have to spill the
470 : // PayloadReg: spilling the ValueReg instead would leave its type
471 : // register unallocated on 32-bit platforms.
472 0 : if (loc1.kind() == OperandLocation::ValueReg) {
473 0 : MOZ_ASSERT_IF(loc2.kind() == OperandLocation::ValueReg,
474 : loc1 == loc2);
475 0 : spillOperandToStack(masm, &loc2);
476 : } else {
477 0 : MOZ_ASSERT(loc1.kind() == OperandLocation::PayloadReg);
478 0 : spillOperandToStack(masm, &loc1);
479 0 : break; // Spilled loc1, so nothing else will alias it.
480 : }
481 : }
482 : }
483 21 : }
484 :
485 : GeneralRegisterSet
486 21 : CacheRegisterAllocator::inputRegisterSet() const
487 : {
       : // Collect the set of GPRs currently holding IC input operands.
       : // Inputs living on the stack, in the frame, in constants, or in FP
       : // registers contribute nothing.
488 21 : MOZ_ASSERT(origInputLocations_.length() == writer_.numInputOperands());
489 :
490 21 : AllocatableGeneralRegisterSet result;
491 58 : for (size_t i = 0; i < writer_.numInputOperands(); i++) {
492 37 : const OperandLocation& loc = operandLocations_[i];
       : // Inputs must still be at their original locations at this point.
493 37 : MOZ_ASSERT(loc == origInputLocations_[i]);
494 :
495 37 : switch (loc.kind()) {
496 : case OperandLocation::PayloadReg:
497 36 : result.addUnchecked(loc.payloadReg());
498 36 : continue;
499 : case OperandLocation::ValueReg:
500 1 : result.addUnchecked(loc.valueReg());
501 1 : continue;
502 : case OperandLocation::PayloadStack:
503 : case OperandLocation::ValueStack:
504 : case OperandLocation::BaselineFrame:
505 : case OperandLocation::Constant:
506 : case OperandLocation::DoubleReg:
507 0 : continue;
508 : case OperandLocation::Uninitialized:
509 0 : break;
510 : }
511 0 : MOZ_CRASH("Invalid kind");
512 : }
513 :
514 21 : return result.set();
515 : }
516 :
517 : JSValueType
518 364 : CacheRegisterAllocator::knownType(ValOperandId val) const
519 : {
       : // Report the statically-known JSValueType of an operand based on its
       : // current location; boxed locations yield JSVAL_TYPE_UNKNOWN.
520 364 : const OperandLocation& loc = operandLocations_[val.id()];
521 :
522 364 : switch (loc.kind()) {
523 : case OperandLocation::ValueReg:
524 : case OperandLocation::ValueStack:
525 : case OperandLocation::BaselineFrame:
526 329 : return JSVAL_TYPE_UNKNOWN;
527 :
528 : case OperandLocation::PayloadStack:
529 : case OperandLocation::PayloadReg:
530 35 : return loc.payloadType();
531 :
532 : case OperandLocation::Constant:
533 0 : return loc.constant().isDouble()
534 0 : ? JSVAL_TYPE_DOUBLE
535 0 : : loc.constant().extractNonDoubleType();
536 :
537 : case OperandLocation::DoubleReg:
538 0 : return JSVAL_TYPE_DOUBLE;
539 :
540 : case OperandLocation::Uninitialized:
541 0 : break;
542 : }
543 :
544 0 : MOZ_CRASH("Invalid kind");
545 : }
546 :
547 : void
548 37 : CacheRegisterAllocator::initInputLocation(size_t i, const TypedOrValueRegister& reg)
549 : {
       : // Record input |i|'s starting location from a TypedOrValueRegister:
       : // boxed value register, FP register (double), or typed GPR.
550 37 : if (reg.hasValue()) {
551 1 : initInputLocation(i, reg.valueReg());
552 36 : } else if (reg.typedReg().isFloat()) {
553 0 : MOZ_ASSERT(reg.type() == MIRType::Double);
554 0 : initInputLocation(i, reg.typedReg().fpu());
555 : } else {
556 36 : initInputLocation(i, reg.typedReg().gpr(), ValueTypeFromMIRType(reg.type()));
557 : }
558 37 : }
559 :
560 : void
561 16 : CacheRegisterAllocator::initInputLocation(size_t i, const ConstantOrRegister& value)
562 : {
       : // Record input |i|'s starting location, dispatching on whether the
       : // input is a constant Value or lives in a register.
563 16 : if (value.constant())
564 0 : initInputLocation(i, value.value());
565 : else
566 16 : initInputLocation(i, value.reg());
567 16 : }
568 :
569 : void
570 35 : CacheRegisterAllocator::spillOperandToStack(MacroAssembler& masm, OperandLocation* loc)
571 : {
       : // Spill a register-resident operand (ValueReg or PayloadReg) onto the
       : // native stack, preferring a previously-freed slot over pushing new
       : // stack space. Updates |loc| to the stack location.
572 35 : MOZ_ASSERT(loc >= operandLocations_.begin() && loc < operandLocations_.end());
573 :
574 35 : if (loc->kind() == OperandLocation::ValueReg) {
575 18 : if (!freeValueSlots_.empty()) {
       : // Reuse a free Value-sized slot; stack positions are recorded as
       : // offsets from the point where stackPushed_ was 0.
576 0 : uint32_t stackPos = freeValueSlots_.popCopy();
577 0 : MOZ_ASSERT(stackPos <= stackPushed_);
578 0 : masm.storeValue(loc->valueReg(), Address(masm.getStackPointer(),
579 0 : stackPushed_ - stackPos));
580 0 : loc->setValueStack(stackPos);
581 0 : return;
582 : }
583 18 : stackPushed_ += sizeof(js::Value);
584 18 : masm.pushValue(loc->valueReg());
585 18 : loc->setValueStack(stackPushed_);
586 18 : return;
587 : }
588 :
589 17 : MOZ_ASSERT(loc->kind() == OperandLocation::PayloadReg);
590 :
591 17 : if (!freePayloadSlots_.empty()) {
       : // Same slot-reuse strategy for pointer-sized payload spills.
592 0 : uint32_t stackPos = freePayloadSlots_.popCopy();
593 0 : MOZ_ASSERT(stackPos <= stackPushed_);
594 0 : masm.storePtr(loc->payloadReg(), Address(masm.getStackPointer(),
595 0 : stackPushed_ - stackPos));
596 0 : loc->setPayloadStack(stackPos, loc->payloadType());
597 0 : return;
598 : }
599 17 : stackPushed_ += sizeof(uintptr_t);
600 17 : masm.push(loc->payloadReg());
601 17 : loc->setPayloadStack(stackPushed_, loc->payloadType());
602 : }
603 :
604 : void
605 270 : CacheRegisterAllocator::spillOperandToStackOrRegister(MacroAssembler& masm, OperandLocation* loc)
606 : {
       : // Move an operand out of its current register, preferring another free
       : // register (cheaper) and falling back to a stack spill.
607 270 : MOZ_ASSERT(loc >= operandLocations_.begin() && loc < operandLocations_.end());
608 :
609 : // If enough registers are available, use them.
610 270 : if (loc->kind() == OperandLocation::ValueReg) {
       : // A boxed value needs one GPR on 64-bit, two on NUNBOX32.
611 : static const size_t BoxPieces = sizeof(Value) / sizeof(uintptr_t);
612 99 : if (availableRegs_.set().size() >= BoxPieces) {
613 99 : ValueOperand reg = availableRegs_.takeAnyValue();
614 99 : masm.moveValue(loc->valueReg(), reg);
615 99 : loc->setValueReg(reg);
616 99 : return;
617 : }
618 : } else {
619 171 : MOZ_ASSERT(loc->kind() == OperandLocation::PayloadReg);
620 171 : if (!availableRegs_.empty()) {
621 163 : Register reg = availableRegs_.takeAny();
622 163 : masm.movePtr(loc->payloadReg(), reg);
623 163 : loc->setPayloadReg(reg, loc->payloadType());
624 163 : return;
625 : }
626 : }
627 :
628 : // Not enough registers available, spill to the stack.
629 8 : spillOperandToStack(masm, loc);
630 : }
631 :
632 : void
633 28 : CacheRegisterAllocator::popPayload(MacroAssembler& masm, OperandLocation* loc, Register dest)
634 : {
       : // Reload a stack-spilled payload into |dest| and update |loc|.
635 28 : MOZ_ASSERT(loc >= operandLocations_.begin() && loc < operandLocations_.end());
636 28 : MOZ_ASSERT(stackPushed_ >= sizeof(uintptr_t));
637 :
638 : // The payload is on the stack. If it's on top of the stack we can just
639 : // pop it, else we emit a load.
640 28 : if (loc->payloadStack() == stackPushed_) {
641 18 : masm.pop(dest);
642 18 : stackPushed_ -= sizeof(uintptr_t);
643 : } else {
644 10 : MOZ_ASSERT(loc->payloadStack() < stackPushed_);
645 10 : masm.loadPtr(Address(masm.getStackPointer(), stackPushed_ - loc->payloadStack()), dest);
       : // The vacated slot becomes reusable for future payload spills.
646 10 : masm.propagateOOM(freePayloadSlots_.append(loc->payloadStack()));
647 : }
648 :
649 28 : loc->setPayloadReg(dest, loc->payloadType());
650 28 : }
651 :
652 : void
653 7 : CacheRegisterAllocator::popValue(MacroAssembler& masm, OperandLocation* loc, ValueOperand dest)
654 : {
       : // Reload a stack-spilled boxed Value into |dest| and update |loc|.
655 7 : MOZ_ASSERT(loc >= operandLocations_.begin() && loc < operandLocations_.end());
656 7 : MOZ_ASSERT(stackPushed_ >= sizeof(js::Value));
657 :
658 : // The Value is on the stack. If it's on top of the stack we can just
659 : // pop it, else we emit a load.
660 7 : if (loc->valueStack() == stackPushed_) {
661 7 : masm.popValue(dest);
662 7 : stackPushed_ -= sizeof(js::Value);
663 : } else {
664 0 : MOZ_ASSERT(loc->valueStack() < stackPushed_);
665 0 : masm.loadValue(Address(masm.getStackPointer(), stackPushed_ - loc->valueStack()), dest);
       : // The vacated slot becomes reusable for future Value spills.
666 0 : masm.propagateOOM(freeValueSlots_.append(loc->valueStack()));
667 : }
668 :
669 7 : loc->setValueReg(dest);
670 7 : }
671 :
672 : bool
673 269 : OperandLocation::aliasesReg(const OperandLocation& other) const
674 : {
       : // True when this location shares any machine register with |other|.
       : // Non-register locations can never alias.
675 269 : MOZ_ASSERT(&other != this);
676 :
677 269 : switch (other.kind_) {
678 : case PayloadReg:
679 94 : return aliasesReg(other.payloadReg());
680 : case ValueReg:
681 106 : return aliasesReg(other.valueReg());
682 : case PayloadStack:
683 : case ValueStack:
684 : case BaselineFrame:
685 : case Constant:
686 : case DoubleReg:
687 69 : return false;
688 : case Uninitialized:
689 0 : break;
690 : }
691 :
692 0 : MOZ_CRASH("Invalid kind");
693 : }
694 :
695 : void
696 600 : CacheRegisterAllocator::restoreInputState(MacroAssembler& masm, bool shouldDiscardStack)
697 : {
       : // Emit code that moves every IC input operand back to its original
       : // location (origInputLocations_), resolving register cycles by
       : // spilling, restoring any pushed spill registers, and optionally
       : // discarding the remaining spill stack.
698 600 : size_t numInputOperands = origInputLocations_.length();
699 600 : MOZ_ASSERT(writer_.numInputOperands() == numInputOperands);
700 :
701 1673 : for (size_t j = 0; j < numInputOperands; j++) {
702 1073 : const OperandLocation& dest = origInputLocations_[j];
703 1073 : OperandLocation& cur = operandLocations_[j];
704 1073 : if (dest == cur)
705 647 : continue;
706 :
       : // Whatever path a case below takes, the bookkeeping ends with
       : // cur = dest when this iteration's scope exits.
707 1278 : auto autoAssign = mozilla::MakeScopeExit([&] { cur = dest; });
708 :
709 : // We have a cycle if a destination register will be used later
710 : // as source register. If that happens, just push the current value
711 : // on the stack and later get it from there.
712 679 : for (size_t k = j + 1; k < numInputOperands; k++) {
713 253 : OperandLocation& laterSource = operandLocations_[k];
714 253 : if (dest.aliasesReg(laterSource))
715 17 : spillOperandToStack(masm, &laterSource);
716 : }
717 :
718 426 : if (dest.kind() == OperandLocation::ValueReg) {
719 : // We have to restore a Value register.
720 390 : switch (cur.kind()) {
721 : case OperandLocation::ValueReg:
722 14 : masm.moveValue(cur.valueReg(), dest.valueReg());
723 14 : continue;
724 : case OperandLocation::PayloadReg:
725 369 : masm.tagValue(cur.payloadType(), cur.payloadReg(), dest.valueReg());
726 369 : continue;
727 : case OperandLocation::PayloadStack: {
728 0 : Register scratch = dest.valueReg().scratchReg();
729 0 : popPayload(masm, &cur, scratch);
730 0 : masm.tagValue(cur.payloadType(), scratch, dest.valueReg());
731 0 : continue;
732 : }
733 : case OperandLocation::ValueStack:
734 7 : popValue(masm, &cur, dest.valueReg());
735 7 : continue;
736 : case OperandLocation::Constant:
737 : case OperandLocation::BaselineFrame:
738 : case OperandLocation::DoubleReg:
739 : case OperandLocation::Uninitialized:
740 0 : break;
741 : }
742 36 : } else if (dest.kind() == OperandLocation::PayloadReg) {
743 : // We have to restore a payload register.
744 24 : switch (cur.kind()) {
745 : case OperandLocation::ValueReg:
746 2 : MOZ_ASSERT(dest.payloadType() != JSVAL_TYPE_DOUBLE);
747 2 : masm.unboxNonDouble(cur.valueReg(), dest.payloadReg());
748 2 : continue;
749 : case OperandLocation::PayloadReg:
750 3 : MOZ_ASSERT(cur.payloadType() == dest.payloadType());
751 3 : masm.mov(cur.payloadReg(), dest.payloadReg());
752 3 : continue;
753 : case OperandLocation::PayloadStack: {
754 19 : MOZ_ASSERT(cur.payloadType() == dest.payloadType());
755 19 : popPayload(masm, &cur, dest.payloadReg());
756 19 : continue;
757 : }
758 : case OperandLocation::ValueStack:
759 0 : MOZ_ASSERT(stackPushed_ >= sizeof(js::Value));
760 0 : MOZ_ASSERT(cur.valueStack() <= stackPushed_);
761 0 : MOZ_ASSERT(dest.payloadType() != JSVAL_TYPE_DOUBLE);
762 0 : masm.unboxNonDouble(Address(masm.getStackPointer(), stackPushed_ - cur.valueStack()),
763 0 : dest.payloadReg());
764 0 : continue;
765 : case OperandLocation::Constant:
766 : case OperandLocation::BaselineFrame:
767 : case OperandLocation::DoubleReg:
768 : case OperandLocation::Uninitialized:
769 0 : break;
770 : }
771 36 : } else if (dest.kind() == OperandLocation::Constant ||
772 12 : dest.kind() == OperandLocation::BaselineFrame ||
773 0 : dest.kind() == OperandLocation::DoubleReg)
774 : {
775 : // Nothing to do.
776 12 : continue;
777 : }
778 :
779 0 : MOZ_CRASH("Invalid kind");
780 : }
781 :
       : // Restore any registers pushed by allocateRegister's spill path, in
       : // spilledRegs_ order; only the top-of-stack entry can be popped.
782 602 : for (const SpilledRegister& spill : spilledRegs_) {
783 2 : MOZ_ASSERT(stackPushed_ >= sizeof(uintptr_t));
784 :
785 2 : if (spill.stackPushed == stackPushed_) {
786 2 : masm.pop(spill.reg);
787 2 : stackPushed_ -= sizeof(uintptr_t);
788 : } else {
789 0 : MOZ_ASSERT(spill.stackPushed < stackPushed_);
790 0 : masm.loadPtr(Address(masm.getStackPointer(), stackPushed_ - spill.stackPushed),
791 0 : spill.reg);
792 : }
793 : }
794 :
795 600 : if (shouldDiscardStack)
796 600 : discardStack(masm);
797 600 : }
798 :
799 : size_t
800 12368 : CacheIRStubInfo::stubDataSize() const
801 : {
       : // Sum the byte sizes of all stub fields, walking the field-type list
       : // until the Limit sentinel.
802 12368 : size_t field = 0;
803 12368 : size_t size = 0;
804 : while (true) {
805 47535 : StubField::Type type = fieldType(field++);
806 47535 : if (type == StubField::Type::Limit)
807 24736 : return size;
808 35167 : size += StubField::sizeInBytes(type);
809 35167 : }
810 : }
811 :
812 : void
813 0 : CacheIRStubInfo::copyStubData(ICStub* src, ICStub* dest) const
814 : {
       : // Copy the stub-data payload of |src| into |dest| field by field.
       : // Raw words/int64s are copied directly; GC-thing fields go through
       : // GCPtr::init so post-write barriers/invariants are respected.
815 0 : uint8_t* srcBytes = reinterpret_cast<uint8_t*>(src);
816 0 : uint8_t* destBytes = reinterpret_cast<uint8_t*>(dest);
817 :
818 0 : size_t field = 0;
819 0 : size_t offset = 0;
820 : while (true) {
821 0 : StubField::Type type = fieldType(field);
822 0 : switch (type) {
823 : case StubField::Type::RawWord:
824 0 : *reinterpret_cast<uintptr_t*>(destBytes + offset) =
825 0 : *reinterpret_cast<uintptr_t*>(srcBytes + offset);
826 0 : break;
827 : case StubField::Type::RawInt64:
828 : case StubField::Type::DOMExpandoGeneration:
829 0 : *reinterpret_cast<uint64_t*>(destBytes + offset) =
830 0 : *reinterpret_cast<uint64_t*>(srcBytes + offset);
831 0 : break;
832 : case StubField::Type::Shape:
833 0 : getStubField<ICStub, Shape*>(dest, offset).init(getStubField<ICStub, Shape*>(src, offset));
834 0 : break;
835 : case StubField::Type::JSObject:
836 0 : getStubField<ICStub, JSObject*>(dest, offset).init(getStubField<ICStub, JSObject*>(src, offset));
837 0 : break;
838 : case StubField::Type::ObjectGroup:
839 0 : getStubField<ICStub, ObjectGroup*>(dest, offset).init(getStubField<ICStub, ObjectGroup*>(src, offset));
840 0 : break;
841 : case StubField::Type::Symbol:
842 0 : getStubField<ICStub, JS::Symbol*>(dest, offset).init(getStubField<ICStub, JS::Symbol*>(src, offset));
843 0 : break;
844 : case StubField::Type::String:
845 0 : getStubField<ICStub, JSString*>(dest, offset).init(getStubField<ICStub, JSString*>(src, offset));
846 0 : break;
847 : case StubField::Type::Id:
848 0 : getStubField<ICStub, jsid>(dest, offset).init(getStubField<ICStub, jsid>(src, offset));
849 0 : break;
850 : case StubField::Type::Value:
851 0 : getStubField<ICStub, Value>(dest, offset).init(getStubField<ICStub, Value>(src, offset));
852 0 : break;
853 : case StubField::Type::Limit:
854 0 : return; // Done.
855 : }
856 0 : field++;
857 0 : offset += StubField::sizeInBytes(type);
858 0 : }
859 : }
860 :
861 : template <typename T>
862 : static GCPtr<T>*
       : // Reinterpret a word-aligned stub-data slot as a GCPtr<T>.
863 12458 : AsGCPtr(uintptr_t* ptr)
864 : {
865 12458 : return reinterpret_cast<GCPtr<T>*>(ptr);
866 : }
867 :
868 : template<class Stub, class T>
869 : GCPtr<T>&
870 190 : CacheIRStubInfo::getStubField(Stub* stub, uint32_t offset) const
871 : {
       : // Return a reference to the GCPtr<T> field at |offset| bytes into the
       : // stub's data area (which begins at stubDataOffset_).
872 190 : uint8_t* stubData = (uint8_t*)stub + stubDataOffset_;
873 190 : MOZ_ASSERT(uintptr_t(stubData) % sizeof(uintptr_t) == 0);
874 :
875 190 : return *AsGCPtr<T>((uintptr_t*)(stubData + offset));
876 : }
877 :
       : // Explicit instantiations for every GC-thing field type stored in stubs.
878 : template GCPtr<Shape*>& CacheIRStubInfo::getStubField<ICStub>(ICStub* stub, uint32_t offset) const;
879 : template GCPtr<ObjectGroup*>& CacheIRStubInfo::getStubField<ICStub>(ICStub* stub, uint32_t offset) const;
880 : template GCPtr<JSObject*>& CacheIRStubInfo::getStubField<ICStub>(ICStub* stub, uint32_t offset) const;
881 : template GCPtr<JSString*>& CacheIRStubInfo::getStubField<ICStub>(ICStub* stub, uint32_t offset) const;
882 : template GCPtr<JS::Symbol*>& CacheIRStubInfo::getStubField<ICStub>(ICStub* stub, uint32_t offset) const;
883 : template GCPtr<JS::Value>& CacheIRStubInfo::getStubField<ICStub>(ICStub* stub, uint32_t offset) const;
884 : template GCPtr<jsid>& CacheIRStubInfo::getStubField<ICStub>(ICStub* stub, uint32_t offset) const;
885 :
886 : template <typename T, typename V>
887 : static void
       : // Initialize the GCPtr<T> at |ptr| from the raw bits in |val|.
888 12268 : InitGCPtr(uintptr_t* ptr, V val)
889 : {
890 12268 : AsGCPtr<T>(ptr)->init(mozilla::BitwiseCast<T>(val));
891 12268 : }
892 :
893 : void
894 6007 : CacheIRWriter::copyStubData(uint8_t* dest) const
895 : {
       : // Serialize the writer's collected stub fields into the stub's data
       : // area at |dest|. GC-thing fields are installed via GCPtr::init;
       : // raw words/int64s are stored directly.
896 6007 : MOZ_ASSERT(!failed());
897 :
898 6007 : uintptr_t* destWords = reinterpret_cast<uintptr_t*>(dest);
899 :
900 23316 : for (const StubField& field : stubFields_) {
901 17309 : switch (field.type()) {
902 : case StubField::Type::RawWord:
903 5041 : *destWords = field.asWord();
904 5041 : break;
905 : case StubField::Type::Shape:
906 8088 : InitGCPtr<Shape*>(destWords, field.asWord());
907 8088 : break;
908 : case StubField::Type::JSObject:
909 1464 : InitGCPtr<JSObject*>(destWords, field.asWord());
910 1464 : break;
911 : case StubField::Type::ObjectGroup:
912 1598 : InitGCPtr<ObjectGroup*>(destWords, field.asWord());
913 1598 : break;
914 : case StubField::Type::Symbol:
915 119 : InitGCPtr<JS::Symbol*>(destWords, field.asWord());
916 119 : break;
917 : case StubField::Type::String:
918 881 : InitGCPtr<JSString*>(destWords, field.asWord());
919 881 : break;
920 : case StubField::Type::Id:
921 118 : InitGCPtr<jsid>(destWords, field.asWord());
922 118 : break;
923 : case StubField::Type::RawInt64:
924 : case StubField::Type::DOMExpandoGeneration:
925 0 : *reinterpret_cast<uint64_t*>(destWords) = field.asInt64();
926 0 : break;
927 : case StubField::Type::Value:
928 0 : InitGCPtr<JS::Value>(destWords, field.asInt64());
929 0 : break;
930 : case StubField::Type::Limit:
931 0 : MOZ_CRASH("Invalid type");
932 : }
       : // Advance by the field's size in machine words.
933 17309 : destWords += StubField::sizeInBytes(field.type()) / sizeof(uintptr_t);
934 : }
935 6007 : }
936 :
// Trace all GC-thing stub fields of |stub| during GC. Walks the field-type
// array stored in |stubInfo| (terminated by Type::Limit), keeping a running
// byte |offset| into the stub data so each field can be fetched via
// getStubField.
template <typename T>
void
jit::TraceCacheIRStub(JSTracer* trc, T* stub, const CacheIRStubInfo* stubInfo)
{
    uint32_t field = 0;
    size_t offset = 0;
    while (true) {
        StubField::Type fieldType = stubInfo->fieldType(field);
        switch (fieldType) {
          case StubField::Type::RawWord:
          case StubField::Type::RawInt64:
          case StubField::Type::DOMExpandoGeneration:
            // Non-GC data: nothing to trace.
            break;
          case StubField::Type::Shape:
            TraceNullableEdge(trc, &stubInfo->getStubField<T, Shape*>(stub, offset),
                              "cacheir-shape");
            break;
          case StubField::Type::ObjectGroup:
            TraceNullableEdge(trc, &stubInfo->getStubField<T, ObjectGroup*>(stub, offset),
                              "cacheir-group");
            break;
          case StubField::Type::JSObject:
            TraceNullableEdge(trc, &stubInfo->getStubField<T, JSObject*>(stub, offset),
                              "cacheir-object");
            break;
          case StubField::Type::Symbol:
            TraceNullableEdge(trc, &stubInfo->getStubField<T, JS::Symbol*>(stub, offset),
                              "cacheir-symbol");
            break;
          case StubField::Type::String:
            TraceNullableEdge(trc, &stubInfo->getStubField<T, JSString*>(stub, offset),
                              "cacheir-string");
            break;
          case StubField::Type::Id:
            TraceEdge(trc, &stubInfo->getStubField<T, jsid>(stub, offset), "cacheir-id");
            break;
          case StubField::Type::Value:
            TraceEdge(trc, &stubInfo->getStubField<T, JS::Value>(stub, offset),
                      "cacheir-value");
            break;
          case StubField::Type::Limit:
            return; // Done.
        }
        field++;
        offset += StubField::sizeInBytes(fieldType);
    }
}
984 :
// Explicit instantiations for the two stub classes that hold CacheIR stub
// data: baseline ICStub and IonICStub.
template
void jit::TraceCacheIRStub(JSTracer* trc, ICStub* stub, const CacheIRStubInfo* stubInfo);

template
void jit::TraceCacheIRStub(JSTracer* trc, IonICStub* stub, const CacheIRStubInfo* stubInfo);
990 :
// Compare the writer's stub fields against an existing stub's data at
// |stubData|. Returns true when they match (meaning an equivalent stub is
// already attached). As a special case, a mismatch only in a
// DOMExpandoGeneration field causes the existing stub's data to be
// overwritten in place and |*updated| set to true.
bool
CacheIRWriter::stubDataEqualsMaybeUpdate(uint8_t* stubData, bool* updated) const
{
    MOZ_ASSERT(!failed());

    *updated = false;
    const uintptr_t* stubDataWords = reinterpret_cast<const uintptr_t*>(stubData);

    // If DOMExpandoGeneration fields are different but all other stub fields
    // are exactly the same, we overwrite the old stub data instead of attaching
    // a new stub, as the old stub is never going to succeed. This works because
    // even Ion stubs read the DOMExpandoGeneration field from the stub instead
    // of baking it in.
    bool expandoGenerationIsDifferent = false;

    for (const StubField& field : stubFields_) {
        if (field.sizeIsWord()) {
            if (field.asWord() != *stubDataWords)
                return false;
            stubDataWords++;
            continue;
        }

        // 64-bit field: only a DOMExpandoGeneration mismatch is tolerated.
        if (field.asInt64() != *reinterpret_cast<const uint64_t*>(stubDataWords)) {
            if (field.type() != StubField::Type::DOMExpandoGeneration)
                return false;
            expandoGenerationIsDifferent = true;
        }
        stubDataWords += sizeof(uint64_t) / sizeof(uintptr_t);
    }

    if (expandoGenerationIsDifferent) {
        copyStubData(stubData);
        *updated = true;
    }

    return true;
}
1029 :
1030 : HashNumber
1031 6632 : CacheIRStubKey::hash(const CacheIRStubKey::Lookup& l)
1032 : {
1033 6632 : HashNumber hash = mozilla::HashBytes(l.code, l.length);
1034 6632 : hash = mozilla::AddToHash(hash, uint32_t(l.kind));
1035 6632 : hash = mozilla::AddToHash(hash, uint32_t(l.engine));
1036 6632 : return hash;
1037 : }
1038 :
1039 : bool
1040 6131 : CacheIRStubKey::match(const CacheIRStubKey& entry, const CacheIRStubKey::Lookup& l)
1041 : {
1042 6131 : if (entry.stubInfo->kind() != l.kind)
1043 0 : return false;
1044 :
1045 6131 : if (entry.stubInfo->engine() != l.engine)
1046 0 : return false;
1047 :
1048 6131 : if (entry.stubInfo->codeLength() != l.length)
1049 0 : return false;
1050 :
1051 6131 : if (!mozilla::PodEqual(entry.stubInfo->code(), l.code, l.length))
1052 0 : return false;
1053 :
1054 6131 : return true;
1055 : }
1056 :
// Construct a reader spanning exactly the CacheIR code stored in |stubInfo|,
// by delegating to the (begin, end) pointer constructor.
CacheIRReader::CacheIRReader(const CacheIRStubInfo* stubInfo)
  : CacheIRReader(stubInfo->code(), stubInfo->code() + stubInfo->codeLength())
{}
1060 :
// Allocate and initialize a CacheIRStubInfo in a single malloc'ed buffer with
// this layout: [CacheIRStubInfo][CacheIR code][field-type bytes + Limit].
// Returns nullptr on OOM.
CacheIRStubInfo*
CacheIRStubInfo::New(CacheKind kind, ICStubEngine engine, bool makesGCCalls,
                     uint32_t stubDataOffset, const CacheIRWriter& writer)
{
    size_t numStubFields = writer.numStubFields();
    size_t bytesNeeded = sizeof(CacheIRStubInfo) +
                         writer.codeLength() +
                         (numStubFields + 1); // +1 for the GCType::Limit terminator.
    uint8_t* p = js_pod_malloc<uint8_t>(bytesNeeded);
    if (!p)
        return nullptr;

    // Copy the CacheIR code.
    uint8_t* codeStart = p + sizeof(CacheIRStubInfo);
    mozilla::PodCopy(codeStart, writer.codeStart(), writer.codeLength());

    static_assert(sizeof(StubField::Type) == sizeof(uint8_t),
                  "StubField::Type must fit in uint8_t");

    // Copy the stub field types, one byte per field, terminated by Limit so
    // walkers like TraceCacheIRStub know where to stop.
    uint8_t* fieldTypes = codeStart + writer.codeLength();
    for (size_t i = 0; i < numStubFields; i++)
        fieldTypes[i] = uint8_t(writer.stubFieldType(i));
    fieldTypes[numStubFields] = uint8_t(StubField::Type::Limit);

    // Placement-new the header at the front of the buffer.
    return new(p) CacheIRStubInfo(kind, engine, makesGCCalls, stubDataOffset, codeStart,
                                  writer.codeLength(), fieldTypes);
}
1089 :
// Structural equality for operand locations: same kind and same
// kind-specific payload.
bool
OperandLocation::operator==(const OperandLocation& other) const
{
    if (kind_ != other.kind_)
        return false;

    switch (kind()) {
      case Uninitialized:
        return true;
      case PayloadReg:
        return payloadReg() == other.payloadReg() && payloadType() == other.payloadType();
      case ValueReg:
        return valueReg() == other.valueReg();
      case PayloadStack:
        return payloadStack() == other.payloadStack() && payloadType() == other.payloadType();
      case ValueStack:
        return valueStack() == other.valueStack();
      case BaselineFrame:
        return baselineFrameSlot() == other.baselineFrameSlot();
      case Constant:
        return constant() == other.constant();
      case DoubleReg:
        return doubleReg() == other.doubleReg();
    }

    MOZ_CRASH("Invalid OperandLocation kind");
}
1117 :
// RAII guard that reserves the IC's output register(s) in the allocator for
// the duration of an op, so scratch allocation stays away from them. Float
// typed outputs are not reserved here (note the isFloat() check).
AutoOutputRegister::AutoOutputRegister(CacheIRCompiler& compiler)
  : output_(compiler.outputUnchecked_.ref()),
    alloc_(compiler.allocator)
{
    if (output_.hasValue())
        alloc_.allocateFixedValueRegister(compiler.masm, output_.valueReg());
    else if (!output_.typedReg().isFloat())
        alloc_.allocateFixedRegister(compiler.masm, output_.typedReg().gpr());
}
1127 :
// Release the output register(s) reserved by the constructor, mirroring its
// value/typed-GPR cases.
AutoOutputRegister::~AutoOutputRegister()
{
    if (output_.hasValue())
        alloc_.releaseValueRegister(output_.valueReg());
    else if (!output_.typedReg().isFloat())
        alloc_.releaseRegister(output_.typedReg().gpr());
}
1135 :
1136 : bool
1137 703 : FailurePath::canShareFailurePath(const FailurePath& other) const
1138 : {
1139 703 : if (stackPushed_ != other.stackPushed_)
1140 10 : return false;
1141 :
1142 693 : if (spilledRegs_.length() != other.spilledRegs_.length())
1143 0 : return false;
1144 :
1145 693 : for (size_t i = 0; i < spilledRegs_.length(); i++) {
1146 0 : if (spilledRegs_[i] != other.spilledRegs_[i])
1147 0 : return false;
1148 : }
1149 :
1150 693 : MOZ_ASSERT(inputs_.length() == other.inputs_.length());
1151 :
1152 1484 : for (size_t i = 0; i < inputs_.length(); i++) {
1153 1096 : if (inputs_[i] != other.inputs_[i])
1154 305 : return false;
1155 : }
1156 388 : return true;
1157 : }
1158 :
// Record a failure path capturing the current allocator state (input
// locations, spilled registers, stack depth) and return it in |*failure|.
// Returns false on OOM.
bool
CacheIRCompiler::addFailurePath(FailurePath** failure)
{
    FailurePath newFailure;
    for (size_t i = 0; i < writer_.numInputOperands(); i++) {
        if (!newFailure.appendInput(allocator.operandLocation(i)))
            return false;
    }
    if (!newFailure.setSpilledRegs(allocator.spilledRegs()))
        return false;
    newFailure.setStackPushed(allocator.stackPushed());

    // Reuse the previous failure path if the current one is the same, to
    // avoid emitting duplicate code.
    if (failurePaths.length() > 0 && failurePaths.back().canShareFailurePath(newFailure)) {
        *failure = &failurePaths.back();
        return true;
    }

    if (!failurePaths.append(Move(newFailure)))
        return false;

    *failure = &failurePaths.back();
    return true;
}
1184 :
// Emit the code for failure path |index|: restore the allocator to the state
// captured when the path was recorded, bind its label, and emit the code that
// restores the IC's input state. Returns false on OOM.
bool
CacheIRCompiler::emitFailurePath(size_t index)
{
    FailurePath& failure = failurePaths[index];

    allocator.setStackPushed(failure.stackPushed());

    for (size_t i = 0; i < writer_.numInputOperands(); i++)
        allocator.setOperandLocation(i, failure.input(i));

    if (!allocator.setSpilledRegs(failure.spilledRegs()))
        return false;

    masm.bind(failure.label());
    allocator.restoreInputState(masm);
    return true;
}
1202 :
// Guard that the input value is an object; jumps to the failure path
// otherwise. Skips the check entirely when the type is statically known.
bool
CacheIRCompiler::emitGuardIsObject()
{
    ValOperandId inputId = reader.valOperandId();
    if (allocator.knownType(inputId) == JSVAL_TYPE_OBJECT)
        return true;

    ValueOperand input = allocator.useValueRegister(masm, inputId);
    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;
    masm.branchTestObject(Assembler::NotEqual, input, failure->label());
    return true;
}
1217 :
// Guard that the input value is either an object or null; jumps to the
// failure path for any other type.
bool
CacheIRCompiler::emitGuardIsObjectOrNull()
{
    ValOperandId inputId = reader.valOperandId();
    JSValueType knownType = allocator.knownType(inputId);
    if (knownType == JSVAL_TYPE_OBJECT || knownType == JSVAL_TYPE_NULL)
        return true;

    ValueOperand input = allocator.useValueRegister(masm, inputId);
    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    Label done;
    masm.branchTestObject(Assembler::Equal, input, &done);
    masm.branchTestNull(Assembler::NotEqual, input, failure->label());
    masm.bind(&done);
    return true;
}
1237 :
// Guard that the input value is a string; jumps to the failure path
// otherwise. Skips the check when the type is statically known.
bool
CacheIRCompiler::emitGuardIsString()
{
    ValOperandId inputId = reader.valOperandId();
    if (allocator.knownType(inputId) == JSVAL_TYPE_STRING)
        return true;

    ValueOperand input = allocator.useValueRegister(masm, inputId);
    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;
    masm.branchTestString(Assembler::NotEqual, input, failure->label());
    return true;
}
1252 :
// Guard that the input value is a symbol; jumps to the failure path
// otherwise. Skips the check when the type is statically known.
bool
CacheIRCompiler::emitGuardIsSymbol()
{
    ValOperandId inputId = reader.valOperandId();
    if (allocator.knownType(inputId) == JSVAL_TYPE_SYMBOL)
        return true;

    ValueOperand input = allocator.useValueRegister(masm, inputId);
    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;
    masm.branchTestSymbol(Assembler::NotEqual, input, failure->label());
    return true;
}
1267 :
// Guard that the input value is usable as an int32 index and unbox it into
// the output register. Accepts int32 directly, and doubles that convert
// exactly to int32 (including -0.0, see comment below); anything else jumps
// to the failure path.
bool
CacheIRCompiler::emitGuardIsInt32Index()
{
    ValOperandId inputId = reader.valOperandId();
    Register output = allocator.defineRegister(masm, reader.int32OperandId());

    if (allocator.knownType(inputId) == JSVAL_TYPE_INT32) {
        Register input = allocator.useRegister(masm, Int32OperandId(inputId.id()));
        masm.move32(input, output);
        return true;
    }

    ValueOperand input = allocator.useValueRegister(masm, inputId);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    Label notInt32, done;
    masm.branchTestInt32(Assembler::NotEqual, input, &notInt32);
    masm.unboxInt32(input, output);
    masm.jump(&done);

    masm.bind(&notInt32);

    if (cx_->runtime()->jitSupportsFloatingPoint) {
        masm.branchTestDouble(Assembler::NotEqual, input, failure->label());

        // If we're compiling a Baseline IC, FloatReg0 is always available.
        // Otherwise we must save/restore it around the conversion, and the
        // failure path must pop it before jumping to the real failure label.
        Label failurePopReg;
        if (mode_ != Mode::Baseline)
            masm.push(FloatReg0);

        masm.unboxDouble(input, FloatReg0);
        // ToPropertyKey(-0.0) is "0", so we can truncate -0.0 to 0 here.
        masm.convertDoubleToInt32(FloatReg0, output,
                                  (mode_ == Mode::Baseline) ? failure->label() : &failurePopReg,
                                  false);
        if (mode_ != Mode::Baseline) {
            masm.pop(FloatReg0);
            masm.jump(&done);

            masm.bind(&failurePopReg);
            masm.pop(FloatReg0);
            masm.jump(failure->label());
        }
    } else {
        masm.jump(failure->label());
    }

    masm.bind(&done);
    return true;
}
1321 :
// Guard that the input value has the JSValueType encoded in the CacheIR
// stream; jumps to the failure path on mismatch. Note JSVAL_TYPE_DOUBLE
// uses branchTestNumber, so it accepts int32 values as well.
bool
CacheIRCompiler::emitGuardType()
{
    ValOperandId inputId = reader.valOperandId();
    JSValueType type = reader.valueType();

    if (allocator.knownType(inputId) == type)
        return true;

    ValueOperand input = allocator.useValueRegister(masm, inputId);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    switch (type) {
      case JSVAL_TYPE_STRING:
        masm.branchTestString(Assembler::NotEqual, input, failure->label());
        break;
      case JSVAL_TYPE_SYMBOL:
        masm.branchTestSymbol(Assembler::NotEqual, input, failure->label());
        break;
      case JSVAL_TYPE_INT32:
        masm.branchTestInt32(Assembler::NotEqual, input, failure->label());
        break;
      case JSVAL_TYPE_DOUBLE:
        masm.branchTestNumber(Assembler::NotEqual, input, failure->label());
        break;
      case JSVAL_TYPE_BOOLEAN:
        masm.branchTestBoolean(Assembler::NotEqual, input, failure->label());
        break;
      case JSVAL_TYPE_UNDEFINED:
        masm.branchTestUndefined(Assembler::NotEqual, input, failure->label());
        break;
      case JSVAL_TYPE_NULL:
        masm.branchTestNull(Assembler::NotEqual, input, failure->label());
        break;
      default:
        MOZ_CRASH("Unexpected type");
    }

    return true;
}
1365 :
// Guard that the object's class matches the GuardClassKind encoded in the
// CacheIR stream; jumps to the failure path otherwise.
bool
CacheIRCompiler::emitGuardClass()
{
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    AutoScratchRegister scratch(allocator, masm);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    const Class* clasp = nullptr;
    switch (reader.guardClassKind()) {
      case GuardClassKind::Array:
        clasp = &ArrayObject::class_;
        break;
      case GuardClassKind::UnboxedArray:
        clasp = &UnboxedArrayObject::class_;
        break;
      case GuardClassKind::MappedArguments:
        clasp = &MappedArgumentsObject::class_;
        break;
      case GuardClassKind::UnmappedArguments:
        clasp = &UnmappedArgumentsObject::class_;
        break;
      case GuardClassKind::WindowProxy:
        clasp = cx_->runtime()->maybeWindowProxyClass();
        break;
      case GuardClassKind::JSFunction:
        clasp = &JSFunction::class_;
        break;
    }

    MOZ_ASSERT(clasp);
    masm.branchTestObjClass(Assembler::NotEqual, obj, scratch, clasp, failure->label());
    return true;
}
1402 :
// Guard that the object is a JSFunction whose native (stored in the
// nativeOrScript slot) equals the pointer baked into the CacheIR stream.
bool
CacheIRCompiler::emitGuardIsNativeFunction()
{
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    JSNative nativeFunc = reinterpret_cast<JSNative>(reader.pointer());
    AutoScratchRegister scratch(allocator, masm);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    // Ensure obj is a function.
    const Class* clasp = &JSFunction::class_;
    masm.branchTestObjClass(Assembler::NotEqual, obj, scratch, clasp, failure->label());

    // Ensure function native matches.
    masm.branchPtr(Assembler::NotEqual, Address(obj, JSFunction::offsetOfNativeOrScript()),
                   ImmPtr(nativeFunc), failure->label());
    return true;
}
1423 :
// Guard that the object is a proxy; jumps to the failure path otherwise.
bool
CacheIRCompiler::emitGuardIsProxy()
{
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    AutoScratchRegister scratch(allocator, masm);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    masm.branchTestObjectIsProxy(false, obj, scratch, failure->label());
    return true;
}
1437 :
// Guard that the (proxy) object's handler is the CrossCompartmentWrapper
// singleton; jumps to the failure path otherwise.
bool
CacheIRCompiler::emitGuardIsCrossCompartmentWrapper()
{
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    AutoScratchRegister scratch(allocator, masm);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    Address handlerAddr(obj, ProxyObject::offsetOfHandler());
    masm.branchPtr(Assembler::NotEqual, handlerAddr, ImmPtr(&CrossCompartmentWrapper::singleton),
                   failure->label());
    return true;
}
1453 :
// Guard that the (proxy) object's handler is NOT in the DOM proxy handler
// family; jumps to the failure path if it is.
bool
CacheIRCompiler::emitGuardNotDOMProxy()
{
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    AutoScratchRegister scratch(allocator, masm);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    masm.branchTestProxyHandlerFamily(Assembler::Equal, obj, scratch,
                                      GetDOMProxyHandlerFamily(), failure->label());
    return true;
}
1468 :
// Guard that the int32 operand equals the immediate encoded in the CacheIR
// stream; jumps to the failure path otherwise.
bool
CacheIRCompiler::emitGuardSpecificInt32Immediate()
{
    Register reg = allocator.useRegister(masm, reader.int32OperandId());
    int32_t ival = reader.int32Immediate();

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    masm.branch32(Assembler::NotEqual, reg, Imm32(ival), failure->label());
    return true;
}
1482 :
// Guard that the input value is the magic value (JSWhyMagic) encoded in the
// CacheIR stream; jumps to the failure path otherwise.
bool
CacheIRCompiler::emitGuardMagicValue()
{
    ValueOperand val = allocator.useValueRegister(masm, reader.valOperandId());
    JSWhyMagic magic = reader.whyMagic();

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    masm.branchTestMagicValue(Assembler::NotEqual, val, magic, failure->label());
    return true;
}
1496 :
// Guard that the unboxed plain object has no expando object (expando slot is
// null); jumps to the failure path otherwise.
bool
CacheIRCompiler::emitGuardNoUnboxedExpando()
{
    Register obj = allocator.useRegister(masm, reader.objOperandId());

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    Address expandoAddr(obj, UnboxedPlainObject::offsetOfExpando());
    masm.branchPtr(Assembler::NotEqual, expandoAddr, ImmWord(0), failure->label());
    return true;
}
1510 :
// Guard that the unboxed plain object HAS an expando object and load it into
// the output register; jumps to the failure path when the expando is null.
bool
CacheIRCompiler::emitGuardAndLoadUnboxedExpando()
{
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    Register output = allocator.defineRegister(masm, reader.objOperandId());

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    Address expandoAddr(obj, UnboxedPlainObject::offsetOfExpando());
    masm.loadPtr(expandoAddr, output);
    masm.branchTestPtr(Assembler::Zero, output, output, failure->label());
    return true;
}
1526 :
// Guard that no typed object in the zone has detached storage, via the
// shared CheckForTypedObjectWithDetachedStorage helper.
bool
CacheIRCompiler::emitGuardNoDetachedTypedObjects()
{
    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    CheckForTypedObjectWithDetachedStorage(cx_, masm, failure->label());
    return true;
}
1537 :
// Guard that the native object has zero initialized dense elements; jumps to
// the failure path otherwise.
bool
CacheIRCompiler::emitGuardNoDenseElements()
{
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    AutoScratchRegister scratch(allocator, masm);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    // Load obj->elements.
    masm.loadPtr(Address(obj, NativeObject::offsetOfElements()), scratch);

    // Make sure there are no dense elements.
    Address initLength(scratch, ObjectElements::offsetOfInitializedLength());
    masm.branch32(Assembler::NotEqual, initLength, Imm32(0), failure->label());
    return true;
}
1556 :
// Convert a string to an int32 index in the output register. Fast path:
// the index cached in the string's flags (loadStringIndexValue). Slow path:
// an ABI call to GetIndexFromString, which signals failure with a negative
// return value.
bool
CacheIRCompiler::emitGuardAndGetIndexFromString()
{
    Register str = allocator.useRegister(masm, reader.stringOperandId());
    Register output = allocator.defineRegister(masm, reader.int32OperandId());

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    Label vmCall, done;
    masm.loadStringIndexValue(str, output, &vmCall);
    masm.jump(&done);

    {
        masm.bind(&vmCall);
        // Save all volatile registers around the ABI call, except |output|
        // which receives the result.
        LiveRegisterSet save(GeneralRegisterSet::Volatile(), liveVolatileFloatRegs());
        masm.PushRegsInMask(save);

        masm.setupUnalignedABICall(output);
        masm.passABIArg(str);
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, GetIndexFromString));
        masm.mov(ReturnReg, output);

        LiveRegisterSet ignore;
        ignore.add(output);
        masm.PopRegsInMaskIgnore(save, ignore);

        // GetIndexFromString returns a negative value on failure.
        masm.branchTest32(Assembler::Signed, output, output, failure->label());
    }

    masm.bind(&done);
    return true;
}
1592 :
// Load the object's prototype into the output object register.
bool
CacheIRCompiler::emitLoadProto()
{
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    Register reg = allocator.defineRegister(masm, reader.objOperandId());
    masm.loadObjProto(obj, reg);
    return true;
}
1601 :
// Load the environment object's enclosing environment (unboxed from its
// slot) into the output object register.
bool
CacheIRCompiler::emitLoadEnclosingEnvironment()
{
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    Register reg = allocator.defineRegister(masm, reader.objOperandId());
    masm.extractObject(Address(obj, EnvironmentObject::offsetOfEnclosingEnvironment()), reg);
    return true;
}
1610 :
// Load a wrapper's target object: follow the proxy's reserved-slots pointer,
// then unbox the object stored in the private slot.
bool
CacheIRCompiler::emitLoadWrapperTarget()
{
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    Register reg = allocator.defineRegister(masm, reader.objOperandId());

    masm.loadPtr(Address(obj, ProxyObject::offsetOfReservedSlots()), reg);
    masm.unboxObject(Address(reg, detail::ProxyReservedSlots::offsetOfPrivateSlot()), reg);
    return true;
}
1621 :
// Load the value stored in a DOM proxy's private slot (its expando value)
// into the output value register, using the output's scratch register to
// hold the reserved-slots pointer.
bool
CacheIRCompiler::emitLoadDOMExpandoValue()
{
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    ValueOperand val = allocator.defineValueRegister(masm, reader.valOperandId());

    masm.loadPtr(Address(obj, ProxyObject::offsetOfReservedSlots()), val.scratchReg());
    masm.loadValue(Address(val.scratchReg(),
                           detail::ProxyReservedSlots::offsetOfPrivateSlot()),
                   val);
    return true;
}
1634 :
// Load a DOM proxy's expando value while ignoring its generation counter:
// the private slot holds a PrivateValue pointing at an ExpandoAndGeneration,
// from which only the expando Value is loaded.
bool
CacheIRCompiler::emitLoadDOMExpandoValueIgnoreGeneration()
{
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    ValueOperand output = allocator.defineValueRegister(masm, reader.valOperandId());

    // Determine the expando's Address.
    Register scratch = output.scratchReg();
    masm.loadPtr(Address(obj, ProxyObject::offsetOfReservedSlots()), scratch);
    Address expandoAddr(scratch, detail::ProxyReservedSlots::offsetOfPrivateSlot());

#ifdef DEBUG
    // Private values are stored as doubles, so assert we have a double.
    Label ok;
    masm.branchTestDouble(Assembler::Equal, expandoAddr, &ok);
    masm.assumeUnreachable("DOM expando is not a PrivateValue!");
    masm.bind(&ok);
#endif

    // Load the ExpandoAndGeneration* from the PrivateValue.
    masm.loadPrivate(expandoAddr, scratch);

    // Load expandoAndGeneration->expando into the output Value register.
    masm.loadValue(Address(scratch, ExpandoAndGeneration::offsetOfExpando()), output);
    return true;
}
1661 :
// Store |undefined| into the IC's output. A typed (non-Value) output cannot
// hold undefined, hence the unreachable branch.
bool
CacheIRCompiler::emitLoadUndefinedResult()
{
    AutoOutputRegister output(*this);
    if (output.hasValue())
        masm.moveValue(UndefinedValue(), output.valueReg());
    else
        masm.assumeUnreachable("Should have monitored undefined result");
    return true;
}
1672 :
// Store the boolean |b| into the IC output: as a boxed BooleanValue for a
// Value output, or as a raw word for a typed boolean output.
static void
EmitStoreBoolean(MacroAssembler& masm, bool b, const AutoOutputRegister& output)
{
    if (output.hasValue()) {
        Value val = BooleanValue(b);
        masm.moveValue(val, output.valueReg());
    } else {
        MOZ_ASSERT(output.type() == JSVAL_TYPE_BOOLEAN);
        masm.movePtr(ImmWord(b), output.typedReg().gpr());
    }
}
1684 :
// Store the boolean constant read from the CacheIR stream into the output.
bool
CacheIRCompiler::emitLoadBooleanResult()
{
    AutoOutputRegister output(*this);
    bool b = reader.readBool();
    EmitStoreBoolean(masm, b, output);

    return true;
}
1694 :
// Store |reg| (holding an unboxed payload of JSValueType |type|) into the IC
// output: tag it for a Value output, convert int32->double for a float
// output, or move it directly when the typed output matches |type|. Any
// other combination should have been rejected by type monitoring.
static void
EmitStoreResult(MacroAssembler& masm, Register reg, JSValueType type,
                const AutoOutputRegister& output)
{
    if (output.hasValue()) {
        masm.tagValue(type, reg, output.valueReg());
        return;
    }
    if (type == JSVAL_TYPE_INT32 && output.typedReg().isFloat()) {
        masm.convertInt32ToDouble(reg, output.typedReg().fpu());
        return;
    }
    if (type == output.type()) {
        masm.mov(reg, output.typedReg().gpr());
        return;
    }
    masm.assumeUnreachable("Should have monitored result");
}
1713 :
// Load a native array's length as an int32 result. Fails (and jumps to the
// failure path) when the length doesn't fit in a signed int32.
bool
CacheIRCompiler::emitLoadInt32ArrayLengthResult()
{
    AutoOutputRegister output(*this);
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    AutoScratchRegisterMaybeOutput scratch(allocator, masm, output);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    masm.loadPtr(Address(obj, NativeObject::offsetOfElements()), scratch);
    masm.load32(Address(scratch, ObjectElements::offsetOfLength()), scratch);

    // Guard length fits in an int32.
    masm.branchTest32(Assembler::Signed, scratch, scratch, failure->label());
    EmitStoreResult(masm, scratch, JSVAL_TYPE_INT32, output);
    return true;
}
1733 :
// Load an unboxed array's length as an int32 result. No failure path: the
// length field is loaded and stored directly.
bool
CacheIRCompiler::emitLoadUnboxedArrayLengthResult()
{
    AutoOutputRegister output(*this);
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    AutoScratchRegisterMaybeOutput scratch(allocator, masm, output);

    masm.load32(Address(obj, UnboxedArrayObject::offsetOfLength()), scratch);
    EmitStoreResult(masm, scratch, JSVAL_TYPE_INT32, output);
    return true;
}
1745 :
// Load an arguments object's length as an int32 result, failing when the
// length property has been overridden. The slot packs flag bits in the low
// bits, so the actual length is recovered by shifting them out.
bool
CacheIRCompiler::emitLoadArgumentsObjectLengthResult()
{
    AutoOutputRegister output(*this);
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    AutoScratchRegisterMaybeOutput scratch(allocator, masm, output);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    // Get initial length value.
    masm.unboxInt32(Address(obj, ArgumentsObject::getInitialLengthSlotOffset()), scratch);

    // Test if length has been overridden.
    masm.branchTest32(Assembler::NonZero,
                      scratch,
                      Imm32(ArgumentsObject::LENGTH_OVERRIDDEN_BIT),
                      failure->label());

    // Shift out arguments length and return it. No need to type monitor
    // because this stub always returns int32.
    masm.rshiftPtr(Imm32(ArgumentsObject::PACKED_BITS_COUNT), scratch);
    EmitStoreResult(masm, scratch, JSVAL_TYPE_INT32, output);
    return true;
}
1772 :
// Load a JSFunction's .length as an int32 result, with three cases keyed off
// the function's flags: native (nargs), bound (extended slot, which must
// hold an int32), or interpreted (script's funLength). Lazy scripts and
// functions with a resolved length property jump to the failure path.
bool
CacheIRCompiler::emitLoadFunctionLengthResult()
{
    AutoOutputRegister output(*this);
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    AutoScratchRegisterMaybeOutput scratch(allocator, masm, output);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    // Get the JSFunction flags.
    masm.load16ZeroExtend(Address(obj, JSFunction::offsetOfFlags()), scratch);

    // Functions with lazy scripts don't store their length.
    // If the length was resolved before the length property might be shadowed.
    masm.branchTest32(Assembler::NonZero,
                      scratch,
                      Imm32(JSFunction::INTERPRETED_LAZY |
                            JSFunction::RESOLVED_LENGTH),
                      failure->label());

    Label boundFunction;
    masm.branchTest32(Assembler::NonZero, scratch, Imm32(JSFunction::BOUND_FUN), &boundFunction);
    Label interpreted;
    masm.branchTest32(Assembler::NonZero, scratch, Imm32(JSFunction::INTERPRETED), &interpreted);

    // Load the length of the native function.
    masm.load16ZeroExtend(Address(obj, JSFunction::offsetOfNargs()), scratch);
    Label done;
    masm.jump(&done);

    masm.bind(&boundFunction);
    // Bound functions might have a non-int32 length.
    Address boundLength(obj, FunctionExtended::offsetOfExtendedSlot(BOUND_FUN_LENGTH_SLOT));
    masm.branchTestInt32(Assembler::NotEqual, boundLength, failure->label());
    masm.unboxInt32(boundLength, scratch);
    masm.jump(&done);

    masm.bind(&interpreted);
    // Load the length from the function's script.
    masm.loadPtr(Address(obj, JSFunction::offsetOfNativeOrScript()), scratch);
    masm.load16ZeroExtend(Address(scratch, JSScript::offsetOfFunLength()), scratch);

    masm.bind(&done);
    EmitStoreResult(masm, scratch, JSVAL_TYPE_INT32, output);
    return true;
}
1821 :
1822 : bool
1823 1 : CacheIRCompiler::emitLoadStringLengthResult()
1824 : {
1825 2 : AutoOutputRegister output(*this);
1826 1 : Register str = allocator.useRegister(masm, reader.stringOperandId());
1827 2 : AutoScratchRegisterMaybeOutput scratch(allocator, masm, output);
1828 :
1829 1 : masm.loadStringLength(str, scratch);
1830 1 : EmitStoreResult(masm, scratch, JSVAL_TYPE_INT32, output);
1831 2 : return true;
1832 : }
1833 :
// Emit stub code that loads str[index] as a string result. Only succeeds
// when the character has a cached unit static string; otherwise (out of
// bounds, non-linear string, or char code >= UNIT_STATIC_LIMIT) the stub
// fails to the next IC.
bool
CacheIRCompiler::emitLoadStringCharResult()
{
    AutoOutputRegister output(*this);
    Register str = allocator.useRegister(masm, reader.stringOperandId());
    Register index = allocator.useRegister(masm, reader.int32OperandId());
    AutoScratchRegisterMaybeOutput scratch1(allocator, masm, output);
    AutoScratchRegister scratch2(allocator, masm);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    // Bounds check, load string char. The unsigned BelowOrEqual compare also
    // rejects negative indices. loadStringChar can fail too (e.g. the char
    // is not directly accessible), hence the failure label.
    masm.branch32(Assembler::BelowOrEqual, Address(str, JSString::offsetOfLength()),
                  index, failure->label());
    masm.loadStringChar(str, index, scratch1, failure->label());

    // Load StaticString for this char: index the runtime's unit static
    // string table by char code. Only chars below UNIT_STATIC_LIMIT have one.
    masm.branch32(Assembler::AboveOrEqual, scratch1, Imm32(StaticStrings::UNIT_STATIC_LIMIT),
                  failure->label());
    masm.movePtr(ImmPtr(&cx_->staticStrings().unitStaticTable), scratch2);
    masm.loadPtr(BaseIndex(scratch2, scratch1, ScalePointer), scratch2);

    EmitStoreResult(masm, scratch2, JSVAL_TYPE_STRING, output);
    return true;
}
1861 :
// Emit stub code that loads arguments[index] from an ArgumentsObject.
// Fails if length/elements were overridden, the index is out of bounds,
// elements were deleted (RareArgumentsData present), or the slot holds
// the forwarded-to-call-object magic value.
bool
CacheIRCompiler::emitLoadArgumentsObjectArgResult()
{
    AutoOutputRegister output(*this);
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    Register index = allocator.useRegister(masm, reader.int32OperandId());
    AutoScratchRegisterMaybeOutput scratch(allocator, masm, output);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    // Get initial length value. This packs the length together with the
    // override bits tested below.
    masm.unboxInt32(Address(obj, ArgumentsObject::getInitialLengthSlotOffset()), scratch);

    // Ensure no overridden length/element.
    masm.branchTest32(Assembler::NonZero,
                      scratch,
                      Imm32(ArgumentsObject::LENGTH_OVERRIDDEN_BIT |
                            ArgumentsObject::ELEMENT_OVERRIDDEN_BIT),
                      failure->label());

    // Bounds check. Shift the packed flag bits out first to recover the
    // actual length; unsigned AboveOrEqual also rejects negative indices.
    masm.rshift32(Imm32(ArgumentsObject::PACKED_BITS_COUNT), scratch);
    masm.branch32(Assembler::AboveOrEqual, index, scratch, failure->label());

    // Load ArgumentsData.
    masm.loadPrivate(Address(obj, ArgumentsObject::getDataSlotOffset()), scratch);

    // Fail if we have a RareArgumentsData (elements were deleted).
    masm.branchPtr(Assembler::NotEqual,
                   Address(scratch, offsetof(ArgumentsData, rareData)),
                   ImmWord(0),
                   failure->label());

    // Guard the argument is not a FORWARD_TO_CALL_SLOT MagicValue.
    BaseValueIndex argValue(scratch, index, ArgumentsData::offsetOfArgs());
    masm.branchTestMagic(Assembler::Equal, argValue, failure->label());
    masm.loadValue(argValue, output.valueReg());
    return true;
}
1903 :
// Emit stub code that loads obj's dense element at `index`. Fails on
// out-of-bounds indices and on holes (magic values).
bool
CacheIRCompiler::emitLoadDenseElementResult()
{
    AutoOutputRegister output(*this);
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    Register index = allocator.useRegister(masm, reader.int32OperandId());
    AutoScratchRegisterMaybeOutput scratch(allocator, masm, output);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    // Load obj->elements.
    masm.loadPtr(Address(obj, NativeObject::offsetOfElements()), scratch);

    // Bounds check. Unsigned BelowOrEqual also sends negative indices to
    // the failure path.
    Address initLength(scratch, ObjectElements::offsetOfInitializedLength());
    masm.branch32(Assembler::BelowOrEqual, initLength, index, failure->label());

    // Hole check.
    BaseObjectElementIndex element(scratch, index);
    masm.branchTestMagic(Assembler::Equal, element, failure->label());
    masm.loadTypedOrValue(element, output);
    return true;
}
1929 :
// Emit stub code that loads obj's dense element at `index`, producing
// `undefined` for holes and out-of-bounds (nonnegative) indices instead
// of failing. Requires a Value output register.
bool
CacheIRCompiler::emitLoadDenseElementHoleResult()
{
    AutoOutputRegister output(*this);
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    Register index = allocator.useRegister(masm, reader.int32OperandId());
    AutoScratchRegisterMaybeOutput scratch(allocator, masm, output);

    // A typed output cannot represent `undefined`; monitoring must have
    // widened the result to a Value before this stub was attached.
    if (!output.hasValue()) {
        masm.assumeUnreachable("Should have monitored undefined value after attaching stub");
        return true;
    }

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    // Make sure the index is nonnegative.
    masm.branch32(Assembler::LessThan, index, Imm32(0), failure->label());

    // Load obj->elements.
    masm.loadPtr(Address(obj, NativeObject::offsetOfElements()), scratch);

    // Guard on the initialized length. Out-of-bounds reads produce undefined.
    Label hole;
    Address initLength(scratch, ObjectElements::offsetOfInitializedLength());
    masm.branch32(Assembler::BelowOrEqual, initLength, index, &hole);

    // Load the value. If it's not the hole magic value we're done;
    // otherwise fall through to the hole path.
    Label done;
    masm.loadValue(BaseObjectElementIndex(scratch, index), output.valueReg());
    masm.branchTestMagic(Assembler::NotEqual, output.valueReg(), &done);

    // Load undefined for the hole.
    masm.bind(&hole);
    masm.moveValue(UndefinedValue(), output.valueReg());

    masm.bind(&done);
    return true;
}
1970 :
// Emit stub code that stores `true` if obj has a non-hole dense element at
// `index`. Out-of-bounds indices and holes fail to the next stub rather
// than producing `false`.
bool
CacheIRCompiler::emitLoadDenseElementExistsResult()
{
    AutoOutputRegister output(*this);
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    Register index = allocator.useRegister(masm, reader.int32OperandId());
    AutoScratchRegisterMaybeOutput scratch(allocator, masm, output);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    // Load obj->elements.
    masm.loadPtr(Address(obj, NativeObject::offsetOfElements()), scratch);

    // Bounds check. Unsigned compare sends negative indices to next IC.
    Address initLength(scratch, ObjectElements::offsetOfInitializedLength());
    masm.branch32(Assembler::BelowOrEqual, initLength, index, failure->label());

    // Hole check.
    BaseObjectElementIndex element(scratch, index);
    masm.branchTestMagic(Assembler::Equal, element, failure->label());

    EmitStoreBoolean(masm, true, output);
    return true;
}
1997 :
// Emit stub code that stores a boolean: `true` if obj has a non-hole dense
// element at `index`, `false` for holes and out-of-bounds (nonnegative)
// indices. Negative indices fail to the next stub.
bool
CacheIRCompiler::emitLoadDenseElementHoleExistsResult()
{
    AutoOutputRegister output(*this);
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    Register index = allocator.useRegister(masm, reader.int32OperandId());
    AutoScratchRegisterMaybeOutput scratch(allocator, masm, output);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    // Make sure the index is nonnegative.
    masm.branch32(Assembler::LessThan, index, Imm32(0), failure->label());

    // Load obj->elements.
    masm.loadPtr(Address(obj, NativeObject::offsetOfElements()), scratch);

    // Guard on the initialized length; out-of-bounds counts as a hole.
    Label hole;
    Address initLength(scratch, ObjectElements::offsetOfInitializedLength());
    masm.branch32(Assembler::BelowOrEqual, initLength, index, &hole);

    // Load value and replace with true.
    Label done;
    BaseObjectElementIndex element(scratch, index);
    masm.branchTestMagic(Assembler::Equal, element, &hole);
    EmitStoreBoolean(masm, true, output);
    masm.jump(&done);

    // Load false for the hole.
    masm.bind(&hole);
    EmitStoreBoolean(masm, false, output);

    masm.bind(&done);
    return true;
}
2035 :
// Emit stub code that loads an element from an UnboxedArrayObject with a
// statically-known element type. Fails on out-of-bounds indices.
bool
CacheIRCompiler::emitLoadUnboxedArrayElementResult()
{
    AutoOutputRegister output(*this);
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    Register index = allocator.useRegister(masm, reader.int32OperandId());
    JSValueType elementType = reader.valueType();
    AutoScratchRegisterMaybeOutput scratch(allocator, masm, output);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    // A typed output must match the element type (int32 elements may widen
    // to a double output); anything else means type monitoring went wrong.
    if (!output.hasValue() &&
        elementType != output.type() &&
        !(elementType == JSVAL_TYPE_INT32 && output.type() == JSVAL_TYPE_DOUBLE))
    {
        masm.assumeUnreachable("Should have monitored unboxed property type");
        return true;
    }

    // Bounds check. The initialized length shares a word with the capacity
    // index, so mask it out before comparing.
    masm.load32(Address(obj, UnboxedArrayObject::offsetOfCapacityIndexAndInitializedLength()),
                scratch);
    masm.and32(Imm32(UnboxedArrayObject::InitializedLengthMask), scratch);
    masm.branch32(Assembler::BelowOrEqual, scratch, index, failure->label());

    // Load obj->elements.
    masm.loadPtr(Address(obj, UnboxedArrayObject::offsetOfElements()), scratch);

    // Load value. Element width (and thus index scale) depends on the
    // unboxed element type.
    size_t width = UnboxedTypeSize(elementType);
    BaseIndex addr(scratch, index, ScaleFromElemWidth(width));
    masm.loadUnboxedProperty(addr, elementType, output);
    return true;
}
2072 :
// Emit stub code that loads an element from a typed array / typed object
// with the given layout and scalar type. Fails on out-of-bounds indices
// (and, for Value outputs, when the loaded value can't be represented).
bool
CacheIRCompiler::emitLoadTypedElementResult()
{
    AutoOutputRegister output(*this);
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    Register index = allocator.useRegister(masm, reader.int32OperandId());
    TypedThingLayout layout = reader.typedThingLayout();
    Scalar::Type type = reader.scalarType();

    AutoScratchRegisterMaybeOutput scratch(allocator, masm, output);

    // Sanity-check that type monitoring produced a compatible typed output:
    // float element types need a double output, integer element types need
    // an int32 or double output.
    if (!output.hasValue()) {
        if (type == Scalar::Float32 || type == Scalar::Float64) {
            if (output.type() != JSVAL_TYPE_DOUBLE) {
                masm.assumeUnreachable("Should have monitored double after attaching stub");
                return true;
            }
        } else {
            if (output.type() != JSVAL_TYPE_INT32 && output.type() != JSVAL_TYPE_DOUBLE) {
                masm.assumeUnreachable("Should have monitored int32 after attaching stub");
                return true;
            }
        }
    }

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    // Bounds check. Unsigned compare also rejects negative indices.
    LoadTypedThingLength(masm, layout, obj, scratch);
    masm.branch32(Assembler::BelowOrEqual, scratch, index, failure->label());

    // Load the elements vector.
    LoadTypedThingData(masm, layout, obj, scratch);

    // Load the value.
    BaseIndex source(scratch, index, ScaleFromElemWidth(Scalar::byteSize(type)));
    if (output.hasValue()) {
        masm.loadFromTypedArray(type, source, output.valueReg(), *allowDoubleResult_, scratch,
                                failure->label());
    } else {
        // Element types that load into a general-purpose register.
        bool needGpr = (type == Scalar::Int8 || type == Scalar::Uint8 ||
                        type == Scalar::Int16 || type == Scalar::Uint16 ||
                        type == Scalar::Uint8Clamped || type == Scalar::Int32);
        if (needGpr && output.type() == JSVAL_TYPE_DOUBLE) {
            // Load the element as integer, then convert it to double.
            // NOTE(review): the conversion reads the element from memory
            // again; the load above appears to exist for its failure checks —
            // confirm against MacroAssembler::loadFromTypedArray.
            masm.loadFromTypedArray(type, source, AnyRegister(scratch), scratch, failure->label());
            masm.convertInt32ToDouble(source, output.typedReg().fpu());
        } else {
            masm.loadFromTypedArray(type, source, output.typedReg(), scratch, failure->label());
        }
    }
    return true;
}
2128 :
// Shared helper: emit code that loads a typed-object field at `fieldAddr`
// into the (Value) output. `typeDescr` is a simple type descriptor key:
// either a scalar type or a reference type (any/object/string).
void
CacheIRCompiler::emitLoadTypedObjectResultShared(const Address& fieldAddr, Register scratch,
                                                 TypedThingLayout layout, uint32_t typeDescr,
                                                 const AutoOutputRegister& output)
{
    MOZ_ASSERT(output.hasValue());

    if (SimpleTypeDescrKeyIsScalar(typeDescr)) {
        // Scalar field: loadFromTypedArray boxes it; doubles are allowed
        // and no failure label is needed (nullptr).
        Scalar::Type type = ScalarTypeFromSimpleTypeDescrKey(typeDescr);
        masm.loadFromTypedArray(type, fieldAddr, output.valueReg(),
                                /* allowDouble = */ true, scratch, nullptr);
    } else {
        ReferenceTypeDescr::Type type = ReferenceTypeFromSimpleTypeDescrKey(typeDescr);
        switch (type) {
          case ReferenceTypeDescr::TYPE_ANY:
            // Field already holds a boxed Value.
            masm.loadValue(fieldAddr, output.valueReg());
            break;

          case ReferenceTypeDescr::TYPE_OBJECT: {
            // Field holds a raw object pointer; a null pointer means the
            // JS value `null`, otherwise tag it as an object.
            Label notNull, done;
            masm.loadPtr(fieldAddr, scratch);
            masm.branchTestPtr(Assembler::NonZero, scratch, scratch, &notNull);
            masm.moveValue(NullValue(), output.valueReg());
            masm.jump(&done);
            masm.bind(&notNull);
            masm.tagValue(JSVAL_TYPE_OBJECT, scratch, output.valueReg());
            masm.bind(&done);
            break;
          }

          case ReferenceTypeDescr::TYPE_STRING:
            // Field holds a raw string pointer (never null here).
            masm.loadPtr(fieldAddr, scratch);
            masm.tagValue(JSVAL_TYPE_STRING, scratch, output.valueReg());
            break;

          default:
            MOZ_CRASH("Invalid ReferenceTypeDescr");
        }
    }
}
2169 :
2170 : bool
2171 0 : CacheIRCompiler::emitLoadObjectResult()
2172 : {
2173 0 : AutoOutputRegister output(*this);
2174 0 : Register obj = allocator.useRegister(masm, reader.objOperandId());
2175 :
2176 0 : if (output.hasValue())
2177 0 : masm.tagValue(JSVAL_TYPE_OBJECT, obj, output.valueReg());
2178 : else
2179 0 : masm.mov(obj, output.typedReg().gpr());
2180 :
2181 0 : return true;
2182 : }
2183 :
// Emit stub code computing `typeof obj` for an object operand, storing the
// result string as a Value. Common cases (plain object / callable /
// emulates-undefined) are handled inline; anything else falls back to a
// C++ call to TypeOfObject.
bool
CacheIRCompiler::emitLoadTypeOfObjectResult()
{
    AutoOutputRegister output(*this);
    Register obj = allocator.useRegister(masm, reader.objOperandId());
    AutoScratchRegisterMaybeOutput scratch(allocator, masm, output);

    Label slowCheck, isObject, isCallable, isUndefined, done;
    masm.typeOfObject(obj, scratch, &slowCheck, &isObject, &isCallable, &isUndefined);

    masm.bind(&isCallable);
    masm.moveValue(StringValue(cx_->names().function), output.valueReg());
    masm.jump(&done);

    masm.bind(&isUndefined);
    masm.moveValue(StringValue(cx_->names().undefined), output.valueReg());
    masm.jump(&done);

    masm.bind(&isObject);
    masm.moveValue(StringValue(cx_->names().object), output.valueReg());
    masm.jump(&done);

    {
        // Slow path: call TypeOfObject(obj, rt) in C++, preserving all
        // volatile registers except the one holding the returned string.
        masm.bind(&slowCheck);
        LiveRegisterSet save(GeneralRegisterSet::Volatile(), liveVolatileFloatRegs());
        masm.PushRegsInMask(save);

        masm.setupUnalignedABICall(scratch);
        masm.passABIArg(obj);
        masm.movePtr(ImmPtr(cx_->runtime()), scratch);
        masm.passABIArg(scratch);
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, TypeOfObject));
        masm.mov(ReturnReg, scratch);

        // Don't restore scratch: it holds the result string.
        LiveRegisterSet ignore;
        ignore.add(scratch);
        masm.PopRegsInMaskIgnore(save, ignore);

        masm.tagValue(JSVAL_TYPE_STRING, scratch, output.valueReg());
    }

    masm.bind(&done);
    return true;
}
2228 :
// Emit stub code that compares two string operands with the given JSOp and
// stores the boolean result. compareStrings handles the fast paths and
// jumps to the failure label when a VM call would be required.
bool
CacheIRCompiler::emitCompareStringResult()
{
    AutoOutputRegister output(*this);

    Register left = allocator.useRegister(masm, reader.stringOperandId());
    Register right = allocator.useRegister(masm, reader.stringOperandId());
    JSOp op = reader.jsop();

    AutoScratchRegisterMaybeOutput scratch(allocator, masm, output);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    masm.compareStrings(op, left, right, scratch, failure->label());
    masm.tagValue(JSVAL_TYPE_BOOLEAN, scratch, output.valueReg());
    return true;
}
2248 :
// Shared helper for object/symbol equality ICs: compares the two operands
// by raw pointer identity using the condition derived from the JSOp and
// stores a boolean Value result. `symbol` selects which operand kind to read.
bool
CacheIRCompiler::emitComparePointerResultShared(bool symbol)
{
    AutoOutputRegister output(*this);

    Register left = symbol ? allocator.useRegister(masm, reader.symbolOperandId())
                           : allocator.useRegister(masm, reader.objOperandId());
    Register right = symbol ? allocator.useRegister(masm, reader.symbolOperandId())
                            : allocator.useRegister(masm, reader.objOperandId());
    JSOp op = reader.jsop();

    AutoScratchRegisterMaybeOutput scratch(allocator, masm, output);

    Label ifTrue, done;
    masm.branchPtr(JSOpToCondition(op, /* signed = */true), left, right, &ifTrue);

    masm.moveValue(BooleanValue(false), output.valueReg());
    masm.jump(&done);

    masm.bind(&ifTrue);
    masm.moveValue(BooleanValue(true), output.valueReg());
    masm.bind(&done);
    return true;
}
2273 :
2274 :
2275 : bool
2276 4 : CacheIRCompiler::emitCompareObjectResult()
2277 : {
2278 4 : return emitComparePointerResultShared(false);
2279 : }
2280 :
2281 : bool
2282 0 : CacheIRCompiler::emitCompareSymbolResult()
2283 : {
2284 0 : return emitComparePointerResultShared(true);
2285 : }
2286 :
2287 : bool
2288 0 : CacheIRCompiler::emitCallPrintString()
2289 : {
2290 0 : const char* str = reinterpret_cast<char*>(reader.pointer());
2291 0 : masm.printf(str);
2292 0 : return true;
2293 : }
2294 :
2295 : bool
2296 0 : CacheIRCompiler::emitBreakpoint()
2297 : {
2298 0 : masm.breakpoint();
2299 0 : return true;
2300 : }
2301 :
// Emit code that stores `val` into a typed-object reference field at `dest`,
// emitting the appropriate pre-barrier first. For object fields, JS `null`
// is stored as a null pointer; for string fields the string is stored
// unboxed.
void
CacheIRCompiler::emitStoreTypedObjectReferenceProp(ValueOperand val, ReferenceTypeDescr::Type type,
                                                   const Address& dest, Register scratch)
{
    switch (type) {
      case ReferenceTypeDescr::TYPE_ANY:
        // Field stores a full boxed Value.
        EmitPreBarrier(masm, dest, MIRType::Value);
        masm.storeValue(val, dest);
        break;

      case ReferenceTypeDescr::TYPE_OBJECT: {
        // Field stores a raw object pointer; `null` becomes a null pointer.
        EmitPreBarrier(masm, dest, MIRType::Object);
        Label isNull, done;
        masm.branchTestObject(Assembler::NotEqual, val, &isNull);
        masm.unboxObject(val, scratch);
        masm.storePtr(scratch, dest);
        masm.jump(&done);
        masm.bind(&isNull);
        masm.storePtr(ImmWord(0), dest);
        masm.bind(&done);
        break;
      }

      case ReferenceTypeDescr::TYPE_STRING:
        // Field stores a raw string pointer.
        EmitPreBarrier(masm, dest, MIRType::String);
        masm.unboxString(val, scratch);
        masm.storePtr(scratch, dest);
        break;
    }
}
2332 :
// Emit a generational-GC post-write barrier for storing `val` into `obj`.
// The barrier (a C++ call) is skipped entirely when it cannot be needed:
// no nursery, a constant value, a non-object typed value, a value not in
// the nursery, or `obj` itself in the nursery. When `maybeIndex` is a
// valid register, the element-barrier variant is called with that index.
void
CacheIRCompiler::emitPostBarrierShared(Register obj, const ConstantOrRegister& val,
                                       Register scratch, Register maybeIndex)
{
    if (!cx_->nursery().exists())
        return;

    // Constants are tenured (asserted below), so no barrier is needed.
    if (val.constant()) {
        MOZ_ASSERT_IF(val.value().isObject(), !IsInsideNursery(&val.value().toObject()));
        return;
    }

    // Only object values can point into the nursery.
    TypedOrValueRegister reg = val.reg();
    if (reg.hasTyped() && reg.type() != MIRType::Object)
        return;

    // Skip the barrier unless the stored value is a nursery object and
    // obj itself is tenured.
    Label skipBarrier;
    if (reg.hasValue()) {
        masm.branchValueIsNurseryObject(Assembler::NotEqual, reg.valueReg(), scratch,
                                        &skipBarrier);
    } else {
        masm.branchPtrInNurseryChunk(Assembler::NotEqual, reg.typedReg().gpr(), scratch,
                                     &skipBarrier);
    }
    masm.branchPtrInNurseryChunk(Assembler::Equal, obj, scratch, &skipBarrier);

    // Call one of these, depending on maybeIndex:
    //
    //   void PostWriteBarrier(JSRuntime* rt, JSObject* obj);
    //   void PostWriteElementBarrier(JSRuntime* rt, JSObject* obj,
    //                                int32_t index);
    LiveRegisterSet save(GeneralRegisterSet::Volatile(), liveVolatileFloatRegs());
    masm.PushRegsInMask(save);
    masm.setupUnalignedABICall(scratch);
    masm.movePtr(ImmPtr(cx_->runtime()), scratch);
    masm.passABIArg(scratch);
    masm.passABIArg(obj);
    if (maybeIndex != InvalidReg) {
        masm.passABIArg(maybeIndex);
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*,
                                             (PostWriteElementBarrier<IndexInBounds::Yes>)));
    } else {
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, PostWriteBarrier));
    }
    masm.PopRegsInMask(save);

    masm.bind(&skipBarrier);
}
2381 :
// Emit stub code that wraps an object result into the current compartment
// via a C++ call to WrapObjectPure. Non-object results pass through
// unchanged (same zone, so only objects need wrapping); a failed wrap
// (null return) falls through to the next stub.
bool
CacheIRCompiler::emitWrapResult()
{
    AutoOutputRegister output(*this);
    AutoScratchRegister scratch(allocator, masm);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    Label done;
    // We only have to wrap objects, because we are in the same zone.
    masm.branchTestObject(Assembler::NotEqual, output.valueReg(), &done);

    Register obj = output.valueReg().scratchReg();
    masm.unboxObject(output.valueReg(), obj);

    LiveRegisterSet save(GeneralRegisterSet::Volatile(), liveVolatileFloatRegs());
    masm.PushRegsInMask(save);

    masm.setupUnalignedABICall(scratch);
    masm.loadJSContext(scratch);
    masm.passABIArg(scratch);
    masm.passABIArg(obj);
    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, WrapObjectPure));
    masm.mov(ReturnReg, obj);

    // Don't restore obj: it holds the wrapped object (or null).
    LiveRegisterSet ignore;
    ignore.add(obj);
    masm.PopRegsInMaskIgnore(save, ignore);

    // We could not get a wrapper for this object.
    masm.branchTestPtr(Assembler::Zero, obj, obj, failure->label());

    // We clobbered the output register, so we have to retag.
    masm.tagValue(JSVAL_TYPE_OBJECT, obj, output.valueReg());

    masm.bind(&done);
    return true;
}
2422 :
// Emit stub code for a megamorphic property load keyed by a Value id:
// calls GetNativeDataPropertyByValue in C++ with a two-Value stack buffer
// (vp[0] = id on entry, vp[1] = result on success). `handleMissing`
// selects the template instantiation that tolerates missing properties.
bool
CacheIRCompiler::emitMegamorphicLoadSlotByValueResult()
{
    AutoOutputRegister output(*this);

    Register obj = allocator.useRegister(masm, reader.objOperandId());
    ValueOperand idVal = allocator.useValueRegister(masm, reader.valOperandId());
    bool handleMissing = reader.readBool();

    AutoScratchRegisterMaybeOutput scratch(allocator, masm, output);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    // idVal will be in vp[0], result will be stored in vp[1].
    masm.reserveStack(sizeof(Value));
    masm.Push(idVal);
    masm.moveStackPtrTo(idVal.scratchReg());

    // idVal's registers are reused as the vp argument, and scratch holds
    // the boolean return value, so neither is saved/restored.
    LiveRegisterSet volatileRegs(GeneralRegisterSet::Volatile(), liveVolatileFloatRegs());
    volatileRegs.takeUnchecked(scratch);
    volatileRegs.takeUnchecked(idVal);
    masm.PushRegsInMask(volatileRegs);

    masm.setupUnalignedABICall(scratch);
    masm.loadJSContext(scratch);
    masm.passABIArg(scratch);
    masm.passABIArg(obj);
    masm.passABIArg(idVal.scratchReg());
    if (handleMissing)
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, (GetNativeDataPropertyByValue<true>)));
    else
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, (GetNativeDataPropertyByValue<false>)));
    masm.mov(ReturnReg, scratch);
    masm.PopRegsInMask(volatileRegs);

    masm.Pop(idVal);

    // On failure, pop the result slot before jumping so both exits leave
    // the stack balanced; restore framePushed on the success path since
    // adjustStack changed the tracked depth on the failure path.
    Label ok;
    uint32_t framePushed = masm.framePushed();
    masm.branchIfTrueBool(scratch, &ok);
    masm.adjustStack(sizeof(Value));
    masm.jump(failure->label());

    masm.bind(&ok);
    masm.setFramePushed(framePushed);
    masm.loadTypedOrValue(Address(masm.getStackPointer(), 0), output);
    masm.adjustStack(sizeof(Value));
    return true;
}
2474 :
// Emit stub code for a megamorphic hasOwnProperty check keyed by a Value
// id: calls HasOwnNativeDataProperty in C++ with a two-Value stack buffer
// (vp[0] = id on entry, vp[1] = boolean result on success). Mirrors
// emitMegamorphicLoadSlotByValueResult's calling convention.
bool
CacheIRCompiler::emitMegamorphicHasOwnResult()
{
    AutoOutputRegister output(*this);

    Register obj = allocator.useRegister(masm, reader.objOperandId());
    ValueOperand idVal = allocator.useValueRegister(masm, reader.valOperandId());

    AutoScratchRegisterMaybeOutput scratch(allocator, masm, output);

    FailurePath* failure;
    if (!addFailurePath(&failure))
        return false;

    // idVal will be in vp[0], result will be stored in vp[1].
    masm.reserveStack(sizeof(Value));
    masm.Push(idVal);
    masm.moveStackPtrTo(idVal.scratchReg());

    // idVal's registers are reused as the vp argument, and scratch holds
    // the boolean return value, so neither is saved/restored.
    LiveRegisterSet volatileRegs(GeneralRegisterSet::Volatile(), liveVolatileFloatRegs());
    volatileRegs.takeUnchecked(scratch);
    volatileRegs.takeUnchecked(idVal);
    masm.PushRegsInMask(volatileRegs);

    masm.setupUnalignedABICall(scratch);
    masm.loadJSContext(scratch);
    masm.passABIArg(scratch);
    masm.passABIArg(obj);
    masm.passABIArg(idVal.scratchReg());
    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, HasOwnNativeDataProperty));
    masm.mov(ReturnReg, scratch);
    masm.PopRegsInMask(volatileRegs);

    masm.Pop(idVal);

    // On failure, pop the result slot before jumping so both exits leave
    // the stack balanced; restore framePushed on the success path since
    // adjustStack changed the tracked depth on the failure path.
    Label ok;
    uint32_t framePushed = masm.framePushed();
    masm.branchIfTrueBool(scratch, &ok);
    masm.adjustStack(sizeof(Value));
    masm.jump(failure->label());

    masm.bind(&ok);
    masm.setFramePushed(framePushed);
    masm.loadTypedOrValue(Address(masm.getStackPointer(), 0), output);
    masm.adjustStack(sizeof(Value));
    return true;
}
|