LCOV - code coverage report
Current view: top level - js/src/jit - ScalarReplacement.cpp (source / functions)
Test: output.info            Lines:     181 / 557    32.5 %
Date: 2017-07-14 16:53:18    Functions:  21 /  57    36.8 %

          Line data    Source code
       1             : /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
       2             :  * vim: set ts=8 sts=4 et sw=4 tw=99:
       3             :  * This Source Code Form is subject to the terms of the Mozilla Public
       4             :  * License, v. 2.0. If a copy of the MPL was not distributed with this
       5             :  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
       6             : 
       7             : #include "jit/ScalarReplacement.h"
       8             : 
       9             : #include "mozilla/Vector.h"
      10             : 
      11             : #include "jit/IonAnalysis.h"
      12             : #include "jit/JitSpewer.h"
      13             : #include "jit/MIR.h"
      14             : #include "jit/MIRGenerator.h"
      15             : #include "jit/MIRGraph.h"
      16             : #include "vm/UnboxedObject.h"
      17             : 
      18             : #include "jsobjinlines.h"
      19             : 
      20             : namespace js {
      21             : namespace jit {
      22             : 
      23             : template <typename MemoryView>
      24          16 : class EmulateStateOf
      25             : {
      26             :   private:
      27             :     typedef typename MemoryView::BlockState BlockState;
      28             : 
      29             :     MIRGenerator* mir_;
      30             :     MIRGraph& graph_;
      31             : 
      32             :     // Block state at the entrance of all basic blocks.
      33             :     Vector<BlockState*, 8, SystemAllocPolicy> states_;
      34             : 
      35             :   public:
      36          16 :     EmulateStateOf(MIRGenerator* mir, MIRGraph& graph)
      37             :       : mir_(mir),
      38          16 :         graph_(graph)
      39             :     {
      40          16 :     }
      41             : 
      42             :     bool run(MemoryView& view);
      43             : };
      44             : 
      45             : template <typename MemoryView>
      46             : bool
      47           5 : EmulateStateOf<MemoryView>::run(MemoryView& view)
      48             : {
      49             :     // Initialize the current block state of each block to an unknown state.
      50           5 :     if (!states_.appendN(nullptr, graph_.numBlocks()))
      51           0 :         return false;
      52             : 
      53             :     // Initialize the first block which needs to be traversed in RPO.
      54           5 :     MBasicBlock* startBlock = view.startingBlock();
      55           5 :     if (!view.initStartingState(&states_[startBlock->id()]))
      56           0 :         return false;
      57             : 
      58             :     // Iterate over each basic block which has a valid entry state, and merge
      59             :     // the state in the successor blocks.
      60         329 :     for (ReversePostorderIterator block = graph_.rpoBegin(startBlock); block != graph_.rpoEnd(); block++) {
      61         324 :         if (mir_->shouldCancel(MemoryView::phaseName))
      62           0 :             return false;
      63             : 
      64             :         // Get the block state as the result of the merge of all predecessors
      65             :         // which have already been visited in RPO.  This means that backedges
      66             :         // are not yet merged into the loop.
      67         324 :         BlockState* state = states_[block->id()];
      68         324 :         if (!state)
      69          11 :             continue;
      70         313 :         view.setEntryBlockState(state);
      71             : 
       72             :         // Iterate over resume points, phis and instructions.
      73        3029 :         for (MNodeIterator iter(*block); iter; ) {
      74             :             // Increment the iterator before visiting the instruction, as the
       75             :             // visit function might discard the instruction from the basic block.
      76        2716 :             MNode* ins = *iter++;
      77        2716 :             if (ins->isDefinition())
      78        2044 :                 ins->toDefinition()->accept(&view);
      79             :             else
      80         672 :                 view.visitResumePoint(ins->toResumePoint());
      81        2716 :             if (view.oom())
      82           0 :                 return false;
      83             :         }
      84             : 
      85             :         // For each successor, merge the current state into the state of the
      86             :         // successors.
      87         674 :         for (size_t s = 0; s < block->numSuccessors(); s++) {
      88         361 :             MBasicBlock* succ = block->getSuccessor(s);
      89         361 :             if (!view.mergeIntoSuccessorState(*block, succ, &states_[succ->id()]))
      90           0 :                 return false;
      91             :         }
      92             :     }
      93             : 
      94           5 :     states_.clear();
      95           5 :     return true;
      96             : }
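                      : 
                      : // A minimal sketch of the contract a MemoryView must satisfy for
                      : // EmulateStateOf<MemoryView>::run, inferred from the calls above; the
                      : // names SketchMemoryView/MSketchState are placeholders, while the two
                      : // concrete views below (ObjectMemoryView, ArrayMemoryView) are the real
                      : // implementations.
                      : //
                      : //   class SketchMemoryView : public MDefinitionVisitorDefaultNoop
                      : //   {
                      : //     public:
                      : //       typedef MSketchState BlockState;   // emulated state per basic block
                      : //       static const char* phaseName;      // reported to mir->shouldCancel()
                      : //
                      : //       MBasicBlock* startingBlock();                // block of the allocation
                      : //       bool initStartingState(BlockState** pState); // state at the allocation
                      : //       void setEntryBlockState(BlockState* state);  // entry state of a block
                      : //       bool mergeIntoSuccessorState(MBasicBlock* curr, MBasicBlock* succ,
                      : //                                    BlockState** pSuccState);
                      : //       void visitResumePoint(MResumePoint* rp);     // non-definition nodes
                      : //       bool oom() const;
                      : //       // ... plus visit callbacks dispatched through accept(&view).
                      : //   };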
      97             : 
      98             : static bool
      99             : IsObjectEscaped(MInstruction* ins, JSObject* objDefault = nullptr);
     100             : 
      101             : // Returns false if the lambda is not escaped, i.e. it is optimizable by
      102             : // ScalarReplacementOfObject.
     103             : static bool
     104           0 : IsLambdaEscaped(MInstruction* lambda, JSObject* obj)
     105             : {
     106           0 :     MOZ_ASSERT(lambda->isLambda() || lambda->isLambdaArrow());
     107           0 :     JitSpewDef(JitSpew_Escape, "Check lambda\n", lambda);
     108           0 :     JitSpewIndent spewIndent(JitSpew_Escape);
     109             : 
     110             :     // The scope chain is not escaped if none of the Lambdas which are
     111             :     // capturing it are escaped.
     112           0 :     for (MUseIterator i(lambda->usesBegin()); i != lambda->usesEnd(); i++) {
     113           0 :         MNode* consumer = (*i)->consumer();
     114           0 :         if (!consumer->isDefinition()) {
     115             :             // Cannot optimize if it is observable from fun.arguments or others.
     116           0 :             if (!consumer->toResumePoint()->isRecoverableOperand(*i)) {
     117           0 :                 JitSpew(JitSpew_Escape, "Observable lambda cannot be recovered");
     118           0 :                 return true;
     119             :             }
     120           0 :             continue;
     121             :         }
     122             : 
     123           0 :         MDefinition* def = consumer->toDefinition();
     124           0 :         if (!def->isFunctionEnvironment()) {
     125           0 :             JitSpewDef(JitSpew_Escape, "is escaped by\n", def);
     126           0 :             return true;
     127             :         }
     128             : 
     129           0 :         if (IsObjectEscaped(def->toInstruction(), obj)) {
     130           0 :             JitSpewDef(JitSpew_Escape, "is indirectly escaped by\n", def);
     131           0 :             return true;
     132             :         }
     133             :     }
     134           0 :     JitSpew(JitSpew_Escape, "Lambda is not escaped");
     135           0 :     return false;
     136             : }
     137             : 
     138             : static inline bool
     139        2303 : IsOptimizableObjectInstruction(MInstruction* ins)
     140             : {
     141        4594 :     return ins->isNewObject() || ins->isCreateThisWithTemplate() || ins->isNewCallObject() ||
     142        4594 :            ins->isNewIterator();
     143             : }
     144             : 
      145             : // Returns false if the object is not escaped, i.e. it is optimizable by
      146             : // ScalarReplacementOfObject.
      147             : //
      148             : // For the moment, this code is dumb as it only supports objects which do not
      149             : // change shape and which are known by TI at object creation.
     150             : static bool
     151           8 : IsObjectEscaped(MInstruction* ins, JSObject* objDefault)
     152             : {
     153           8 :     MOZ_ASSERT(ins->type() == MIRType::Object);
     154           8 :     MOZ_ASSERT(IsOptimizableObjectInstruction(ins) || ins->isGuardShape() ||
     155             :                ins->isFunctionEnvironment());
     156             : 
     157           8 :     JitSpewDef(JitSpew_Escape, "Check object\n", ins);
     158          16 :     JitSpewIndent spewIndent(JitSpew_Escape);
     159             : 
     160           8 :     JSObject* obj = objDefault;
     161           8 :     if (!obj)
     162           8 :         obj = MObjectState::templateObjectOf(ins);
     163             : 
     164           8 :     if (!obj) {
     165           0 :         JitSpew(JitSpew_Escape, "No template object defined.");
     166           0 :         return true;
     167             :     }
     168             : 
      169             :     // Check if the object is escaped. If the object is not the first operand
      170             :     // of a known Store / Load, then we consider it escaped. This is a
      171             :     // cheap and conservative escape analysis.
     172         522 :     for (MUseIterator i(ins->usesBegin()); i != ins->usesEnd(); i++) {
     173         517 :         MNode* consumer = (*i)->consumer();
     174         517 :         if (!consumer->isDefinition()) {
     175             :             // Cannot optimize if it is observable from fun.arguments or others.
     176         464 :             if (!consumer->toResumePoint()->isRecoverableOperand(*i)) {
     177           0 :                 JitSpew(JitSpew_Escape, "Observable object cannot be recovered");
     178           3 :                 return true;
     179             :             }
     180         464 :             continue;
     181             :         }
     182             : 
     183          53 :         MDefinition* def = consumer->toDefinition();
     184          53 :         switch (def->op()) {
     185             :           case MDefinition::Op_StoreFixedSlot:
     186             :           case MDefinition::Op_LoadFixedSlot:
     187             :             // Not escaped if it is the first argument.
     188          42 :             if (def->indexOf(*i) == 0)
     189          42 :                 break;
     190             : 
     191           0 :             JitSpewDef(JitSpew_Escape, "is escaped by\n", def);
     192           0 :             return true;
     193             : 
     194             :           case MDefinition::Op_LoadUnboxedScalar:
     195             :           case MDefinition::Op_StoreUnboxedScalar:
     196             :           case MDefinition::Op_LoadUnboxedObjectOrNull:
     197             :           case MDefinition::Op_StoreUnboxedObjectOrNull:
     198             :           case MDefinition::Op_LoadUnboxedString:
     199             :           case MDefinition::Op_StoreUnboxedString:
     200             :             // Not escaped if it is the first argument.
     201           0 :             if (def->indexOf(*i) != 0) {
     202           0 :                 JitSpewDef(JitSpew_Escape, "is escaped by\n", def);
     203           0 :                 return true;
     204             :             }
     205             : 
     206           0 :             if (!def->getOperand(1)->isConstant()) {
     207           0 :                 JitSpewDef(JitSpew_Escape, "is addressed with unknown index\n", def);
     208           0 :                 return true;
     209             :             }
     210             : 
     211           0 :             break;
     212             : 
     213             :           case MDefinition::Op_PostWriteBarrier:
     214           8 :             break;
     215             : 
     216             :           case MDefinition::Op_Slots: {
     217             : #ifdef DEBUG
     218             :             // Assert that MSlots are only used by MStoreSlot and MLoadSlot.
     219           0 :             MSlots* ins = def->toSlots();
     220           0 :             MOZ_ASSERT(ins->object() != 0);
     221           0 :             for (MUseIterator i(ins->usesBegin()); i != ins->usesEnd(); i++) {
      222             :                 // toDefinition should normally never fail, since slots don't get
     223             :                 // captured by resume points.
     224           0 :                 MDefinition* def = (*i)->consumer()->toDefinition();
     225           0 :                 MOZ_ASSERT(def->op() == MDefinition::Op_StoreSlot ||
     226             :                            def->op() == MDefinition::Op_LoadSlot);
     227             :             }
     228             : #endif
     229           0 :             break;
     230             :           }
     231             : 
     232             :           case MDefinition::Op_GuardShape: {
     233           0 :             MGuardShape* guard = def->toGuardShape();
     234           0 :             MOZ_ASSERT(!ins->isGuardShape());
     235           0 :             if (obj->maybeShape() != guard->shape()) {
     236           0 :                 JitSpewDef(JitSpew_Escape, "has a non-matching guard shape\n", guard);
     237           0 :                 return true;
     238             :             }
     239           0 :             if (IsObjectEscaped(def->toInstruction(), obj)) {
     240           0 :                 JitSpewDef(JitSpew_Escape, "is indirectly escaped by\n", def);
     241           0 :                 return true;
     242             :             }
     243           0 :             break;
     244             :           }
     245             : 
     246             :           case MDefinition::Op_Lambda:
     247             :           case MDefinition::Op_LambdaArrow: {
     248           0 :             if (IsLambdaEscaped(def->toInstruction(), obj)) {
     249           0 :                 JitSpewDef(JitSpew_Escape, "is indirectly escaped by\n", def);
     250           0 :                 return true;
     251             :             }
     252           0 :             break;
     253             :           }
     254             : 
     255             :           // This instruction is a no-op used to verify that scalar replacement
     256             :           // is working as expected in jit-test.
     257             :           case MDefinition::Op_AssertRecoveredOnBailout:
     258           0 :             break;
     259             : 
     260             :           default:
     261           3 :             JitSpewDef(JitSpew_Escape, "is escaped by\n", def);
     262           3 :             return true;
     263             :         }
     264             :     }
     265             : 
     266           5 :     JitSpew(JitSpew_Escape, "Object is not escaped");
     267           5 :     return false;
     268             : }
     269             : 
     270             : class ObjectMemoryView : public MDefinitionVisitorDefaultNoop
     271             : {
     272             :   public:
     273             :     typedef MObjectState BlockState;
     274             :     static const char* phaseName;
     275             : 
     276             :   private:
     277             :     TempAllocator& alloc_;
     278             :     MConstant* undefinedVal_;
     279             :     MInstruction* obj_;
     280             :     MBasicBlock* startBlock_;
     281             :     BlockState* state_;
     282             : 
      283             :     // Used to improve memory usage by sharing common modifications.
     284             :     const MResumePoint* lastResumePoint_;
     285             : 
     286             :     bool oom_;
     287             : 
     288             :   public:
     289             :     ObjectMemoryView(TempAllocator& alloc, MInstruction* obj);
     290             : 
     291             :     MBasicBlock* startingBlock();
     292             :     bool initStartingState(BlockState** pState);
     293             : 
     294             :     void setEntryBlockState(BlockState* state);
     295             :     bool mergeIntoSuccessorState(MBasicBlock* curr, MBasicBlock* succ, BlockState** pSuccState);
     296             : 
     297             : #ifdef DEBUG
     298             :     void assertSuccess();
     299             : #else
     300             :     void assertSuccess() {}
     301             : #endif
     302             : 
     303        2716 :     bool oom() const { return oom_; }
     304             : 
     305             :   public:
     306             :     void visitResumePoint(MResumePoint* rp);
     307             :     void visitObjectState(MObjectState* ins);
     308             :     void visitStoreFixedSlot(MStoreFixedSlot* ins);
     309             :     void visitLoadFixedSlot(MLoadFixedSlot* ins);
     310             :     void visitPostWriteBarrier(MPostWriteBarrier* ins);
     311             :     void visitStoreSlot(MStoreSlot* ins);
     312             :     void visitLoadSlot(MLoadSlot* ins);
     313             :     void visitGuardShape(MGuardShape* ins);
     314             :     void visitFunctionEnvironment(MFunctionEnvironment* ins);
     315             :     void visitLambda(MLambda* ins);
     316             :     void visitLambdaArrow(MLambdaArrow* ins);
     317             :     void visitStoreUnboxedScalar(MStoreUnboxedScalar* ins);
     318             :     void visitLoadUnboxedScalar(MLoadUnboxedScalar* ins);
     319             :     void visitStoreUnboxedObjectOrNull(MStoreUnboxedObjectOrNull* ins);
     320             :     void visitLoadUnboxedObjectOrNull(MLoadUnboxedObjectOrNull* ins);
     321             :     void visitStoreUnboxedString(MStoreUnboxedString* ins);
     322             :     void visitLoadUnboxedString(MLoadUnboxedString* ins);
     323             : 
     324             :   private:
     325             :     void storeOffset(MInstruction* ins, size_t offset, MDefinition* value);
     326             :     void loadOffset(MInstruction* ins, size_t offset);
     327             : };
     328             : 
     329             : const char* ObjectMemoryView::phaseName = "Scalar Replacement of Object";
     330             : 
     331           5 : ObjectMemoryView::ObjectMemoryView(TempAllocator& alloc, MInstruction* obj)
     332             :   : alloc_(alloc),
     333             :     obj_(obj),
     334           5 :     startBlock_(obj->block()),
     335             :     state_(nullptr),
     336             :     lastResumePoint_(nullptr),
     337          10 :     oom_(false)
     338             : {
     339             :     // Annotate snapshots RValue such that we recover the store first.
     340           5 :     obj_->setIncompleteObject();
     341             : 
     342             :     // Annotate the instruction such that we do not replace it by a
     343             :     // Magic(JS_OPTIMIZED_OUT) in case of removed uses.
     344           5 :     obj_->setImplicitlyUsedUnchecked();
     345           5 : }
     346             : 
     347             : MBasicBlock*
     348           5 : ObjectMemoryView::startingBlock()
     349             : {
     350           5 :     return startBlock_;
     351             : }
     352             : 
     353             : bool
     354           5 : ObjectMemoryView::initStartingState(BlockState** pState)
     355             : {
     356             :     // Uninitialized slots have an "undefined" value.
     357           5 :     undefinedVal_ = MConstant::New(alloc_, UndefinedValue());
     358           5 :     startBlock_->insertBefore(obj_, undefinedVal_);
     359             : 
      360             :     // Create a new block state and insert it at the location of the new object.
     361           5 :     BlockState* state = BlockState::New(alloc_, obj_);
     362           5 :     if (!state)
     363           0 :         return false;
     364             : 
     365           5 :     startBlock_->insertAfter(obj_, state);
     366             : 
     367             :     // Initialize the properties of the object state.
     368           5 :     if (!state->initFromTemplateObject(alloc_, undefinedVal_))
     369           0 :         return false;
     370             : 
      371             :     // Keep the state out of resume points until it is visited.
     372           5 :     state->setInWorklist();
     373             : 
     374           5 :     *pState = state;
     375           5 :     return true;
     376             : }
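                      : 
                      : // After initStartingState, the start block is expected to look roughly as
                      : // follows (a sketch only; the exact allocation opcode depends on obj_):
                      : //
                      : //   ...
                      : //   constant undefined          ; undefinedVal_, inserted before obj_
                      : //   newobject <template>        ; obj_, the allocation being replaced
                      : //   objectstate obj_, ...       ; BlockState, every slot initialized to
                      : //                               ; undefinedVal_ from the template object
                      : //   ...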
     377             : 
     378             : void
     379         313 : ObjectMemoryView::setEntryBlockState(BlockState* state)
     380             : {
     381         313 :     state_ = state;
     382         313 : }
     383             : 
     384             : bool
     385         361 : ObjectMemoryView::mergeIntoSuccessorState(MBasicBlock* curr, MBasicBlock* succ,
     386             :                                           BlockState** pSuccState)
     387             : {
     388         361 :     BlockState* succState = *pSuccState;
     389             : 
     390             :     // When a block has no state yet, create an empty one for the
     391             :     // successor.
     392         361 :     if (!succState) {
      393             :         // If the successor is not dominated then the object cannot flow
      394             :         // into this basic block without a Phi.  We know that no Phi exists
      395             :         // in non-dominated successors, as the conservative escape
      396             :         // analysis fails otherwise.  Such a condition can arise if the
      397             :         // successor is a join at the end of an if-block and the object
      398             :         // only exists within the branch.
     399         313 :         if (!startBlock_->dominates(succ))
     400           5 :             return true;
     401             : 
     402             :         // If there is only one predecessor, carry over the last state of the
     403             :         // block to the successor.  As the block state is immutable, if the
     404             :         // current block has multiple successors, they will share the same entry
     405             :         // state.
     406         308 :         if (succ->numPredecessors() <= 1 || !state_->numSlots()) {
     407         271 :             *pSuccState = state_;
     408         271 :             return true;
     409             :         }
     410             : 
     411             :         // If we have multiple predecessors, then we allocate one Phi node for
     412             :         // each predecessor, and create a new block state which only has phi
     413             :         // nodes.  These would later be removed by the removal of redundant phi
     414             :         // nodes.
     415          37 :         succState = BlockState::Copy(alloc_, state_);
     416          37 :         if (!succState)
     417           0 :             return false;
     418             : 
     419          37 :         size_t numPreds = succ->numPredecessors();
     420         130 :         for (size_t slot = 0; slot < state_->numSlots(); slot++) {
     421          93 :             MPhi* phi = MPhi::New(alloc_);
     422          93 :             if (!phi->reserveLength(numPreds))
     423           0 :                 return false;
     424             : 
      425             :             // Fill the inputs of the successor's Phi with undefined
      426             :             // values; each predecessor block later fills in its Phi input.
     427         306 :             for (size_t p = 0; p < numPreds; p++)
     428         213 :                 phi->addInput(undefinedVal_);
     429             : 
     430             :             // Add Phi in the list of Phis of the basic block.
     431          93 :             succ->addPhi(phi);
     432          93 :             succState->setSlot(slot, phi);
     433             :         }
     434             : 
     435             :         // Insert the newly created block state instruction at the beginning
     436             :         // of the successor block, after all the phi nodes.  Note that it
     437             :         // would be captured by the entry resume point of the successor
     438             :         // block.
     439          37 :         succ->insertBefore(succ->safeInsertTop(), succState);
     440          37 :         *pSuccState = succState;
     441             :     }
     442             : 
     443          85 :     MOZ_ASSERT_IF(succ == startBlock_, startBlock_->isLoopHeader());
     444          85 :     if (succ->numPredecessors() > 1 && succState->numSlots() && succ != startBlock_) {
     445             :         // We need to re-compute successorWithPhis as the previous EliminatePhis
     446             :         // phase might have removed all the Phis from the successor block.
     447             :         size_t currIndex;
     448          85 :         MOZ_ASSERT(!succ->phisEmpty());
     449          85 :         if (curr->successorWithPhis()) {
     450          68 :             MOZ_ASSERT(curr->successorWithPhis() == succ);
     451          68 :             currIndex = curr->positionInPhiSuccessor();
     452             :         } else {
     453          17 :             currIndex = succ->indexForPredecessor(curr);
     454          17 :             curr->setSuccessorWithPhis(succ, currIndex);
     455             :         }
     456          85 :         MOZ_ASSERT(succ->getPredecessor(currIndex) == curr);
     457             : 
      458             :         // Copy the current slot states to the index of the current block in all
      459             :         // the Phis created during the first visit of the successor.
     460         298 :         for (size_t slot = 0; slot < state_->numSlots(); slot++) {
     461         213 :             MPhi* phi = succState->getSlot(slot)->toPhi();
     462         213 :             phi->replaceOperand(currIndex, state_->getSlot(slot));
     463             :         }
     464             :     }
     465             : 
     466          85 :     return true;
     467             : }
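                      : 
                      : // A sketch of the merge at a join block with two predecessors and two
                      : // tracked slots (hypothetical values v0, v1, w1):
                      : //
                      : //   pred A: state = [v0, v1]        pred B: state = [v0, w1]
                      : //                    \                       /
                      : //   join:   phi0 = phi(v0, v0)   ; one MPhi per slot, inputs start as
                      : //           phi1 = phi(v1, w1)   ; undefinedVal_ and are patched above via
                      : //           state = [phi0, phi1] ; phi->replaceOperand(currIndex, slot value)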
     468             : 
     469             : #ifdef DEBUG
     470             : void
     471           5 : ObjectMemoryView::assertSuccess()
     472             : {
     473         523 :     for (MUseIterator i(obj_->usesBegin()); i != obj_->usesEnd(); i++) {
     474         518 :         MNode* ins = (*i)->consumer();
     475         518 :         MDefinition* def = nullptr;
     476             : 
     477             :         // Resume points have been replaced by the object state.
     478         518 :         if (ins->isResumePoint() || (def = ins->toDefinition())->isRecoveredOnBailout()) {
     479         518 :             MOZ_ASSERT(obj_->isIncompleteObject());
     480         518 :             continue;
     481             :         }
     482             : 
     483             :         // The only remaining uses would be removed by DCE, which will also
     484             :         // recover the object on bailouts.
     485           0 :         MOZ_ASSERT(def->isSlots() || def->isLambda() || def->isLambdaArrow());
     486           0 :         MOZ_ASSERT(!def->hasDefUses());
     487             :     }
     488           5 : }
     489             : #endif
     490             : 
     491             : void
     492         672 : ObjectMemoryView::visitResumePoint(MResumePoint* rp)
     493             : {
     494             :     // As long as the MObjectState is not yet seen next to the allocation, we do
     495             :     // not patch the resume point to recover the side effects.
     496         672 :     if (!state_->isInWorklist()) {
     497         658 :         rp->addStore(alloc_, state_, lastResumePoint_);
     498         658 :         lastResumePoint_ = rp;
     499             :     }
     500         672 : }
     501             : 
     502             : void
     503         103 : ObjectMemoryView::visitObjectState(MObjectState* ins)
     504             : {
     505         103 :     if (ins->isInWorklist())
     506           5 :         ins->setNotInWorklist();
     507         103 : }
     508             : 
     509             : void
     510          81 : ObjectMemoryView::visitStoreFixedSlot(MStoreFixedSlot* ins)
     511             : {
     512             :     // Skip stores made on other objects.
     513          81 :     if (ins->object() != obj_)
     514          54 :         return;
     515             : 
     516             :     // Clone the state and update the slot value.
     517          27 :     if (state_->hasFixedSlot(ins->slot())) {
     518          27 :         state_ = BlockState::Copy(alloc_, state_);
     519          27 :         if (!state_) {
     520           0 :             oom_ = true;
     521           0 :             return;
     522             :         }
     523             : 
     524          27 :         state_->setFixedSlot(ins->slot(), ins->value());
     525          27 :         ins->block()->insertBefore(ins->toInstruction(), state_);
     526             :     } else {
     527             :         // UnsafeSetReserveSlot can access baked-in slots which are guarded by
     528             :         // conditions, which are not seen by the escape analysis.
     529           0 :         MBail* bailout = MBail::New(alloc_, Bailout_Inevitable);
     530           0 :         ins->block()->insertBefore(ins, bailout);
     531             :     }
     532             : 
     533             :     // Remove original instruction.
     534          27 :     ins->block()->discard(ins);
     535             : }
     536             : 
     537             : void
     538          43 : ObjectMemoryView::visitLoadFixedSlot(MLoadFixedSlot* ins)
     539             : {
     540             :     // Skip loads made on other objects.
     541          43 :     if (ins->object() != obj_)
     542          28 :         return;
     543             : 
     544             :     // Replace load by the slot value.
     545          15 :     if (state_->hasFixedSlot(ins->slot())) {
     546          15 :         ins->replaceAllUsesWith(state_->getFixedSlot(ins->slot()));
     547             :     } else {
     548             :         // UnsafeGetReserveSlot can access baked-in slots which are guarded by
     549             :         // conditions, which are not seen by the escape analysis.
     550           0 :         MBail* bailout = MBail::New(alloc_, Bailout_Inevitable);
     551           0 :         ins->block()->insertBefore(ins, bailout);
     552           0 :         ins->replaceAllUsesWith(undefinedVal_);
     553             :     }
     554             : 
     555             :     // Remove original instruction.
     556          15 :     ins->block()->discard(ins);
     557             : }
     558             : 
     559             : void
     560          26 : ObjectMemoryView::visitPostWriteBarrier(MPostWriteBarrier* ins)
     561             : {
      562             :     // Skip barriers on other objects.
     563          26 :     if (ins->object() != obj_)
     564          18 :         return;
     565             : 
     566             :     // Remove original instruction.
     567           8 :     ins->block()->discard(ins);
     568             : }
     569             : 
     570             : void
     571           0 : ObjectMemoryView::visitStoreSlot(MStoreSlot* ins)
     572             : {
     573             :     // Skip stores made on other objects.
     574           0 :     MSlots* slots = ins->slots()->toSlots();
     575           0 :     if (slots->object() != obj_) {
     576             :         // Guard objects are replaced when they are visited.
     577           0 :         MOZ_ASSERT(!slots->object()->isGuardShape() || slots->object()->toGuardShape()->object() != obj_);
     578           0 :         return;
     579             :     }
     580             : 
     581             :     // Clone the state and update the slot value.
     582           0 :     if (state_->hasDynamicSlot(ins->slot())) {
     583           0 :         state_ = BlockState::Copy(alloc_, state_);
     584           0 :         if (!state_) {
     585           0 :             oom_ = true;
     586           0 :             return;
     587             :         }
     588             : 
     589           0 :         state_->setDynamicSlot(ins->slot(), ins->value());
     590           0 :         ins->block()->insertBefore(ins->toInstruction(), state_);
     591             :     } else {
     592             :         // UnsafeSetReserveSlot can access baked-in slots which are guarded by
     593             :         // conditions, which are not seen by the escape analysis.
     594           0 :         MBail* bailout = MBail::New(alloc_, Bailout_Inevitable);
     595           0 :         ins->block()->insertBefore(ins, bailout);
     596             :     }
     597             : 
     598             :     // Remove original instruction.
     599           0 :     ins->block()->discard(ins);
     600             : }
     601             : 
     602             : void
     603           0 : ObjectMemoryView::visitLoadSlot(MLoadSlot* ins)
     604             : {
     605             :     // Skip loads made on other objects.
     606           0 :     MSlots* slots = ins->slots()->toSlots();
     607           0 :     if (slots->object() != obj_) {
     608             :         // Guard objects are replaced when they are visited.
     609           0 :         MOZ_ASSERT(!slots->object()->isGuardShape() || slots->object()->toGuardShape()->object() != obj_);
     610           0 :         return;
     611             :     }
     612             : 
     613             :     // Replace load by the slot value.
     614           0 :     if (state_->hasDynamicSlot(ins->slot())) {
     615           0 :         ins->replaceAllUsesWith(state_->getDynamicSlot(ins->slot()));
     616             :     } else {
     617             :         // UnsafeGetReserveSlot can access baked-in slots which are guarded by
     618             :         // conditions, which are not seen by the escape analysis.
     619           0 :         MBail* bailout = MBail::New(alloc_, Bailout_Inevitable);
     620           0 :         ins->block()->insertBefore(ins, bailout);
     621           0 :         ins->replaceAllUsesWith(undefinedVal_);
     622             :     }
     623             : 
     624             :     // Remove original instruction.
     625           0 :     ins->block()->discard(ins);
     626             : }
     627             : 
     628             : void
     629           0 : ObjectMemoryView::visitGuardShape(MGuardShape* ins)
     630             : {
      631             :     // Skip guards on other objects.
     632           0 :     if (ins->object() != obj_)
     633           0 :         return;
     634             : 
     635             :     // Replace the shape guard by its object.
     636           0 :     ins->replaceAllUsesWith(obj_);
     637             : 
     638             :     // Remove original instruction.
     639           0 :     ins->block()->discard(ins);
     640             : }
     641             : 
     642             : void
     643           0 : ObjectMemoryView::visitFunctionEnvironment(MFunctionEnvironment* ins)
     644             : {
      645             :     // Skip function environments which are not aliases of the NewCallObject.
     646           0 :     MDefinition* input = ins->input();
     647           0 :     if (input->isLambda()) {
     648           0 :         if (input->toLambda()->environmentChain() != obj_)
     649           0 :             return;
     650           0 :     } else if (input->isLambdaArrow()) {
     651           0 :         if (input->toLambdaArrow()->environmentChain() != obj_)
     652           0 :             return;
     653             :     } else {
     654           0 :         return;
     655             :     }
     656             : 
     657             :     // Replace the function environment by the scope chain of the lambda.
     658           0 :     ins->replaceAllUsesWith(obj_);
     659             : 
     660             :     // Remove original instruction.
     661           0 :     ins->block()->discard(ins);
     662             : }
     663             : 
     664             : void
     665           0 : ObjectMemoryView::visitLambda(MLambda* ins)
     666             : {
     667           0 :     if (ins->environmentChain() != obj_)
     668           0 :         return;
     669             : 
     670             :     // In order to recover the lambda we need to recover the scope chain, as the
     671             :     // lambda is holding it.
     672           0 :     ins->setIncompleteObject();
     673             : }
     674             : 
     675             : void
     676           0 : ObjectMemoryView::visitLambdaArrow(MLambdaArrow* ins)
     677             : {
     678           0 :     if (ins->environmentChain() != obj_)
     679           0 :         return;
     680             : 
     681           0 :     ins->setIncompleteObject();
     682             : }
     683             : 
     684             : static size_t
     685           0 : GetOffsetOf(MDefinition* index, size_t width, int32_t baseOffset)
     686             : {
     687           0 :     int32_t idx = index->toConstant()->toInt32();
     688           0 :     MOZ_ASSERT(idx >= 0);
     689           0 :     MOZ_ASSERT(baseOffset >= 0 && size_t(baseOffset) >= UnboxedPlainObject::offsetOfData());
     690           0 :     return idx * width + baseOffset - UnboxedPlainObject::offsetOfData();
     691             : }
     692             : 
     693             : static size_t
     694           0 : GetOffsetOf(MDefinition* index, Scalar::Type type, int32_t baseOffset)
     695             : {
     696           0 :     return GetOffsetOf(index, Scalar::byteSize(type), baseOffset);
     697             : }
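                      : 
                      : // Worked example with hypothetical numbers: for a constant index of 2, a
                      : // Float64 element (width 8), and a baseOffset equal to
                      : // UnboxedPlainObject::offsetOfData() + 16, the computed offset is
                      : //   2 * 8 + (offsetOfData() + 16) - offsetOfData() = 32
                      : // i.e. a byte offset relative to the start of the unboxed data.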
     698             : 
     699             : void
     700           0 : ObjectMemoryView::storeOffset(MInstruction* ins, size_t offset, MDefinition* value)
     701             : {
     702             :     // Clone the state and update the slot value.
     703           0 :     MOZ_ASSERT(state_->hasOffset(offset));
     704           0 :     state_ = BlockState::Copy(alloc_, state_);
     705           0 :     if (!state_) {
     706           0 :         oom_ = true;
     707           0 :         return;
     708             :     }
     709             : 
     710           0 :     state_->setOffset(offset, value);
     711           0 :     ins->block()->insertBefore(ins, state_);
     712             : 
     713             :     // Remove original instruction.
     714           0 :     ins->block()->discard(ins);
     715             : }
     716             : 
     717             : void
     718           0 : ObjectMemoryView::loadOffset(MInstruction* ins, size_t offset)
     719             : {
     720             :     // Replace load by the slot value.
     721           0 :     MOZ_ASSERT(state_->hasOffset(offset));
     722           0 :     ins->replaceAllUsesWith(state_->getOffset(offset));
     723             : 
     724             :     // Remove original instruction.
     725           0 :     ins->block()->discard(ins);
     726           0 : }
     727             : 
     728             : void
     729           0 : ObjectMemoryView::visitStoreUnboxedScalar(MStoreUnboxedScalar* ins)
     730             : {
     731             :     // Skip stores made on other objects.
     732           0 :     if (ins->elements() != obj_)
     733           0 :         return;
     734             : 
     735           0 :     size_t offset = GetOffsetOf(ins->index(), ins->storageType(), ins->offsetAdjustment());
     736           0 :     storeOffset(ins, offset, ins->value());
     737             : }
     738             : 
     739             : void
     740           0 : ObjectMemoryView::visitLoadUnboxedScalar(MLoadUnboxedScalar* ins)
     741             : {
     742             :     // Skip loads made on other objects.
     743           0 :     if (ins->elements() != obj_)
     744           0 :         return;
     745             : 
     746             :     // Replace load by the slot value.
     747           0 :     size_t offset = GetOffsetOf(ins->index(), ins->storageType(), ins->offsetAdjustment());
     748           0 :     loadOffset(ins, offset);
     749             : }
     750             : 
     751             : void
     752           0 : ObjectMemoryView::visitStoreUnboxedObjectOrNull(MStoreUnboxedObjectOrNull* ins)
     753             : {
     754             :     // Skip stores made on other objects.
     755           0 :     if (ins->elements() != obj_)
     756           0 :         return;
     757             : 
     758             :     // Clone the state and update the slot value.
     759           0 :     size_t offset = GetOffsetOf(ins->index(), sizeof(uintptr_t), ins->offsetAdjustment());
     760           0 :     storeOffset(ins, offset, ins->value());
     761             : }
     762             : 
     763             : void
     764           0 : ObjectMemoryView::visitLoadUnboxedObjectOrNull(MLoadUnboxedObjectOrNull* ins)
     765             : {
     766             :     // Skip loads made on other objects.
     767           0 :     if (ins->elements() != obj_)
     768           0 :         return;
     769             : 
     770             :     // Replace load by the slot value.
     771           0 :     size_t offset = GetOffsetOf(ins->index(), sizeof(uintptr_t), ins->offsetAdjustment());
     772           0 :     loadOffset(ins, offset);
     773             : }
     774             : 
     775             : void
     776           0 : ObjectMemoryView::visitStoreUnboxedString(MStoreUnboxedString* ins)
     777             : {
     778             :     // Skip stores made on other objects.
     779           0 :     if (ins->elements() != obj_)
     780           0 :         return;
     781             : 
     782             :     // Clone the state and update the slot value.
     783           0 :     size_t offset = GetOffsetOf(ins->index(), sizeof(uintptr_t), ins->offsetAdjustment());
     784           0 :     storeOffset(ins, offset, ins->value());
     785             : }
     786             : 
     787             : void
     788           0 : ObjectMemoryView::visitLoadUnboxedString(MLoadUnboxedString* ins)
     789             : {
     790             :     // Skip loads made on other objects.
     791           0 :     if (ins->elements() != obj_)
     792           0 :         return;
     793             : 
     794             :     // Replace load by the slot value.
     795           0 :     size_t offset = GetOffsetOf(ins->index(), sizeof(uintptr_t), ins->offsetAdjustment());
     796           0 :     loadOffset(ins, offset);
     797             : }
     798             : 
     799             : static bool
     800           0 : IndexOf(MDefinition* ins, int32_t* res)
     801             : {
     802           0 :     MOZ_ASSERT(ins->isLoadElement() || ins->isStoreElement());
     803           0 :     MDefinition* indexDef = ins->getOperand(1); // ins->index();
     804           0 :     if (indexDef->isBoundsCheck())
     805           0 :         indexDef = indexDef->toBoundsCheck()->index();
     806           0 :     if (indexDef->isToInt32())
     807           0 :         indexDef = indexDef->toToInt32()->getOperand(0);
     808           0 :     MConstant* indexDefConst = indexDef->maybeConstantValue();
     809           0 :     if (!indexDefConst || indexDefConst->type() != MIRType::Int32)
     810           0 :         return false;
     811           0 :     *res = indexDefConst->toInt32();
     812           0 :     return true;
     813             : }
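                      : 
                      : // For instance (a sketch of the MIR shapes peeled above), IndexOf recovers
                      : // the constant 3 from either of:
                      : //   storeelement elements, constant 3, value
                      : //   storeelement elements, toint32(boundscheck(constant 3, length)), value
                      : // and fails if, after peeling, the index is not a constant Int32.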
     814             : 
      815             : // Returns false if the elements are not escaped, i.e. they are optimizable by
      816             : // ScalarReplacementOfArray.
     817             : static bool
     818           0 : IsElementEscaped(MElements* def, uint32_t arraySize)
     819             : {
     820           0 :     JitSpewDef(JitSpew_Escape, "Check elements\n", def);
     821           0 :     JitSpewIndent spewIndent(JitSpew_Escape);
     822             : 
     823           0 :     for (MUseIterator i(def->usesBegin()); i != def->usesEnd(); i++) {
     824             :         // The MIRType::Elements cannot be captured in a resume point as
     825             :         // it does not represent a value allocation.
     826           0 :         MDefinition* access = (*i)->consumer()->toDefinition();
     827             : 
     828           0 :         switch (access->op()) {
     829             :           case MDefinition::Op_LoadElement: {
     830           0 :             MOZ_ASSERT(access->toLoadElement()->elements() == def);
     831             : 
      832             :             // If we need hole checks, then the array cannot be optimized,
      833             :             // as the access might refer to the prototype chain to look
      834             :             // for properties, and thus might have additional side effects
      835             :             // which are not reflected by the alias set if we are
      836             :             // bailing on holes.
     837           0 :             if (access->toLoadElement()->needsHoleCheck()) {
     838             :                 JitSpewDef(JitSpew_Escape,
     839           0 :                            "has a load element with a hole check\n", access);
     840           0 :                 return true;
     841             :             }
     842             : 
     843             :             // If the index is not a constant then this index can alias
     844             :             // all others. We do not handle this case.
     845             :             int32_t index;
     846           0 :             if (!IndexOf(access, &index)) {
     847             :                 JitSpewDef(JitSpew_Escape,
     848           0 :                            "has a load element with a non-trivial index\n", access);
     849           0 :                 return true;
     850             :             }
     851           0 :             if (index < 0 || arraySize <= uint32_t(index)) {
     852             :                 JitSpewDef(JitSpew_Escape,
     853           0 :                            "has a load element with an out-of-bound index\n", access);
     854           0 :                 return true;
     855             :             }
     856           0 :             break;
     857             :           }
     858             : 
     859             :           case MDefinition::Op_StoreElement: {
     860           0 :             MOZ_ASSERT(access->toStoreElement()->elements() == def);
     861             : 
      862             :             // If we need hole checks, then the array cannot be optimized,
      863             :             // as the access might refer to the prototype chain to look
      864             :             // for properties, and thus might have additional side effects
      865             :             // which are not reflected by the alias set if we are
      866             :             // bailing on holes.
     867           0 :             if (access->toStoreElement()->needsHoleCheck()) {
     868             :                 JitSpewDef(JitSpew_Escape,
     869           0 :                            "has a store element with a hole check\n", access);
     870           0 :                 return true;
     871             :             }
     872             : 
     873             :             // If the index is not a constant then this index can alias
     874             :             // all others. We do not handle this case.
     875             :             int32_t index;
     876           0 :             if (!IndexOf(access, &index)) {
     877           0 :                 JitSpewDef(JitSpew_Escape, "has a store element with a non-trivial index\n", access);
     878           0 :                 return true;
     879             :             }
     880           0 :             if (index < 0 || arraySize <= uint32_t(index)) {
     881           0 :                 JitSpewDef(JitSpew_Escape, "has a store element with an out-of-bound index\n", access);
     882           0 :                 return true;
     883             :             }
     884             : 
     885             :             // We are not yet encoding magic hole constants in resume points.
     886           0 :             if (access->toStoreElement()->value()->type() == MIRType::MagicHole) {
      887           0 :                 JitSpewDef(JitSpew_Escape, "has a store element with a magic-hole constant\n", access);
     888           0 :                 return true;
     889             :             }
     890           0 :             break;
     891             :           }
     892             : 
     893             :           case MDefinition::Op_SetInitializedLength:
     894           0 :             MOZ_ASSERT(access->toSetInitializedLength()->elements() == def);
     895           0 :             break;
     896             : 
     897             :           case MDefinition::Op_InitializedLength:
     898           0 :             MOZ_ASSERT(access->toInitializedLength()->elements() == def);
     899           0 :             break;
     900             : 
     901             :           case MDefinition::Op_ArrayLength:
     902           0 :             MOZ_ASSERT(access->toArrayLength()->elements() == def);
     903           0 :             break;
     904             : 
     905             :           default:
     906           0 :             JitSpewDef(JitSpew_Escape, "is escaped by\n", access);
     907           0 :             return true;
     908             :         }
     909             :     }
     910           0 :     JitSpew(JitSpew_Escape, "Elements is not escaped");
     911           0 :     return false;
     912             : }
     913             : 
      914             : // Returns false if the array is not escaped, i.e. it is optimizable by
      915             : // ScalarReplacementOfArray.
      916             : //
      917             : // For the moment, this code is dumb as it only supports arrays which do not
      918             : // change length and are only accessed at known constant indexes.
     919             : static bool
     920           5 : IsArrayEscaped(MInstruction* ins)
     921             : {
     922           5 :     MOZ_ASSERT(ins->type() == MIRType::Object);
     923           5 :     MOZ_ASSERT(ins->isNewArray());
     924           5 :     uint32_t length = ins->toNewArray()->length();
     925             : 
     926           5 :     JitSpewDef(JitSpew_Escape, "Check array\n", ins);
     927          10 :     JitSpewIndent spewIndent(JitSpew_Escape);
     928             : 
     929           5 :     JSObject* obj = ins->toNewArray()->templateObject();
     930           5 :     if (!obj) {
     931           0 :         JitSpew(JitSpew_Escape, "No template object defined.");
     932           0 :         return true;
     933             :     }
     934             : 
     935           5 :     if (obj->is<UnboxedArrayObject>()) {
      936           0 :         JitSpew(JitSpew_Escape, "Template object is an unboxed array object.");
     937           0 :         return true;
     938             :     }
     939             : 
     940           5 :     if (length >= 16) {
     941           0 :         JitSpew(JitSpew_Escape, "Array has too many elements");
     942           0 :         return true;
     943             :     }
     944             : 
      945             :     // Check if the object is escaped. If the object is not the first operand
      946             :     // of a known Store / Load, then we consider it escaped. This is a
      947             :     // cheap and conservative escape analysis.
     948           9 :     for (MUseIterator i(ins->usesBegin()); i != ins->usesEnd(); i++) {
     949           9 :         MNode* consumer = (*i)->consumer();
     950           9 :         if (!consumer->isDefinition()) {
     951             :             // Cannot optimize if it is observable from fun.arguments or others.
     952           4 :             if (!consumer->toResumePoint()->isRecoverableOperand(*i)) {
     953           0 :                 JitSpew(JitSpew_Escape, "Observable array cannot be recovered");
     954           5 :                 return true;
     955             :             }
     956           4 :             continue;
     957             :         }
     958             : 
     959           5 :         MDefinition* def = consumer->toDefinition();
     960           5 :         switch (def->op()) {
     961             :           case MDefinition::Op_Elements: {
     962           0 :             MElements *elem = def->toElements();
     963           0 :             MOZ_ASSERT(elem->object() == ins);
     964           0 :             if (IsElementEscaped(elem, length)) {
     965           0 :                 JitSpewDef(JitSpew_Escape, "is indirectly escaped by\n", elem);
     966           0 :                 return true;
     967             :             }
     968             : 
     969           0 :             break;
     970             :           }
     971             : 
     972             :           // This instruction is a no-op used to verify that scalar replacement
     973             :           // is working as expected in jit-test.
     974             :           case MDefinition::Op_AssertRecoveredOnBailout:
     975           0 :             break;
     976             : 
     977             :           default:
     978           5 :             JitSpewDef(JitSpew_Escape, "is escaped by\n", def);
     979           5 :             return true;
     980             :         }
     981             :     }
     982             : 
     983           0 :     JitSpew(JitSpew_Escape, "Array is not escaped");
     984           0 :     return false;
     985             : }
     986             : 
     987             : // This class replaces every MStoreElement and MSetInitializedLength by an
     988             : // MArrayState which emulates the content of the array. All MLoadElement,
     989             : // MInitializedLength and MArrayLength are replaced by the corresponding value.
     990             : //
     991             : // In order to restore the value of the array correctly in case of bailouts, we
      992             : // replace all references to the allocation by the MArrayState definition.
     993             : class ArrayMemoryView : public MDefinitionVisitorDefaultNoop
     994             : {
     995             :   public:
     996             :     typedef MArrayState BlockState;
     997             :     static const char* phaseName;
     998             : 
     999             :   private:
    1000             :     TempAllocator& alloc_;
    1001             :     MConstant* undefinedVal_;
    1002             :     MConstant* length_;
    1003             :     MInstruction* arr_;
    1004             :     MBasicBlock* startBlock_;
    1005             :     BlockState* state_;
    1006             : 
    1007             :     // Used to reduce memory usage by sharing common modifications.
    1008             :     const MResumePoint* lastResumePoint_;
    1009             : 
    1010             :     bool oom_;
    1011             : 
    1012             :   public:
    1013             :     ArrayMemoryView(TempAllocator& alloc, MInstruction* arr);
    1014             : 
    1015             :     MBasicBlock* startingBlock();
    1016             :     bool initStartingState(BlockState** pState);
    1017             : 
    1018             :     void setEntryBlockState(BlockState* state);
    1019             :     bool mergeIntoSuccessorState(MBasicBlock* curr, MBasicBlock* succ, BlockState** pSuccState);
    1020             : 
    1021             : #ifdef DEBUG
    1022             :     void assertSuccess();
    1023             : #else
    1024             :     void assertSuccess() {}
    1025             : #endif
    1026             : 
    1027           0 :     bool oom() const { return oom_; }
    1028             : 
    1029             :   private:
    1030             :     bool isArrayStateElements(MDefinition* elements);
    1031             :     void discardInstruction(MInstruction* ins, MDefinition* elements);
    1032             : 
    1033             :   public:
    1034             :     void visitResumePoint(MResumePoint* rp);
    1035             :     void visitArrayState(MArrayState* ins);
    1036             :     void visitStoreElement(MStoreElement* ins);
    1037             :     void visitLoadElement(MLoadElement* ins);
    1038             :     void visitSetInitializedLength(MSetInitializedLength* ins);
    1039             :     void visitInitializedLength(MInitializedLength* ins);
    1040             :     void visitArrayLength(MArrayLength* ins);
    1041             : };
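
A rough mental model of the BlockState used by this view: a compile-time image
of the array, with one slot per element plus the initialized length.  The
following self-contained approximation uses a hypothetical Value alias and a
FakeArrayState class (neither exists in the engine) to show how the visitors
below map element stores and loads onto the state:

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    using Value = int32_t;                        // stands in for an MDefinition*

    class FakeArrayState {                        // stands in for MArrayState
        std::vector<Value> elements_;
        int32_t initLength_ = 0;
      public:
        FakeArrayState(size_t numElements, Value undef)
          : elements_(numElements, undef) {}
        void setElement(size_t i, Value v) { elements_[i] = v; }      // MStoreElement
        Value getElement(size_t i) const { return elements_[i]; }     // MLoadElement
        void setInitializedLength(int32_t len) { initLength_ = len; } // MSetInitializedLength
        int32_t initializedLength() const { return initLength_; }     // MInitializedLength
        int32_t length() const { return int32_t(elements_.size()); }  // MArrayLength
    };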
    1042             : 
    1043             : const char* ArrayMemoryView::phaseName = "Scalar Replacement of Array";
    1044             : 
    1045           0 : ArrayMemoryView::ArrayMemoryView(TempAllocator& alloc, MInstruction* arr)
    1046             :   : alloc_(alloc),
    1047             :     undefinedVal_(nullptr),
    1048             :     length_(nullptr),
    1049             :     arr_(arr),
    1050           0 :     startBlock_(arr->block()),
    1051             :     state_(nullptr),
    1052             :     lastResumePoint_(nullptr),
    1053           0 :     oom_(false)
    1054             : {
    1055             :     // Annotate the snapshots' RValue such that we recover the stores first.
    1056           0 :     arr_->setIncompleteObject();
    1057             : 
    1058             :     // Annotate the instruction such that we do not replace it by a
    1059             :     // Magic(JS_OPTIMIZED_OUT) in case of removed uses.
    1060           0 :     arr_->setImplicitlyUsedUnchecked();
    1061           0 : }
    1062             : 
    1063             : MBasicBlock*
    1064           0 : ArrayMemoryView::startingBlock()
    1065             : {
    1066           0 :     return startBlock_;
    1067             : }
    1068             : 
    1069             : bool
    1070           0 : ArrayMemoryView::initStartingState(BlockState** pState)
    1071             : {
    1072             :     // Uninitialized elements have an "undefined" value.
    1073           0 :     undefinedVal_ = MConstant::New(alloc_, UndefinedValue());
    1074           0 :     MConstant* initLength = MConstant::New(alloc_, Int32Value(0));
    1075           0 :     arr_->block()->insertBefore(arr_, undefinedVal_);
    1076           0 :     arr_->block()->insertBefore(arr_, initLength);
    1077             : 
    1078             :     // Create a new block state and insert it at the location of the new array.
    1079           0 :     BlockState* state = BlockState::New(alloc_, arr_, undefinedVal_, initLength);
    1080           0 :     if (!state)
    1081           0 :         return false;
    1082             : 
    1083           0 :     startBlock_->insertAfter(arr_, state);
    1084             : 
    1085             :     // Keep the state out of resume points until it has been visited.
    1086           0 :     state->setInWorklist();
    1087             : 
    1088           0 :     *pState = state;
    1089           0 :     return true;
    1090             : }
    1091             : 
    1092             : void
    1093           0 : ArrayMemoryView::setEntryBlockState(BlockState* state)
    1094             : {
    1095           0 :     state_ = state;
    1096           0 : }
    1097             : 
    1098             : bool
    1099           0 : ArrayMemoryView::mergeIntoSuccessorState(MBasicBlock* curr, MBasicBlock* succ,
    1100             :                                           BlockState** pSuccState)
    1101             : {
    1102           0 :     BlockState* succState = *pSuccState;
    1103             : 
    1104             :     // When a block has no state yet, create an empty one for the
    1105             :     // successor.
    1106           0 :     if (!succState) {
    1107             :         // If the successor is not dominated, then the array cannot flow
    1108             :         // into this basic block without a Phi.  We know that no Phi exists
    1109             :         // in non-dominated successors, as the conservative escape
    1110             :         // analysis would have failed otherwise.  This can happen when the
    1111             :         // successor is a join at the end of an if-block and the array
    1112             :         // only exists within the branch.
    1113           0 :         if (!startBlock_->dominates(succ))
    1114           0 :             return true;
    1115             : 
    1116             :         // If there is only one predecessor, carry over the last state of the
    1117             :         // block to the successor.  As the block state is immutable, if the
    1118             :         // current block has multiple successors, they will share the same entry
    1119             :         // state.
    1120           0 :         if (succ->numPredecessors() <= 1 || !state_->numElements()) {
    1121           0 :             *pSuccState = state_;
    1122           0 :             return true;
    1123             :         }
    1124             : 
    1125             :         // If there are multiple predecessors, then we allocate one Phi node per
    1126             :         // element, with one input per predecessor, and create a new block state
    1127             :         // which only has phi nodes.  Redundant phi nodes are removed later by
    1128             :         // EliminatePhis.
    1129           0 :         succState = BlockState::Copy(alloc_, state_);
    1130           0 :         if (!succState)
    1131           0 :             return false;
    1132             : 
    1133           0 :         size_t numPreds = succ->numPredecessors();
    1134           0 :         for (size_t index = 0; index < state_->numElements(); index++) {
    1135           0 :             MPhi* phi = MPhi::New(alloc_);
    1136           0 :             if (!phi->reserveLength(numPreds))
    1137           0 :                 return false;
    1138             : 
    1139             :             // Fill the inputs of the successor's Phi with undefined
    1140             :             // values; each predecessor block fills in its input later.
    1141           0 :             for (size_t p = 0; p < numPreds; p++)
    1142           0 :                 phi->addInput(undefinedVal_);
    1143             : 
    1144             :             // Add Phi in the list of Phis of the basic block.
    1145           0 :             succ->addPhi(phi);
    1146           0 :             succState->setElement(index, phi);
    1147             :         }
    1148             : 
    1149             :         // Insert the newly created block state instruction at the beginning
    1150             :         // of the successor block, after all the phi nodes.  Note that it
    1151             :         // would be captured by the entry resume point of the successor
    1152             :         // block.
    1153           0 :         succ->insertBefore(succ->safeInsertTop(), succState);
    1154           0 :         *pSuccState = succState;
    1155             :     }
    1156             : 
    1157           0 :     MOZ_ASSERT_IF(succ == startBlock_, startBlock_->isLoopHeader());
    1158           0 :     if (succ->numPredecessors() > 1 && succState->numElements() && succ != startBlock_) {
    1159             :         // We need to re-compute successorWithPhis as the previous EliminatePhis
    1160             :         // phase might have removed all the Phis from the successor block.
    1161             :         size_t currIndex;
    1162           0 :         MOZ_ASSERT(!succ->phisEmpty());
    1163           0 :         if (curr->successorWithPhis()) {
    1164           0 :             MOZ_ASSERT(curr->successorWithPhis() == succ);
    1165           0 :             currIndex = curr->positionInPhiSuccessor();
    1166             :         } else {
    1167           0 :             currIndex = succ->indexForPredecessor(curr);
    1168           0 :             curr->setSuccessorWithPhis(succ, currIndex);
    1169             :         }
    1170           0 :         MOZ_ASSERT(succ->getPredecessor(currIndex) == curr);
    1171             : 
    1172             :         // Copy the current element states into the slot of the current block in
    1173             :         // all the Phis created during the first visit of the successor.
    1174           0 :         for (size_t index = 0; index < state_->numElements(); index++) {
    1175           0 :             MPhi* phi = succState->getElement(index)->toPhi();
    1176           0 :             phi->replaceOperand(currIndex, state_->getElement(index));
    1177             :         }
    1178             :     }
    1179             : 
    1180           0 :     return true;
    1181             : }
    1182             : 
    1183             : #ifdef DEBUG
    1184             : void
    1185           0 : ArrayMemoryView::assertSuccess()
    1186             : {
    1187           0 :     MOZ_ASSERT(!arr_->hasLiveDefUses());
    1188           0 : }
    1189             : #endif
    1190             : 
    1191             : void
    1192           0 : ArrayMemoryView::visitResumePoint(MResumePoint* rp)
    1193             : {
    1194             :     // As long as the MArrayState is not yet seen next to the allocation, we do
    1195             :     // not patch the resume point to recover the side effects.
    1196           0 :     if (!state_->isInWorklist()) {
    1197           0 :         rp->addStore(alloc_, state_, lastResumePoint_);
    1198           0 :         lastResumePoint_ = rp;
    1199             :     }
    1200           0 : }
    1201             : 
    1202             : void
    1203           0 : ArrayMemoryView::visitArrayState(MArrayState* ins)
    1204             : {
    1205           0 :     if (ins->isInWorklist())
    1206           0 :         ins->setNotInWorklist();
    1207           0 : }
    1208             : 
    1209             : bool
    1210           0 : ArrayMemoryView::isArrayStateElements(MDefinition* elements)
    1211             : {
    1212           0 :     return elements->isElements() && elements->toElements()->object() == arr_;
    1213             : }
    1214             : 
    1215             : void
    1216           0 : ArrayMemoryView::discardInstruction(MInstruction* ins, MDefinition* elements)
    1217             : {
    1218           0 :     MOZ_ASSERT(elements->isElements());
    1219           0 :     ins->block()->discard(ins);
    1220           0 :     if (!elements->hasLiveDefUses())
    1221           0 :         elements->block()->discard(elements->toInstruction());
    1222           0 : }
    1223             : 
    1224             : void
    1225           0 : ArrayMemoryView::visitStoreElement(MStoreElement* ins)
    1226             : {
    1227             :     // Skip other array objects.
    1228           0 :     MDefinition* elements = ins->elements();
    1229           0 :     if (!isArrayStateElements(elements))
    1230           0 :         return;
    1231             : 
    1232             :     // Register the stored value in the state.
    1233             :     int32_t index;
    1234           0 :     MOZ_ALWAYS_TRUE(IndexOf(ins, &index));
    1235           0 :     state_ = BlockState::Copy(alloc_, state_);
    1236           0 :     if (!state_) {
    1237           0 :         oom_ = true;
    1238           0 :         return;
    1239             :     }
    1240             : 
    1241           0 :     state_->setElement(index, ins->value());
    1242           0 :     ins->block()->insertBefore(ins, state_);
    1243             : 
    1244             :     // Remove original instruction.
    1245           0 :     discardInstruction(ins, elements);
    1246             : }
    1247             : 
    1248             : void
    1249           0 : ArrayMemoryView::visitLoadElement(MLoadElement* ins)
    1250             : {
    1251             :     // Skip other array objects.
    1252           0 :     MDefinition* elements = ins->elements();
    1253           0 :     if (!isArrayStateElements(elements))
    1254           0 :         return;
    1255             : 
    1256             :     // Replace by the value contained at the index.
    1257             :     int32_t index;
    1258           0 :     MOZ_ALWAYS_TRUE(IndexOf(ins, &index));
    1259           0 :     ins->replaceAllUsesWith(state_->getElement(index));
    1260             : 
    1261             :     // Remove original instruction.
    1262           0 :     discardInstruction(ins, elements);
    1263             : }
    1264             : 
    1265             : void
    1266           0 : ArrayMemoryView::visitSetInitializedLength(MSetInitializedLength* ins)
    1267             : {
    1268             :     // Skip other array objects.
    1269           0 :     MDefinition* elements = ins->elements();
    1270           0 :     if (!isArrayStateElements(elements))
    1271           0 :         return;
    1272             : 
    1273             :     // Replace by the new initialized length.  Note that the argument of
    1274             :     // MSetInitializedLength is the last index and not the initialized length.
    1275             :     // To obtain the length, we need to add 1 to it, and thus we need to create
    1276             :     // a new constant that we register in the ArrayState.
    1277           0 :     state_ = BlockState::Copy(alloc_, state_);
    1278           0 :     if (!state_) {
    1279           0 :         oom_ = true;
    1280           0 :         return;
    1281             :     }
    1282             : 
    1283           0 :     int32_t initLengthValue = ins->index()->maybeConstantValue()->toInt32() + 1;
    1284           0 :     MConstant* initLength = MConstant::New(alloc_, Int32Value(initLengthValue));
    1285           0 :     ins->block()->insertBefore(ins, initLength);
    1286           0 :     ins->block()->insertBefore(ins, state_);
    1287           0 :     state_->setInitializedLength(initLength);
    1288             : 
    1289             :     // Remove original instruction.
    1290           0 :     discardInstruction(ins, elements);
    1291             : }
    1292             : 
    1293             : void
    1294           0 : ArrayMemoryView::visitInitializedLength(MInitializedLength* ins)
    1295             : {
    1296             :     // Skip other array objects.
    1297           0 :     MDefinition* elements = ins->elements();
    1298           0 :     if (!isArrayStateElements(elements))
    1299           0 :         return;
    1300             : 
    1301             :     // Replace by the value of the length.
    1302           0 :     ins->replaceAllUsesWith(state_->initializedLength());
    1303             : 
    1304             :     // Remove original instruction.
    1305           0 :     discardInstruction(ins, elements);
    1306             : }
    1307             : 
    1308             : void
    1309           0 : ArrayMemoryView::visitArrayLength(MArrayLength* ins)
    1310             : {
    1311             :     // Skip other array objects.
    1312           0 :     MDefinition* elements = ins->elements();
    1313           0 :     if (!isArrayStateElements(elements))
    1314           0 :         return;
    1315             : 
    1316             :     // Replace by the value of the length.
    1317           0 :     if (!length_) {
    1318           0 :         length_ = MConstant::New(alloc_, Int32Value(state_->numElements()));
    1319           0 :         arr_->block()->insertBefore(arr_, length_);
    1320             :     }
    1321           0 :     ins->replaceAllUsesWith(length_);
    1322             : 
    1323             :     // Remove original instruction.
    1324           0 :     discardInstruction(ins, elements);
    1325             : }
    1326             : 
    1327             : bool
    1328           8 : ScalarReplacement(MIRGenerator* mir, MIRGraph& graph)
    1329             : {
    1330          16 :     EmulateStateOf<ObjectMemoryView> replaceObject(mir, graph);
    1331          16 :     EmulateStateOf<ArrayMemoryView> replaceArray(mir, graph);
    1332           8 :     bool addedPhi = false;
    1333             : 
    1334         461 :     for (ReversePostorderIterator block = graph.rpoBegin(); block != graph.rpoEnd(); block++) {
    1335         453 :         if (mir->shouldCancel("Scalar Replacement (main loop)"))
    1336           0 :             return false;
    1337             : 
    1338        2748 :         for (MInstructionIterator ins = block->begin(); ins != block->end(); ins++) {
    1339        2295 :             if (IsOptimizableObjectInstruction(*ins) && !IsObjectEscaped(*ins))
    1340             :             {
    1341           5 :                 ObjectMemoryView view(graph.alloc(), *ins);
    1342           5 :                 if (!replaceObject.run(view))
    1343           0 :                     return false;
    1344           5 :                 view.assertSuccess();
    1345           5 :                 addedPhi = true;
    1346           5 :                 continue;
    1347             :             }
    1348             : 
    1349        2290 :             if (ins->isNewArray() && !IsArrayEscaped(*ins)) {
    1350           0 :                 ArrayMemoryView view(graph.alloc(), *ins);
    1351           0 :                 if (!replaceArray.run(view))
    1352           0 :                     return false;
    1353           0 :                 view.assertSuccess();
    1354           0 :                 addedPhi = true;
    1355           0 :                 continue;
    1356             :             }
    1357             :         }
    1358             :     }
    1359             : 
    1360           8 :     if (addedPhi) {
    1361             :         // Phis added by Scalar Replacement are only redundant Phis: they are
    1362             :         // not directly captured by any resume point, only by the MDefinition
    1363             :         // state.  The conservative observability check only considers Phis
    1364             :         // which are not used as resume point operands.
    1365           1 :         AssertExtendedGraphCoherency(graph);
    1366           1 :         if (!EliminatePhis(mir, graph, ConservativeObservability))
    1367           0 :             return false;
    1368             :     }
    1369             : 
    1370           8 :     return true;
    1371             : }
    1372             : 
    1373             : } /* namespace jit */
    1374             : } /* namespace js */

Generated by: LCOV version 1.13