/*
 * Copyright (C) 2011, 2012, 2013 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "DFGByteCodeParser.h"

#if ENABLE(DFG_JIT)

#include "ArrayConstructor.h"
#include "CallLinkStatus.h"
#include "CodeBlock.h"
#include "CodeBlockWithJITType.h"
#include "DFGArrayMode.h"
#include "DFGCapabilities.h"
#include "DFGJITCode.h"
#include "GetByIdStatus.h"
#include "Operations.h"
#include "PreciseJumpTargets.h"
#include "PutByIdStatus.h"
#include "StringConstructor.h"
#include <wtf/CommaPrinter.h>
#include <wtf/HashMap.h>
#include <wtf/MathExtras.h>
#include <wtf/StdLibExtras.h>

namespace JSC { namespace DFG {

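// Identifies a constant buffer by the CodeBlock that owns it and the buffer's
// index within that block. A null CodeBlock with a non-zero index serves as
// the hash table's deleted value.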
class ConstantBufferKey {
public:
    ConstantBufferKey()
        : m_codeBlock(0)
        , m_index(0)
    {
    }
    
    ConstantBufferKey(WTF::HashTableDeletedValueType)
        : m_codeBlock(0)
        , m_index(1)
    {
    }
    
    ConstantBufferKey(CodeBlock* codeBlock, unsigned index)
        : m_codeBlock(codeBlock)
        , m_index(index)
    {
    }
    
    bool operator==(const ConstantBufferKey& other) const
    {
        return m_codeBlock == other.m_codeBlock
            && m_index == other.m_index;
    }
    
    unsigned hash() const
    {
        return WTF::PtrHash<CodeBlock*>::hash(m_codeBlock) ^ m_index;
    }
    
    bool isHashTableDeletedValue() const
    {
        return !m_codeBlock && m_index;
    }
    
    CodeBlock* codeBlock() const { return m_codeBlock; }
    unsigned index() const { return m_index; }
    
private:
    CodeBlock* m_codeBlock;
    unsigned m_index;
};

struct ConstantBufferKeyHash {
    static unsigned hash(const ConstantBufferKey& key) { return key.hash(); }
    static bool equal(const ConstantBufferKey& a, const ConstantBufferKey& b)
    {
        return a == b;
    }
    
    static const bool safeToCompareToEmptyOrDeleted = true;
};

} } // namespace JSC::DFG

namespace WTF {

template<typename T> struct DefaultHash;
template<> struct DefaultHash<JSC::DFG::ConstantBufferKey> {
    typedef JSC::DFG::ConstantBufferKeyHash Hash;
};

template<typename T> struct HashTraits;
template<> struct HashTraits<JSC::DFG::ConstantBufferKey> : SimpleClassHashTraits<JSC::DFG::ConstantBufferKey> { };

} // namespace WTF

namespace JSC { namespace DFG {

// === ByteCodeParser ===
//
// This class is used to compile the dataflow graph from a CodeBlock.
class ByteCodeParser {
public:
    ByteCodeParser(Graph& graph)
        : m_vm(&graph.m_vm)
        , m_codeBlock(graph.m_codeBlock)
        , m_profiledBlock(graph.m_profiledBlock)
        , m_graph(graph)
        , m_currentBlock(0)
        , m_currentIndex(0)
        , m_constantUndefined(UINT_MAX)
        , m_constantNull(UINT_MAX)
        , m_constantNaN(UINT_MAX)
        , m_constant1(UINT_MAX)
        , m_constants(m_codeBlock->numberOfConstantRegisters())
        , m_numArguments(m_codeBlock->numParameters())
        , m_numLocals(m_codeBlock->m_numCalleeRegisters)
        , m_parameterSlots(0)
        , m_numPassedVarArgs(0)
        , m_inlineStackTop(0)
        , m_haveBuiltOperandMaps(false)
        , m_emptyJSValueIndex(UINT_MAX)
        , m_currentInstruction(0)
    {
        ASSERT(m_profiledBlock);
    }

    // Parse a full CodeBlock of bytecode.
    bool parse();

private:
    struct InlineStackEntry;

    // Just parse from m_currentIndex to the end of the current CodeBlock.
    void parseCodeBlock();

    // Helper for min and max.
    bool handleMinMax(int resultOperand, NodeType op, int registerOffset, int argumentCountIncludingThis);

    // Handle calls. This resolves issues surrounding inlining and intrinsics.
    void handleCall(Instruction* currentInstruction, NodeType op, CodeSpecializationKind);
    void emitFunctionChecks(const CallLinkStatus&, Node* callTarget, int registerOffset, CodeSpecializationKind);
    void emitArgumentPhantoms(int registerOffset, int argumentCountIncludingThis, CodeSpecializationKind);
    // Handle inlining. Return true if it succeeded, false if we need to plant a call.
    bool handleInlining(Node* callTargetNode, int resultOperand, const CallLinkStatus&, int registerOffset, int argumentCountIncludingThis, unsigned nextOffset, CodeSpecializationKind);
    // Handle intrinsic functions. Return true if it succeeded, false if we need to plant a call.
    bool handleIntrinsic(int resultOperand, Intrinsic, int registerOffset, int argumentCountIncludingThis, SpeculatedType prediction);
    bool handleTypedArrayConstructor(int resultOperand, InternalFunction*, int registerOffset, int argumentCountIncludingThis, TypedArrayType);
    bool handleConstantInternalFunction(int resultOperand, InternalFunction*, int registerOffset, int argumentCountIncludingThis, SpeculatedType prediction, CodeSpecializationKind);
    Node* handlePutByOffset(Node* base, unsigned identifier, PropertyOffset, Node* value);
    Node* handleGetByOffset(SpeculatedType, Node* base, unsigned identifierNumber, PropertyOffset);
    void handleGetByOffset(
        int destinationOperand, SpeculatedType, Node* base, unsigned identifierNumber,
        PropertyOffset);
    void handleGetById(
        int destinationOperand, SpeculatedType, Node* base, unsigned identifierNumber,
        const GetByIdStatus&);

    Node* getScope(bool skipTop, unsigned skipCount);

    // Prepare to parse a block.
    void prepareToParseBlock();
    // Parse a single basic block of bytecode instructions.
    bool parseBlock(unsigned limit);
    // Link block successors.
    void linkBlock(BasicBlock*, Vector<BasicBlock*>& possibleTargets);
    void linkBlocks(Vector<UnlinkedBlock>& unlinkedBlocks, Vector<BasicBlock*>& possibleTargets);

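    // Create a fresh VariableAccessData for the given operand. Callers reuse the
    // one recorded in variablesAtTail when the current block already has a node
    // for this operand.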
    VariableAccessData* newVariableAccessData(VirtualRegister operand, bool isCaptured)
    {
        ASSERT(!operand.isConstant());

        m_graph.m_variableAccessData.append(VariableAccessData(operand, isCaptured));
        return &m_graph.m_variableAccessData.last();
    }

    // Get/Set the operands/result of a bytecode instruction.
    Node* getDirect(VirtualRegister operand)
    {
        // Is this a constant?
        if (operand.isConstant()) {
            unsigned constant = operand.toConstantIndex();
            ASSERT(constant < m_constants.size());
            return getJSConstant(constant);
        }

        // Is this an argument?
        if (operand.isArgument())
            return getArgument(operand);

        // Must be a local.
        return getLocal(operand);
    }

    Node* get(VirtualRegister operand)
    {
        if (inlineCallFrame()) {
            if (!inlineCallFrame()->isClosureCall) {
                JSFunction* callee = inlineCallFrame()->calleeConstant();
                if (operand.offset() == JSStack::Callee)
                    return cellConstant(callee);
                if (operand.offset() == JSStack::ScopeChain)
                    return cellConstant(callee->scope());
            }
        } else if (operand.offset() == JSStack::Callee)
            return addToGraph(GetCallee);
        else if (operand.offset() == JSStack::ScopeChain)
            return addToGraph(GetMyScope);

        return getDirect(m_inlineStackTop->remapOperand(operand));
    }

    enum SetMode { NormalSet, SetOnEntry };
    Node* setDirect(VirtualRegister operand, Node* value, SetMode setMode = NormalSet)
    {
        // Is this an argument?
        if (operand.isArgument())
            return setArgument(operand, value, setMode);

        // Must be a local.
        return setLocal(operand, value, setMode);
    }

    Node* set(VirtualRegister operand, Node* value, SetMode setMode = NormalSet)
    {
        return setDirect(m_inlineStackTop->remapOperand(operand), value, setMode);
    }

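    // Attach the lazy operand value profile's prediction for the local read by
    // this GetLocal to its VariableAccessData.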
    Node* injectLazyOperandSpeculation(Node* node)
    {
        ASSERT(node->op() == GetLocal);
        ASSERT(node->codeOrigin.bytecodeIndex == m_currentIndex);
        ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
        LazyOperandValueProfileKey key(m_currentIndex, node->local());
        SpeculatedType prediction = m_inlineStackTop->m_lazyOperands.prediction(locker, key);
#if DFG_ENABLE(DEBUG_VERBOSE)
        dataLog("Lazy operand [@", node->index(), ", bc#", m_currentIndex, ", r", node->local(), "] prediction: ", SpeculationDump(prediction), "\n");
#endif
        node->variableAccessData()->predict(prediction);
        return node;
    }

    // Used in implementing get/set, above, where the operand is a local variable.
    Node* getLocal(VirtualRegister operand)
    {
        unsigned local = operand.toLocal();
        Node* node = m_currentBlock->variablesAtTail.local(local);
        bool isCaptured = m_codeBlock->isCaptured(operand, inlineCallFrame());

        // This has two goals: 1) link together variable access datas, and 2)
        // try to avoid creating redundant GetLocals. (1) is required for
        // correctness - no other phase will ensure that block-local variable
        // access data unification is done correctly. (2) is purely opportunistic
        // and is meant as a compile-time optimization only.

        VariableAccessData* variable;

        if (node) {
            variable = node->variableAccessData();
            variable->mergeIsCaptured(isCaptured);

            if (!isCaptured) {
                switch (node->op()) {
                case GetLocal:
                    return node;
                case SetLocal:
                    return node->child1().node();
                default:
                    break;
                }
            }
        } else
            variable = newVariableAccessData(operand, isCaptured);

        node = injectLazyOperandSpeculation(addToGraph(GetLocal, OpInfo(variable)));
        m_currentBlock->variablesAtTail.local(local) = node;
        return node;
    }

    Node* setLocal(VirtualRegister operand, Node* value, SetMode setMode = NormalSet)
    {
        unsigned local = operand.toLocal();
        bool isCaptured = m_codeBlock->isCaptured(operand, inlineCallFrame());

        if (setMode == NormalSet) {
            ArgumentPosition* argumentPosition = findArgumentPositionForLocal(operand);
            if (isCaptured || argumentPosition)
                flushDirect(operand, argumentPosition);
        }

        VariableAccessData* variableAccessData = newVariableAccessData(operand, isCaptured);
        variableAccessData->mergeStructureCheckHoistingFailed(
            m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache));
        variableAccessData->mergeCheckArrayHoistingFailed(
            m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadIndexingType));
        Node* node = addToGraph(SetLocal, OpInfo(variableAccessData), value);
        m_currentBlock->variablesAtTail.local(local) = node;
        return node;
    }

    // Used in implementing get/set, above, where the operand is an argument.
    Node* getArgument(VirtualRegister operand)
    {
        unsigned argument = operand.toArgument();
        ASSERT(argument < m_numArguments);

        Node* node = m_currentBlock->variablesAtTail.argument(argument);
        bool isCaptured = m_codeBlock->isCaptured(operand);

        VariableAccessData* variable;

        if (node) {
            variable = node->variableAccessData();
            variable->mergeIsCaptured(isCaptured);

            switch (node->op()) {
            case GetLocal:
                return node;
            case SetLocal:
                return node->child1().node();
            default:
                break;
            }
        } else
            variable = newVariableAccessData(operand, isCaptured);

        node = injectLazyOperandSpeculation(addToGraph(GetLocal, OpInfo(variable)));
        m_currentBlock->variablesAtTail.argument(argument) = node;
        return node;
    }
    Node* setArgument(VirtualRegister operand, Node* value, SetMode setMode = NormalSet)
    {
        unsigned argument = operand.toArgument();
        ASSERT(argument < m_numArguments);

        bool isCaptured = m_codeBlock->isCaptured(operand);

        VariableAccessData* variableAccessData = newVariableAccessData(operand, isCaptured);

        // Always flush arguments, except for 'this'. If 'this' is created by us,
        // then make sure that it's never unboxed.
        if (argument) {
            if (setMode == NormalSet)
                flushDirect(operand);
        } else if (m_codeBlock->specializationKind() == CodeForConstruct)
            variableAccessData->mergeShouldNeverUnbox(true);

        variableAccessData->mergeStructureCheckHoistingFailed(
            m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache));
        variableAccessData->mergeCheckArrayHoistingFailed(
            m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadIndexingType));
        Node* node = addToGraph(SetLocal, OpInfo(variableAccessData), value);
        m_currentBlock->variablesAtTail.argument(argument) = node;
        return node;
    }

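    // ArgumentPositions are tracked on the root (machine) frame, so walk out of
    // any inline call frames before indexing into m_argumentPositions.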
    ArgumentPosition* findArgumentPositionForArgument(int argument)
    {
        InlineStackEntry* stack = m_inlineStackTop;
        while (stack->m_inlineCallFrame)
            stack = stack->m_caller;
        return stack->m_argumentPositions[argument];
    }
    
    ArgumentPosition* findArgumentPositionForLocal(VirtualRegister operand)
    {
        for (InlineStackEntry* stack = m_inlineStackTop; ; stack = stack->m_caller) {
            InlineCallFrame* inlineCallFrame = stack->m_inlineCallFrame;
            if (!inlineCallFrame)
                break;
            if (operand.offset() <= static_cast<int>(inlineCallFrame->stackOffset + JSStack::CallFrameHeaderSize))
                continue;
            if (operand.offset() == inlineCallFrame->stackOffset + CallFrame::thisArgumentOffset())
                continue;
            if (operand.offset() > static_cast<int>(inlineCallFrame->stackOffset + JSStack::CallFrameHeaderSize + inlineCallFrame->arguments.size()))
                continue;
            int argument = VirtualRegister(operand.offset() - inlineCallFrame->stackOffset).toArgument();
            return stack->m_argumentPositions[argument];
        }
        return 0;
    }

    ArgumentPosition* findArgumentPosition(VirtualRegister operand)
    {
        if (operand.isArgument())
            return findArgumentPositionForArgument(operand.toArgument());
        return findArgumentPositionForLocal(operand);
    }

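    // Lazily append a new constant to the CodeBlock's constant pool and set up
    // the deferred write barrier for it.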
    void addConstant(JSValue value)
    {
        unsigned constantIndex = m_codeBlock->addConstantLazily();
        initializeLazyWriteBarrierForConstant(
            m_graph.m_plan.writeBarriers,
            m_codeBlock->constants()[constantIndex],
            m_codeBlock,
            constantIndex,
            m_codeBlock->ownerExecutable(),
            value);
    }

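    // The flush() family emits Flush nodes so that the flushed operands remain
    // live on the stack (e.g. for captured variables and for values that must
    // stay observable across OSR exit).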
    void flush(VirtualRegister operand)
    {
        flushDirect(m_inlineStackTop->remapOperand(operand));
    }

    void flushDirect(VirtualRegister operand)
    {
        flushDirect(operand, findArgumentPosition(operand));
    }

    void flushDirect(VirtualRegister operand, ArgumentPosition* argumentPosition)
    {
        bool isCaptured = m_codeBlock->isCaptured(operand, inlineCallFrame());

        ASSERT(!operand.isConstant());

        Node* node = m_currentBlock->variablesAtTail.operand(operand);

        VariableAccessData* variable;

        if (node) {
            variable = node->variableAccessData();
            variable->mergeIsCaptured(isCaptured);
        } else
            variable = newVariableAccessData(operand, isCaptured);

        node = addToGraph(Flush, OpInfo(variable));
        m_currentBlock->variablesAtTail.operand(operand) = node;
        if (argumentPosition)
            argumentPosition->addVariable(variable);
    }

    void flush(InlineStackEntry* inlineStackEntry)
    {
        int numArguments;
        if (InlineCallFrame* inlineCallFrame = inlineStackEntry->m_inlineCallFrame) {
            numArguments = inlineCallFrame->arguments.size();
            if (inlineCallFrame->isClosureCall) {
                flushDirect(inlineStackEntry->remapOperand(VirtualRegister(JSStack::Callee)));
                flushDirect(inlineStackEntry->remapOperand(VirtualRegister(JSStack::ScopeChain)));
            }
        } else
            numArguments = inlineStackEntry->m_codeBlock->numParameters();
        for (unsigned argument = numArguments; argument-- > 1;)
            flushDirect(inlineStackEntry->remapOperand(virtualRegisterForArgument(argument)));
        for (int local = 0; local < inlineStackEntry->m_codeBlock->m_numVars; ++local) {
            if (!inlineStackEntry->m_codeBlock->isCaptured(virtualRegisterForLocal(local)))
                continue;
            flushDirect(inlineStackEntry->remapOperand(virtualRegisterForLocal(local)));
        }
    }

    void flushAllArgumentsAndCapturedVariablesInInlineStack()
    {
        for (InlineStackEntry* inlineStackEntry = m_inlineStackTop; inlineStackEntry; inlineStackEntry = inlineStackEntry->m_caller)
            flush(inlineStackEntry);
    }

    void flushArgumentsAndCapturedVariables()
    {
        flush(m_inlineStackTop);
    }

    // Get an operand, and perform a ToInt32/ToNumber conversion on it.
    Node* getToInt32(int operand)
    {
        return toInt32(get(VirtualRegister(operand)));
    }

    // Perform an ES5 ToInt32 operation - returns a node of type NodeResultInt32.
    Node* toInt32(Node* node)
    {
        if (node->hasInt32Result())
            return node;

        if (node->op() == UInt32ToNumber)
            return node->child1().node();

        // Check for numeric constants boxed as JSValues.
        if (canFold(node)) {
            JSValue v = valueOfJSConstant(node);
            if (v.isInt32())
                return getJSConstant(node->constantNumber());
            if (v.isNumber())
                return getJSConstantForValue(JSValue(JSC::toInt32(v.asNumber())));
        }

        return addToGraph(ValueToInt32, node);
    }

    // NOTE: Only use this to construct constants that arise from non-speculative
    // constant folding. I.e. creating constants using this if we had constant
    // field inference would be a bad idea, since the bytecode parser's folding
    // doesn't handle liveness preservation.
    Node* getJSConstantForValue(JSValue constantValue)
    {
        unsigned constantIndex;
        if (!m_codeBlock->findConstant(constantValue, constantIndex)) {
            addConstant(constantValue);
            m_constants.append(ConstantRecord());
        }

        ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());

        return getJSConstant(constantIndex);
    }

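    // Return the JSConstant node for the given index into the CodeBlock's
    // constant pool, creating and caching it on first use.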
    Node* getJSConstant(unsigned constant)
    {
        Node* node = m_constants[constant].asJSValue;
        if (node)
            return node;

        Node* result = addToGraph(JSConstant, OpInfo(constant));
        m_constants[constant].asJSValue = result;
        return result;
    }

    // Helper functions to get/set the this value.
    Node* getThis()
    {
        return get(m_inlineStackTop->m_codeBlock->thisRegister());
    }

    void setThis(Node* value)
    {
        set(m_inlineStackTop->m_codeBlock->thisRegister(), value);
    }

    // Convenience methods for checking nodes for constants.
    bool isJSConstant(Node* node)
    {
        return node->op() == JSConstant;
    }
    bool isInt32Constant(Node* node)
    {
        return isJSConstant(node) && valueOfJSConstant(node).isInt32();
    }
    // Convenience methods for getting constant values.
    JSValue valueOfJSConstant(Node* node)
    {
        ASSERT(isJSConstant(node));
        return m_codeBlock->getConstant(FirstConstantRegisterIndex + node->constantNumber());
    }
    int32_t valueOfInt32Constant(Node* node)
    {
        ASSERT(isInt32Constant(node));
        return valueOfJSConstant(node).asInt32();
    }

    // This method returns a JSConstant with the value 'undefined'.
    Node* constantUndefined()
    {
        // Has m_constantUndefined been set up yet?
        if (m_constantUndefined == UINT_MAX) {
            // Search the constant pool for undefined, if we find it, we can just reuse this!
            unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
            for (m_constantUndefined = 0; m_constantUndefined < numberOfConstants; ++m_constantUndefined) {
                JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantUndefined);
                if (testMe.isUndefined())
                    return getJSConstant(m_constantUndefined);
            }

            // Add undefined to the CodeBlock's constants, and add a corresponding slot in m_constants.
            ASSERT(m_constants.size() == numberOfConstants);
            addConstant(jsUndefined());
            m_constants.append(ConstantRecord());
            ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
        }

        // m_constantUndefined must refer to an entry in the CodeBlock's constant pool that has the value 'undefined'.
        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantUndefined).isUndefined());
        return getJSConstant(m_constantUndefined);
    }

    // This method returns a JSConstant with the value 'null'.
    Node* constantNull()
    {
        // Has m_constantNull been set up yet?
        if (m_constantNull == UINT_MAX) {
            // Search the constant pool for null, if we find it, we can just reuse this!
            unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
            for (m_constantNull = 0; m_constantNull < numberOfConstants; ++m_constantNull) {
                JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNull);
                if (testMe.isNull())
                    return getJSConstant(m_constantNull);
            }

            // Add null to the CodeBlock's constants, and add a corresponding slot in m_constants.
            ASSERT(m_constants.size() == numberOfConstants);
            addConstant(jsNull());
            m_constants.append(ConstantRecord());
            ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
        }

        // m_constantNull must refer to an entry in the CodeBlock's constant pool that has the value 'null'.
        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNull).isNull());
        return getJSConstant(m_constantNull);
    }

    // This method returns a JSConstant with the value 1.
    Node* one()
    {
        // Has m_constant1 been set up yet?
        if (m_constant1 == UINT_MAX) {
            // Search the constant pool for the value 1, if we find it, we can just reuse this!
            unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
            for (m_constant1 = 0; m_constant1 < numberOfConstants; ++m_constant1) {
                JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constant1);
                if (testMe.isInt32() && testMe.asInt32() == 1)
                    return getJSConstant(m_constant1);
            }

            // Add the value 1 to the CodeBlock's constants, and add a corresponding slot in m_constants.
            ASSERT(m_constants.size() == numberOfConstants);
            addConstant(jsNumber(1));
            m_constants.append(ConstantRecord());
            ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
        }

        // m_constant1 must refer to an entry in the CodeBlock's constant pool that has the integer value 1.
        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constant1).isInt32());
        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constant1).asInt32() == 1);
        return getJSConstant(m_constant1);
    }

    // This method returns a JSConstant with the value NaN.
    Node* constantNaN()
    {
        JSValue nan = jsNaN();

        // Has m_constantNaN been set up yet?
        if (m_constantNaN == UINT_MAX) {
            // Search the constant pool for the value NaN, if we find it, we can just reuse this!
            unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
            for (m_constantNaN = 0; m_constantNaN < numberOfConstants; ++m_constantNaN) {
                JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNaN);
                if (JSValue::encode(testMe) == JSValue::encode(nan))
                    return getJSConstant(m_constantNaN);
            }

            // Add the value nan to the CodeBlock's constants, and add a corresponding slot in m_constants.
            ASSERT(m_constants.size() == numberOfConstants);
            addConstant(nan);
            m_constants.append(ConstantRecord());
            ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
        }

        // m_constantNaN must refer to an entry in the CodeBlock's constant pool that has the value nan.
        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNaN).isDouble());
        ASSERT(std::isnan(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNaN).asDouble()));
        return getJSConstant(m_constantNaN);
    }

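    // Return a WeakJSConstant node for the given cell, creating and caching one
    // node per cell.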
    Node* cellConstant(JSCell* cell)
    {
        HashMap<JSCell*, Node*>::AddResult result = m_cellConstantNodes.add(cell, nullptr);
        if (result.isNewEntry)
            result.iterator->value = addToGraph(WeakJSConstant, OpInfo(cell));

        return result.iterator->value;
    }

    InlineCallFrame* inlineCallFrame()
    {
        return m_inlineStackTop->m_inlineCallFrame;
    }

    CodeOrigin currentCodeOrigin()
    {
        return CodeOrigin(m_currentIndex, inlineCallFrame());
    }

    bool canFold(Node* node)
    {
        return node->isStronglyProvedConstantIn(inlineCallFrame());
    }

    // Our codegen for constant strict equality performs a bitwise comparison,
    // so we can only select values that have a consistent bitwise identity.
    bool isConstantForCompareStrictEq(Node* node)
    {
        if (!node->isConstant())
            return false;
        JSValue value = valueOfJSConstant(node);
        return value.isBoolean() || value.isUndefinedOrNull();
    }

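    // The addToGraph() overloads allocate a node tagged with the current code
    // origin and append it to the current basic block.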
    Node* addToGraph(NodeType op, Node* child1 = 0, Node* child2 = 0, Node* child3 = 0)
    {
        Node* result = m_graph.addNode(
            SpecNone, op, currentCodeOrigin(), Edge(child1), Edge(child2), Edge(child3));
        ASSERT(op != Phi);
        m_currentBlock->append(result);
        return result;
    }
    Node* addToGraph(NodeType op, Edge child1, Edge child2 = Edge(), Edge child3 = Edge())
    {
        Node* result = m_graph.addNode(
            SpecNone, op, currentCodeOrigin(), child1, child2, child3);
        ASSERT(op != Phi);
        m_currentBlock->append(result);
        return result;
    }
    Node* addToGraph(NodeType op, OpInfo info, Node* child1 = 0, Node* child2 = 0, Node* child3 = 0)
    {
        Node* result = m_graph.addNode(
            SpecNone, op, currentCodeOrigin(), info, Edge(child1), Edge(child2), Edge(child3));
        ASSERT(op != Phi);
        m_currentBlock->append(result);
        return result;
    }
    Node* addToGraph(NodeType op, OpInfo info1, OpInfo info2, Node* child1 = 0, Node* child2 = 0, Node* child3 = 0)
    {
        Node* result = m_graph.addNode(
            SpecNone, op, currentCodeOrigin(), info1, info2,
            Edge(child1), Edge(child2), Edge(child3));
        ASSERT(op != Phi);
        m_currentBlock->append(result);
        return result;
    }

    Node* addToGraph(Node::VarArgTag, NodeType op, OpInfo info1, OpInfo info2)
    {
        Node* result = m_graph.addNode(
            SpecNone, Node::VarArg, op, currentCodeOrigin(), info1, info2,
            m_graph.m_varArgChildren.size() - m_numPassedVarArgs, m_numPassedVarArgs);
        ASSERT(op != Phi);
        m_currentBlock->append(result);

        m_numPassedVarArgs = 0;

        return result;
    }

    void addVarArgChild(Node* child)
    {
        m_graph.m_varArgChildren.append(Edge(child));
        m_numPassedVarArgs++;
    }

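    // Build a varargs Call/Construct node for the current call instruction: the
    // callee and the arguments become vararg children, and the result is stored
    // into the instruction's destination operand.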
    Node* addCall(Instruction* currentInstruction, NodeType op)
    {
        SpeculatedType prediction = getPrediction();

        addVarArgChild(get(VirtualRegister(currentInstruction[2].u.operand)));
        int argCount = currentInstruction[3].u.operand;
        if (JSStack::CallFrameHeaderSize + (unsigned)argCount > m_parameterSlots)
            m_parameterSlots = JSStack::CallFrameHeaderSize + argCount;

        int registerOffset = -currentInstruction[4].u.operand;
        int dummyThisArgument = op == Call ? 0 : 1;
        for (int i = 0 + dummyThisArgument; i < argCount; ++i)
            addVarArgChild(get(virtualRegisterForArgument(i, registerOffset)));

        Node* call = addToGraph(Node::VarArg, op, OpInfo(0), OpInfo(prediction));
        set(VirtualRegister(currentInstruction[1].u.operand), call);
        return call;
    }

    Node* cellConstantWithStructureCheck(JSCell* object, Structure* structure)
    {
        Node* objectNode = cellConstant(object);
        addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(structure)), objectNode);
        return objectNode;
    }

    Node* cellConstantWithStructureCheck(JSCell* object)
    {
        return cellConstantWithStructureCheck(object, object->structure());
    }

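    // Read the profiled value prediction for the given bytecode offset without
    // planting a ForceOSRExit when no profiling information is available.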
    SpeculatedType getPredictionWithoutOSRExit(unsigned bytecodeIndex)
    {
        ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
        return m_inlineStackTop->m_profiledBlock->valueProfilePredictionForBytecodeOffset(locker, bytecodeIndex);
    }

    SpeculatedType getPrediction(unsigned bytecodeIndex)
    {
        SpeculatedType prediction = getPredictionWithoutOSRExit(bytecodeIndex);

        if (prediction == SpecNone) {
            // We have no information about what values this node generates. Give up
            // on executing this code, since we're likely to do more damage than good.
            addToGraph(ForceOSRExit);
        }

        return prediction;
    }

    SpeculatedType getPredictionWithoutOSRExit()
    {
        return getPredictionWithoutOSRExit(m_currentIndex);