DFGByteCodeParser.cpp 157 KB
Newer Older
1
/*
2
 * Copyright (C) 2011, 2012, 2013 Apple Inc. All rights reserved.
3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 */

#include "config.h"
#include "DFGByteCodeParser.h"

#if ENABLE(DFG_JIT)

31
#include "ArrayConstructor.h"
32 33
#include "CallLinkStatus.h"
#include "CodeBlock.h"
34
#include "CodeBlockWithJITType.h"
35
#include "DFGArrayMode.h"
36
#include "DFGCapabilities.h"
37
#include "DFGJITCode.h"
38
#include "GetByIdStatus.h"
39
#include "Operations.h"
40
#include "PreciseJumpTargets.h"
41
#include "PutByIdStatus.h"
42
#include "StringConstructor.h"
43
#include <wtf/CommaPrinter.h>
44
#include <wtf/HashMap.h>
45
#include <wtf/MathExtras.h>
46
#include <wtf/StdLibExtras.h>
47 48 49

namespace JSC { namespace DFG {

50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119
// Identifies a constant buffer within a particular CodeBlock. Used as a
// HashMap key, so it supports an empty value (null code block, index 0)
// and a deleted value (null code block, index 1).
class ConstantBufferKey {
public:
    ConstantBufferKey()
        : m_codeBlock(nullptr)
        , m_index(0)
    {
    }
    
    ConstantBufferKey(WTF::HashTableDeletedValueType)
        : m_codeBlock(nullptr)
        , m_index(1)
    {
    }
    
    ConstantBufferKey(CodeBlock* codeBlock, unsigned index)
        : m_codeBlock(codeBlock)
        , m_index(index)
    {
    }
    
    bool operator==(const ConstantBufferKey& other) const
    {
        return m_codeBlock == other.m_codeBlock
            && m_index == other.m_index;
    }
    
    unsigned hash() const
    {
        // Mix the pointer hash with the index so distinct buffers in the
        // same code block hash differently.
        return WTF::PtrHash<CodeBlock*>::hash(m_codeBlock) ^ m_index;
    }
    
    bool isHashTableDeletedValue() const
    {
        return !m_codeBlock && m_index;
    }
    
    CodeBlock* codeBlock() const { return m_codeBlock; }
    unsigned index() const { return m_index; }
    
private:
    CodeBlock* m_codeBlock;
    unsigned m_index;
};

// Hash functor for ConstantBufferKey, delegating to the key's own hash()
// and operator==.
struct ConstantBufferKeyHash {
    static unsigned hash(const ConstantBufferKey& key) { return key.hash(); }
    static bool equal(const ConstantBufferKey& a, const ConstantBufferKey& b) { return a == b; }
    
    static const bool safeToCompareToEmptyOrDeleted = true;
};

} } // namespace JSC::DFG

namespace WTF {

// Make ConstantBufferKey usable as a WTF::HashMap key by wiring up its
// default hash and traits specializations.
template<typename T> struct DefaultHash;
template<> struct DefaultHash<JSC::DFG::ConstantBufferKey> {
    typedef JSC::DFG::ConstantBufferKeyHash Hash;
};

template<typename T> struct HashTraits;
template<> struct HashTraits<JSC::DFG::ConstantBufferKey> : SimpleClassHashTraits<JSC::DFG::ConstantBufferKey> { };

} // namespace WTF

namespace JSC { namespace DFG {

120 121 122 123 124
// === ByteCodeParser ===
//
// This class is used to compile the dataflow graph from a CodeBlock.
class ByteCodeParser {
public:
125
    // Construct a parser over the graph's CodeBlock. The UINT_MAX values are
    // sentinels meaning "cached constant index not yet materialized".
    ByteCodeParser(Graph& graph)
        : m_vm(&graph.m_vm)
        , m_codeBlock(graph.m_codeBlock)
        , m_profiledBlock(graph.m_profiledBlock)
        , m_graph(graph)
        , m_currentBlock(0)
        , m_currentIndex(0)
        , m_constantUndefined(UINT_MAX)
        , m_constantNull(UINT_MAX)
        , m_constantNaN(UINT_MAX)
        , m_constant1(UINT_MAX)
        , m_constants(m_codeBlock->numberOfConstantRegisters())
        , m_numArguments(m_codeBlock->numParameters())
        , m_numLocals(m_codeBlock->m_numCalleeRegisters)
        , m_preservedVars(m_codeBlock->m_numVars)
        , m_parameterSlots(0)
        , m_numPassedVarArgs(0)
        , m_inlineStackTop(0)
        , m_haveBuiltOperandMaps(false)
        , m_emptyJSValueIndex(UINT_MAX)
        , m_currentInstruction(0)
    {
        ASSERT(m_profiledBlock);
        
        // All of the code block's declared vars start out preserved.
        for (int i = 0; i < m_codeBlock->m_numVars; ++i)
            m_preservedVars.set(i);
    }
152
    
153
    // Parse a full CodeBlock of bytecode.
154
    bool parse();
155
    
156
private:
157 158
    struct InlineStackEntry;

159
    // Just parse from m_currentIndex to the end of the current CodeBlock.
160
    void parseCodeBlock();
161

162
    // Helper for min and max.
163
    bool handleMinMax(int resultOperand, NodeType op, int registerOffset, int argumentCountIncludingThis);
164
    
165
    // Handle calls. This resolves issues surrounding inlining and intrinsics.
166
    void handleCall(Instruction* currentInstruction, NodeType op, CodeSpecializationKind);
167
    void emitFunctionChecks(const CallLinkStatus&, Node* callTarget, int registerOffset, CodeSpecializationKind);
168
    void emitArgumentPhantoms(int registerOffset, int argumentCountIncludingThis, CodeSpecializationKind);
fpizlo@apple.com's avatar
fpizlo@apple.com committed
169
    // Handle inlining. Return true if it succeeded, false if we need to plant a call.
170
    bool handleInlining(Node* callTargetNode, int resultOperand, const CallLinkStatus&, int registerOffset, int argumentCountIncludingThis, unsigned nextOffset, CodeSpecializationKind);
fpizlo@apple.com's avatar
fpizlo@apple.com committed
171
    // Handle intrinsic functions. Return true if it succeeded, false if we need to plant a call.
172
    bool handleIntrinsic(int resultOperand, Intrinsic, int registerOffset, int argumentCountIncludingThis, SpeculatedType prediction);
173
    bool handleTypedArrayConstructor(int resultOperand, InternalFunction*, int registerOffset, int argumentCountIncludingThis, TypedArrayType);
174
    bool handleConstantInternalFunction(int resultOperand, InternalFunction*, int registerOffset, int argumentCountIncludingThis, SpeculatedType prediction, CodeSpecializationKind);
175
    Node* handlePutByOffset(Node* base, unsigned identifier, PropertyOffset, Node* value);
176
    Node* handleGetByOffset(SpeculatedType, Node* base, unsigned identifierNumber, PropertyOffset);
177
    void handleGetByOffset(
178
        int destinationOperand, SpeculatedType, Node* base, unsigned identifierNumber,
179
        PropertyOffset);
180
    void handleGetById(
181
        int destinationOperand, SpeculatedType, Node* base, unsigned identifierNumber,
182
        const GetByIdStatus&);
183

184
    Node* getScope(bool skipTop, unsigned skipCount);
185
    
186 187
    // Prepare to parse a block.
    void prepareToParseBlock();
188
    // Parse a single basic block of bytecode instructions.
189
    bool parseBlock(unsigned limit);
190
    // Link block successors.
191 192
    void linkBlock(BasicBlock*, Vector<BasicBlock*>& possibleTargets);
    void linkBlocks(Vector<UnlinkedBlock>& unlinkedBlocks, Vector<BasicBlock*>& possibleTargets);
193
    
194
    // Allocate a fresh VariableAccessData in the graph for the given
    // (non-constant) operand and return a pointer to it.
    VariableAccessData* newVariableAccessData(int operand, bool isCaptured)
    {
        ASSERT(operand < FirstConstantRegisterIndex);
        
        m_graph.m_variableAccessData.append(VariableAccessData(static_cast<VirtualRegister>(operand), isCaptured));
        return &m_graph.m_variableAccessData.last();
    }
    
202
    // Get/Set the operands/result of a bytecode instruction.
203
    Node* getDirect(int operand)
204 205 206 207
    {
        // Is this a constant?
        if (operand >= FirstConstantRegisterIndex) {
            unsigned constant = operand - FirstConstantRegisterIndex;
208
            ASSERT(constant < m_constants.size());
209 210 211
            return getJSConstant(constant);
        }

212
        ASSERT(operand != JSStack::Callee);
213
        
214
        // Is this an argument?
215
        if (operandIsArgument(operand))
216
            return getArgument(operand);
217

218
        // Must be a local.
219
        return getLocal(operand);
220
    }
221

222
    // Read an operand in the caller's (unremapped) numbering. The callee
    // register folds to a constant when we are inlining a known function.
    Node* get(int operand)
    {
        if (operand == JSStack::Callee) {
            if (inlineCallFrame() && inlineCallFrame()->callee)
                return cellConstant(inlineCallFrame()->callee.get());
            
            return getCallee();
        }
        
        return getDirect(m_inlineStackTop->remapOperand(operand));
    }
233

234
    enum SetMode { NormalSet, SetOnEntry };
235
    void setDirect(int operand, Node* value, SetMode setMode = NormalSet)
236 237
    {
        // Is this an argument?
238
        if (operandIsArgument(operand)) {
239
            setArgument(operand, value, setMode);
240
            return;
241 242
        }

243
        // Must be a local.
244
        setLocal(operand, value, setMode);
245
    }
246

247
    void set(int operand, Node* value, SetMode setMode = NormalSet)
fpizlo@apple.com's avatar
fpizlo@apple.com committed
248
    {
249
        setDirect(m_inlineStackTop->remapOperand(operand), value, setMode);
fpizlo@apple.com's avatar
fpizlo@apple.com committed
250
    }
251
    
252
    // Attach the profiled value prediction for this GetLocal's operand at the
    // current bytecode index. Takes the profiled block's lock because the
    // lazy-operand profile may be accessed concurrently by the JIT.
    Node* injectLazyOperandSpeculation(Node* node)
    {
        ASSERT(node->op() == GetLocal);
        ASSERT(node->codeOrigin.bytecodeIndex == m_currentIndex);
        ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
        LazyOperandValueProfileKey key(m_currentIndex, node->local());
        SpeculatedType prediction = m_inlineStackTop->m_lazyOperands.prediction(locker, key);
#if DFG_ENABLE(DEBUG_VERBOSE)
        dataLog("Lazy operand [@", node->index(), ", bc#", m_currentIndex, ", r", node->local(), "] prediction: ", SpeculationDump(prediction), "\n");
#endif
        node->variableAccessData()->predict(prediction);
        return node;
    }
265

266
    // Used in implementing get/set, above, where the operand is a local variable.
267
    Node* getLocal(int operand)
268
    {
269 270
        unsigned local = operandToLocal(operand);
        Node* node = m_currentBlock->variablesAtTail.local(local);
271
        bool isCaptured = m_codeBlock->isCaptured(operand, inlineCallFrame());
fpizlo@apple.com's avatar
fpizlo@apple.com committed
272
        
273 274 275 276 277 278 279 280
        // This has two goals: 1) link together variable access datas, and 2)
        // try to avoid creating redundant GetLocals. (1) is required for
        // correctness - no other phase will ensure that block-local variable
        // access data unification is done correctly. (2) is purely opportunistic
        // and is meant as an compile-time optimization only.
        
        VariableAccessData* variable;
        
281
        if (node) {
282 283
            variable = node->variableAccessData();
            variable->mergeIsCaptured(isCaptured);
284
            
285 286 287 288 289 290 291 292 293
            if (!isCaptured) {
                switch (node->op()) {
                case GetLocal:
                    return node;
                case SetLocal:
                    return node->child1().node();
                default:
                    break;
                }
294 295
            }
        } else {
296
            m_preservedVars.set(local);
297
            variable = newVariableAccessData(operand, isCaptured);
298
        }
299
        
300
        node = injectLazyOperandSpeculation(addToGraph(GetLocal, OpInfo(variable)));
301
        m_currentBlock->variablesAtTail.local(local) = node;
302
        return node;
303
    }
304 305

    void setLocal(int operand, Node* value, SetMode setMode = NormalSet)
306
    {
307
        unsigned local = operandToLocal(operand);
308
        bool isCaptured = m_codeBlock->isCaptured(operand, inlineCallFrame());
309
        
310 311 312 313 314 315
        if (setMode == NormalSet) {
            ArgumentPosition* argumentPosition = findArgumentPositionForLocal(operand);
            if (isCaptured || argumentPosition)
                flushDirect(operand, argumentPosition);
        }

316
        VariableAccessData* variableAccessData = newVariableAccessData(operand, isCaptured);
317 318
        variableAccessData->mergeStructureCheckHoistingFailed(
            m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache));
319 320
        variableAccessData->mergeCheckArrayHoistingFailed(
            m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadIndexingType));
321
        Node* node = addToGraph(SetLocal, OpInfo(variableAccessData), value);
322
        m_currentBlock->variablesAtTail.local(local) = node;
323 324 325
    }

    // Used in implementing get/set, above, where the operand is an argument.
326
    Node* getArgument(unsigned operand)
327
    {
328
        unsigned argument = operandToArgument(operand);
329
        ASSERT(argument < m_numArguments);
330
        
331
        Node* node = m_currentBlock->variablesAtTail.argument(argument);
332
        bool isCaptured = m_codeBlock->isCaptured(operand);
333

334 335
        VariableAccessData* variable;
        
336
        if (node) {
337 338
            variable = node->variableAccessData();
            variable->mergeIsCaptured(isCaptured);
339
            
340 341
            switch (node->op()) {
            case GetLocal:
342
                return node;
343 344 345 346
            case SetLocal:
                return node->child1().node();
            default:
                break;
347
            }
348 349
        } else
            variable = newVariableAccessData(operand, isCaptured);
350
        
351
        node = injectLazyOperandSpeculation(addToGraph(GetLocal, OpInfo(variable)));
352 353
        m_currentBlock->variablesAtTail.argument(argument) = node;
        return node;
354
    }
355
    void setArgument(int operand, Node* value, SetMode setMode = NormalSet)
356
    {
357
        unsigned argument = operandToArgument(operand);
358
        ASSERT(argument < m_numArguments);
359
        
360 361
        bool isCaptured = m_codeBlock->isCaptured(operand);

362
        VariableAccessData* variableAccessData = newVariableAccessData(operand, isCaptured);
363 364 365 366 367 368 369 370 371

        // Always flush arguments, except for 'this'. If 'this' is created by us,
        // then make sure that it's never unboxed.
        if (argument) {
            if (setMode == NormalSet)
                flushDirect(operand);
        } else if (m_codeBlock->specializationKind() == CodeForConstruct)
            variableAccessData->mergeShouldNeverUnbox(true);
        
372 373
        variableAccessData->mergeStructureCheckHoistingFailed(
            m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache));
374 375
        variableAccessData->mergeCheckArrayHoistingFailed(
            m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadIndexingType));
376 377
        Node* node = addToGraph(SetLocal, OpInfo(variableAccessData), value);
        m_currentBlock->variablesAtTail.argument(argument) = node;
378
    }
fpizlo@apple.com's avatar
fpizlo@apple.com committed
379
    
380 381 382 383 384 385 386 387 388 389 390 391 392 393
    // Find the ArgumentPosition for an argument of the machine (outermost,
    // non-inlined) code block.
    ArgumentPosition* findArgumentPositionForArgument(int argument)
    {
        InlineStackEntry* stack = m_inlineStackTop;
        while (stack->m_inlineCallFrame)
            stack = stack->m_caller;
        return stack->m_argumentPositions[argument];
    }
    
    // If the given local is actually an argument slot of some inline call
    // frame, return that frame's ArgumentPosition; otherwise return 0.
    ArgumentPosition* findArgumentPositionForLocal(int operand)
    {
        for (InlineStackEntry* stack = m_inlineStackTop; ; stack = stack->m_caller) {
            InlineCallFrame* inlineCallFrame = stack->m_inlineCallFrame;
            if (!inlineCallFrame)
                break;
            // Skip operands below this frame's argument area, the 'this'
            // argument, and operands above the argument area.
            if (operand <= static_cast<int>(inlineCallFrame->stackOffset + JSStack::CallFrameHeaderSize))
                continue;
            if (operand == inlineCallFrame->stackOffset + CallFrame::thisArgumentOffset())
                continue;
            if (operand > static_cast<int>(inlineCallFrame->stackOffset + JSStack::CallFrameHeaderSize + inlineCallFrame->arguments.size()))
                continue;
            int argument = operandToArgument(operand - inlineCallFrame->stackOffset);
            return stack->m_argumentPositions[argument];
        }
        return 0;
    }
    
    // Dispatch on operand kind to one of the two lookups above.
    ArgumentPosition* findArgumentPosition(int operand)
    {
        if (operandIsArgument(operand))
            return findArgumentPositionForArgument(operandToArgument(operand));
        return findArgumentPositionForLocal(operand);
    }
412 413 414

    // Append a new constant to the CodeBlock's constant pool, registering a
    // lazy write barrier with the compilation plan for the new slot.
    void addConstant(JSValue value)
    {
        unsigned constantIndex = m_codeBlock->addConstantLazily();
        initializeLazyWriteBarrierForConstant(
            m_graph.m_plan.writeBarriers,
            m_codeBlock->constants()[constantIndex],
            m_codeBlock,
            constantIndex,
            m_codeBlock->ownerExecutable(), 
            value);
    }
424 425 426 427 428 429 430 431 432 433 434 435
    
    // Flush an operand given in the caller's numbering.
    void flush(int operand)
    {
        flushDirect(m_inlineStackTop->remapOperand(operand));
    }
    
    // Flush an already-remapped operand, looking up its argument position.
    void flushDirect(int operand)
    {
        flushDirect(operand, findArgumentPosition(operand));
    }
    
    void flushDirect(int operand, ArgumentPosition* argumentPosition)
fpizlo@apple.com's avatar
fpizlo@apple.com committed
436
    {
437
        bool isCaptured = m_codeBlock->isCaptured(operand, inlineCallFrame());
fpizlo@apple.com's avatar
fpizlo@apple.com committed
438 439 440
        
        ASSERT(operand < FirstConstantRegisterIndex);
        
441
        if (!operandIsArgument(operand))
442
            m_preservedVars.set(operandToLocal(operand));
443 444 445 446
        
        Node* node = m_currentBlock->variablesAtTail.operand(operand);
        
        VariableAccessData* variable;
fpizlo@apple.com's avatar
fpizlo@apple.com committed
447
        
448
        if (node) {
449 450 451 452
            variable = node->variableAccessData();
            variable->mergeIsCaptured(isCaptured);
        } else
            variable = newVariableAccessData(operand, isCaptured);
fpizlo@apple.com's avatar
fpizlo@apple.com committed
453
        
454 455
        node = addToGraph(Flush, OpInfo(variable));
        m_currentBlock->variablesAtTail.operand(operand) = node;
456
        if (argumentPosition)
457
            argumentPosition->addVariable(variable);
458
    }
459 460

    // Flush all arguments (except 'this', index 0) and all captured locals of
    // the given inline stack entry's code block.
    void flush(InlineStackEntry* inlineStackEntry)
    {
        int numArguments;
        if (InlineCallFrame* inlineCallFrame = inlineStackEntry->m_inlineCallFrame)
            numArguments = inlineCallFrame->arguments.size();
        else
            numArguments = inlineStackEntry->m_codeBlock->numParameters();
        for (unsigned argument = numArguments; argument-- > 1;)
            flushDirect(inlineStackEntry->remapOperand(argumentToOperand(argument)));
        for (int local = 0; local < inlineStackEntry->m_codeBlock->m_numVars; ++local) {
            if (!inlineStackEntry->m_codeBlock->isCaptured(localToOperand(local)))
                continue;
            flushDirect(inlineStackEntry->remapOperand(localToOperand(local)));
        }
    }
475

476 477 478 479 480 481 482 483 484 485 486
    // Flush arguments and captured variables of every frame on the inline
    // stack, from the innermost frame out to the machine frame.
    void flushAllArgumentsAndCapturedVariablesInInlineStack()
    {
        for (InlineStackEntry* inlineStackEntry = m_inlineStackTop; inlineStackEntry; inlineStackEntry = inlineStackEntry->m_caller)
            flush(inlineStackEntry);
    }

    // Flush arguments and captured variables of the current frame only.
    void flushArgumentsAndCapturedVariables()
    {
        flush(m_inlineStackTop);
    }

487
    // Get an operand, and perform a ToInt32/ToNumber conversion on it.
488
    Node* getToInt32(int operand)
489 490 491 492 493
    {
        return toInt32(get(operand));
    }

    // Perform an ES5 ToInt32 operation - returns a node of type NodeResultInt32.
494
    Node* toInt32(Node* node)
495
    {
496 497
        if (node->hasInt32Result())
            return node;
498

499 500
        if (node->op() == UInt32ToNumber)
            return node->child1().node();
501 502

        // Check for numeric constants boxed as JSValues.
503
        if (canFold(node)) {
504
            JSValue v = valueOfJSConstant(node);
505
            if (v.isInt32())
506
                return getJSConstant(node->constantNumber());
507 508
            if (v.isNumber())
                return getJSConstantForValue(JSValue(JSC::toInt32(v.asNumber())));
509 510
        }

511
        return addToGraph(ValueToInt32, node);
512 513
    }

514 515 516 517
    // NOTE: Only use this to construct constants that arise from non-speculative
    // constant folding. I.e. creating constants using this if we had constant
    // field inference would be a bad idea, since the bytecode parser's folding
    // doesn't handle liveness preservation.
518
    Node* getJSConstantForValue(JSValue constantValue)
519
    {
520 521 522
        unsigned constantIndex;
        if (!m_codeBlock->findConstant(constantValue, constantIndex)) {
            addConstant(constantValue);
523
            m_constants.append(ConstantRecord());
524
        }
525 526 527 528 529
        
        ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
        
        return getJSConstant(constantIndex);
    }
530

531
    // Return the (memoized) JSConstant node for the given constant-pool index.
    Node* getJSConstant(unsigned constant)
    {
        Node* node = m_constants[constant].asJSValue;
        if (node)
            return node;

        Node* result = addToGraph(JSConstant, OpInfo(constant));
        m_constants[constant].asJSValue = result;
        return result;
    }

542
    // Emit a node that loads the current callee.
    Node* getCallee()
    {
        return addToGraph(GetCallee);
    }

547
    // Helper functions to get/set the this value.
548
    Node* getThis()
549
    {
550
        return get(m_inlineStackTop->m_codeBlock->thisRegister());
551
    }
552
    void setThis(Node* value)
553
    {
554
        set(m_inlineStackTop->m_codeBlock->thisRegister(), value);
555 556 557
    }

    // Convenience methods for checking nodes for constants.
558
    bool isJSConstant(Node* node)
559
    {
560
        return node->op() == JSConstant;
561
    }
562
    bool isInt32Constant(Node* node)
563
    {
564
        return isJSConstant(node) && valueOfJSConstant(node).isInt32();
565
    }
566
    // Convenience methods for getting constant values.
567
    JSValue valueOfJSConstant(Node* node)
568
    {
569 570
        ASSERT(isJSConstant(node));
        return m_codeBlock->getConstant(FirstConstantRegisterIndex + node->constantNumber());
571
    }
572
    int32_t valueOfInt32Constant(Node* node)
573
    {
574 575
        ASSERT(isInt32Constant(node));
        return valueOfJSConstant(node).asInt32();
576
    }
577
    
578
    // This method returns a JSConstant with the value 'undefined'.
579
    Node* constantUndefined()
580 581 582 583 584 585 586 587 588 589 590
    {
        // Has m_constantUndefined been set up yet?
        if (m_constantUndefined == UINT_MAX) {
            // Search the constant pool for undefined, if we find it, we can just reuse this!
            unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
            for (m_constantUndefined = 0; m_constantUndefined < numberOfConstants; ++m_constantUndefined) {
                JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantUndefined);
                if (testMe.isUndefined())
                    return getJSConstant(m_constantUndefined);
            }

591 592
            // Add undefined to the CodeBlock's constants, and add a corresponding slot in m_constants.
            ASSERT(m_constants.size() == numberOfConstants);
593
            addConstant(jsUndefined());
594 595
            m_constants.append(ConstantRecord());
            ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
596 597 598 599 600 601 602
        }

        // m_constantUndefined must refer to an entry in the CodeBlock's constant pool that has the value 'undefined'.
        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantUndefined).isUndefined());
        return getJSConstant(m_constantUndefined);
    }

603
    // This method returns a JSConstant with the value 'null'.
604
    Node* constantNull()
605 606 607 608 609 610 611 612 613 614 615
    {
        // Has m_constantNull been set up yet?
        if (m_constantNull == UINT_MAX) {
            // Search the constant pool for null, if we find it, we can just reuse this!
            unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
            for (m_constantNull = 0; m_constantNull < numberOfConstants; ++m_constantNull) {
                JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNull);
                if (testMe.isNull())
                    return getJSConstant(m_constantNull);
            }

616 617
            // Add null to the CodeBlock's constants, and add a corresponding slot in m_constants.
            ASSERT(m_constants.size() == numberOfConstants);
618
            addConstant(jsNull());
619 620
            m_constants.append(ConstantRecord());
            ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
621 622 623 624 625 626 627
        }

        // m_constantNull must refer to an entry in the CodeBlock's constant pool that has the value 'null'.
        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNull).isNull());
        return getJSConstant(m_constantNull);
    }

628
    // This method returns a DoubleConstant with the value 1.
629
    Node* one()
630 631 632 633 634 635 636 637
    {
        // Has m_constant1 been set up yet?
        if (m_constant1 == UINT_MAX) {
            // Search the constant pool for the value 1, if we find it, we can just reuse this!
            unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
            for (m_constant1 = 0; m_constant1 < numberOfConstants; ++m_constant1) {
                JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constant1);
                if (testMe.isInt32() && testMe.asInt32() == 1)
638
                    return getJSConstant(m_constant1);
639 640
            }

641 642
            // Add the value 1 to the CodeBlock's constants, and add a corresponding slot in m_constants.
            ASSERT(m_constants.size() == numberOfConstants);
643
            addConstant(jsNumber(1));
644 645
            m_constants.append(ConstantRecord());
            ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
646 647 648 649 650
        }

        // m_constant1 must refer to an entry in the CodeBlock's constant pool that has the integer value 1.
        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constant1).isInt32());
        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constant1).asInt32() == 1);
651
        return getJSConstant(m_constant1);
652
    }
653
    
654
    // This method returns a DoubleConstant with the value NaN.
655
    Node* constantNaN()
656 657 658 659 660 661 662 663 664 665 666 667 668 669 670
    {
        JSValue nan = jsNaN();
        
        // Has m_constantNaN been set up yet?
        if (m_constantNaN == UINT_MAX) {
            // Search the constant pool for the value NaN, if we find it, we can just reuse this!
            unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
            for (m_constantNaN = 0; m_constantNaN < numberOfConstants; ++m_constantNaN) {
                JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNaN);
                if (JSValue::encode(testMe) == JSValue::encode(nan))
                    return getJSConstant(m_constantNaN);
            }

            // Add the value nan to the CodeBlock's constants, and add a corresponding slot in m_constants.
            ASSERT(m_constants.size() == numberOfConstants);
671
            addConstant(nan);
672 673 674 675 676 677
            m_constants.append(ConstantRecord());
            ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
        }

        // m_constantNaN must refer to an entry in the CodeBlock's constant pool that has the value nan.
        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNaN).isDouble());
678
        ASSERT(std::isnan(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNaN).asDouble()));
679 680 681
        return getJSConstant(m_constantNaN);
    }
    
682
    // Return (creating on first use) the WeakJSConstant node for the given
    // cell; nodes are memoized per cell so repeated references share one node.
    Node* cellConstant(JSCell* cell)
    {
        HashMap<JSCell*, Node*>::AddResult result = m_cellConstantNodes.add(cell, nullptr);
        if (result.isNewEntry)
            result.iterator->value = addToGraph(WeakJSConstant, OpInfo(cell));
        
        return result.iterator->value;
    }
    
691
    // The inline call frame we are currently parsing into, or 0 at the
    // machine (outermost) code block.
    InlineCallFrame* inlineCallFrame()
    {
        return m_inlineStackTop->m_inlineCallFrame;
    }
695

696 697
    // Code origin for nodes emitted at the current bytecode index.
    CodeOrigin currentCodeOrigin()
    {
        return CodeOrigin(m_currentIndex, inlineCallFrame());
    }
    
    // True if the node's constant value is strongly proved in the current
    // call frame, so the parser may fold it.
    bool canFold(Node* node)
    {
        return node->isStronglyProvedConstantIn(inlineCallFrame());
    }
705 706 707

    // Our codegen for constant strict equality performs a bitwise comparison,
    // so we can only select values that have a consistent bitwise identity.
708 709 710 711 712 713 714 715
    bool isConstantForCompareStrictEq(Node* node)
    {
        if (!node->isConstant())
            return false;
        JSValue value = valueOfJSConstant(node);
        return value.isBoolean() || value.isUndefinedOrNull();
    }
    
716
    // addToGraph() variants: allocate a node at the current code origin and
    // append it to the current basic block. Phi nodes must not be added here.
    Node* addToGraph(NodeType op, Node* child1 = 0, Node* child2 = 0, Node* child3 = 0)
    {
        Node* result = m_graph.addNode(
            SpecNone, op, currentCodeOrigin(), Edge(child1), Edge(child2), Edge(child3));
        ASSERT(op != Phi);
        m_currentBlock->append(result);
        return result;
    }
    Node* addToGraph(NodeType op, Edge child1, Edge child2 = Edge(), Edge child3 = Edge())
    {
        Node* result = m_graph.addNode(
            SpecNone, op, currentCodeOrigin(), child1, child2, child3);
        ASSERT(op != Phi);
        m_currentBlock->append(result);
        return result;
    }
    Node* addToGraph(NodeType op, OpInfo info, Node* child1 = 0, Node* child2 = 0, Node* child3 = 0)
    {
        Node* result = m_graph.addNode(
            SpecNone, op, currentCodeOrigin(), info, Edge(child1), Edge(child2), Edge(child3));
        ASSERT(op != Phi);
        m_currentBlock->append(result);
        return result;
    }
    Node* addToGraph(NodeType op, OpInfo info1, OpInfo info2, Node* child1 = 0, Node* child2 = 0, Node* child3 = 0)
    {
        Node* result = m_graph.addNode(
            SpecNone, op, currentCodeOrigin(), info1, info2,
            Edge(child1), Edge(child2), Edge(child3));
        ASSERT(op != Phi);
        m_currentBlock->append(result);
        return result;
    }
749
    
750
    Node* addToGraph(Node::VarArgTag, NodeType op, OpInfo info1, OpInfo info2)
751
    {
752
        Node* result = m_graph.addNode(
753
            SpecNone, Node::VarArg, op, currentCodeOrigin(), info1, info2,
754
            m_graph.m_varArgChildren.size() - m_numPassedVarArgs, m_numPassedVarArgs);
755
        ASSERT(op != Phi);
756
        m_currentBlock->append(result);
757 758 759
        
        m_numPassedVarArgs = 0;
        
760
        return result;
761
    }
762

763
    void addVarArgChild(Node* child)
764
    {
765
        m_graph.m_varArgChildren.append(Edge(child));
766 767
        m_numPassedVarArgs++;
    }
768
    
769
    Node* addCall(Instruction* currentInstruction, NodeType op)
770
    {
771
        SpeculatedType prediction = getPrediction();
772
        
773 774
        addVarArgChild(get(currentInstruction[2].u.operand));
        int argCount = currentInstruction[3].u.operand;
775 776
        if (JSStack::CallFrameHeaderSize + (unsigned)argCount > m_parameterSlots)
            m_parameterSlots = JSStack::CallFrameHeaderSize + argCount;
777

778
        int registerOffset = -currentInstruction[4].u.operand;
779 780 781 782
        int dummyThisArgument = op == Call ? 0 : 1;
        for (int i = 0 + dummyThisArgument; i < argCount; ++i)
            addVarArgChild(get(registerOffset + argumentToOperand(i)));

783
        Node* call = addToGraph(Node::VarArg, op, OpInfo(0), OpInfo(prediction));
784
        set(currentInstruction[1].u.operand, call);
785 786
        return call;
    }
787
    
788
    Node* cellConstantWithStructureCheck(JSCell* object, Structure* structure)
789
    {
790 791 792
        Node* objectNode = cellConstant(object);
        addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(structure)), objectNode);
        return objectNode;
793 794
    }
    
795
    Node* cellConstantWithStructureCheck(JSCell* object)
796
    {
797
        return cellConstantWithStructureCheck(object, object->structure());
798
    }
799

800
    SpeculatedType getPredictionWithoutOSRExit(unsigned bytecodeIndex)
801
    {
802
        ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
803
        return m_inlineStackTop->m_profiledBlock->valueProfilePredictionForBytecodeOffset(locker, bytecodeIndex);
804 805
    }

806
    SpeculatedType getPrediction(unsigned bytecodeIndex)
807
    {
808
        SpeculatedType prediction = getPredictionWithoutOSRExit(bytecodeIndex);
809
        
810
        if (prediction == SpecNone) {
811 812 813 814 815
            // We have no information about what values this node generates. Give up
            // on executing this code, since we're likely to do more damage than good.
            addToGraph(ForceOSRExit);
        }
        
816
        return prediction;
817 818
    }
    
819
    SpeculatedType getPredictionWithoutOSRExit()
820
    {
821
        return getPredictionWithoutOSRExit(m_currentIndex);
822 823
    }
    
824
    SpeculatedType getPrediction()
825
    {
826
        return getPrediction(m_currentIndex);
827
    }
828
    
829
    ArrayMode getArrayMode(ArrayProfile* profile, Array::Action action)
830
    {
831
        ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
832 833
        profile->computeUpdatedPrediction(locker, m_inlineStackTop->m_profiledBlock);
        return ArrayMode::fromObserved(locker, profile, action, false);
834 835 836 837 838
    }
    
    // Convenience overload: assumes a read access.
    ArrayMode getArrayMode(ArrayProfile* profile)
    {
        return getArrayMode(profile, Array::Read);
    }
    
841
    ArrayMode getArrayModeConsideringSlowPath(ArrayProfile* profile, Array::Action action)
842
    {
843
        ConcurrentJITLocker