/*
 * Copyright (C) 2011, 2012, 2013 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 */

#include "config.h"
#include "DFGByteCodeParser.h"

#if ENABLE(DFG_JIT)

#include "ArrayConstructor.h"
#include "CallLinkStatus.h"
#include "CodeBlock.h"
#include "CodeBlockWithJITType.h"
#include "DFGArrayMode.h"
#include "DFGCapabilities.h"
#include "DFGJITCode.h"
#include "GetByIdStatus.h"
#include "JSActivation.h"
#include "Operations.h"
#include "PreciseJumpTargets.h"
#include "PutByIdStatus.h"
#include "StringConstructor.h"
#include <wtf/CommaPrinter.h>
#include <wtf/HashMap.h>
#include <wtf/MathExtras.h>
#include <wtf/StdLibExtras.h>

namespace JSC { namespace DFG {

class ConstantBufferKey {
public:
    ConstantBufferKey()
        : m_codeBlock(0)
        , m_index(0)
    {
    }
    
    ConstantBufferKey(WTF::HashTableDeletedValueType)
        : m_codeBlock(0)
        , m_index(1)
    {
    }
    
    ConstantBufferKey(CodeBlock* codeBlock, unsigned index)
        : m_codeBlock(codeBlock)
        , m_index(index)
    {
    }
    
    bool operator==(const ConstantBufferKey& other) const
    {
        return m_codeBlock == other.m_codeBlock
            && m_index == other.m_index;
    }
    
    unsigned hash() const
    {
        return WTF::PtrHash<CodeBlock*>::hash(m_codeBlock) ^ m_index;
    }
    
    bool isHashTableDeletedValue() const
    {
        return !m_codeBlock && m_index;
    }
    
    CodeBlock* codeBlock() const { return m_codeBlock; }
    unsigned index() const { return m_index; }
    
private:
    CodeBlock* m_codeBlock;
    unsigned m_index;
};

struct ConstantBufferKeyHash {
    static unsigned hash(const ConstantBufferKey& key) { return key.hash(); }
    static bool equal(const ConstantBufferKey& a, const ConstantBufferKey& b)
    {
        return a == b;
    }
    
    static const bool safeToCompareToEmptyOrDeleted = true;
};

} } // namespace JSC::DFG

namespace WTF {

template<typename T> struct DefaultHash;
template<> struct DefaultHash<JSC::DFG::ConstantBufferKey> {
    typedef JSC::DFG::ConstantBufferKeyHash Hash;
};

template<typename T> struct HashTraits;
template<> struct HashTraits<JSC::DFG::ConstantBufferKey> : SimpleClassHashTraits<JSC::DFG::ConstantBufferKey> { };

} // namespace WTF
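
// Example (illustrative only, not part of this file's logic): with the
// DefaultHash and HashTraits specializations above, ConstantBufferKey can be
// used directly as a hash table key. The names bufferMap, codeBlock,
// bufferIndex, and mergedIndex below are hypothetical.
//
//     WTF::HashMap<JSC::DFG::ConstantBufferKey, unsigned> bufferMap;
//     bufferMap.add(JSC::DFG::ConstantBufferKey(codeBlock, bufferIndex), mergedIndex);
//
// The HashTableDeletedValue constructor and isHashTableDeletedValue() let the
// table mark tombstones in place, so the (codeBlock == 0, index == 1) key is
// reserved and must never be used for a live entry.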

namespace JSC { namespace DFG {

// === ByteCodeParser ===
//
// This class is used to compile the dataflow graph from a CodeBlock.
class ByteCodeParser {
public:
    ByteCodeParser(Graph& graph)
        : m_vm(&graph.m_vm)
        , m_codeBlock(graph.m_codeBlock)
        , m_profiledBlock(graph.m_profiledBlock)
        , m_graph(graph)
        , m_currentBlock(0)
        , m_currentIndex(0)
        , m_constantUndefined(UINT_MAX)
        , m_constantNull(UINT_MAX)
        , m_constantNaN(UINT_MAX)
        , m_constant1(UINT_MAX)
        , m_constants(m_codeBlock->numberOfConstantRegisters())
        , m_numArguments(m_codeBlock->numParameters())
        , m_numLocals(m_codeBlock->m_numCalleeRegisters)
        , m_parameterSlots(0)
        , m_numPassedVarArgs(0)
        , m_inlineStackTop(0)
        , m_haveBuiltOperandMaps(false)
        , m_emptyJSValueIndex(UINT_MAX)
        , m_currentInstruction(0)
    {
        ASSERT(m_profiledBlock);
    }
    
    // Parse a full CodeBlock of bytecode.
    bool parse();
    
private:
    struct InlineStackEntry;

    // Just parse from m_currentIndex to the end of the current CodeBlock.
    void parseCodeBlock();

    // Helper for min and max.
    bool handleMinMax(int resultOperand, NodeType op, int registerOffset, int argumentCountIncludingThis);
    
    // Handle calls. This resolves issues surrounding inlining and intrinsics.
    void handleCall(Instruction* currentInstruction, NodeType op, CodeSpecializationKind);
    void emitFunctionChecks(const CallLinkStatus&, Node* callTarget, int registerOffset, CodeSpecializationKind);
    void emitArgumentPhantoms(int registerOffset, int argumentCountIncludingThis, CodeSpecializationKind);
    // Handle inlining. Return true if it succeeded, false if we need to plant a call.
    bool handleInlining(Node* callTargetNode, int resultOperand, const CallLinkStatus&, int registerOffset, int argumentCountIncludingThis, unsigned nextOffset, CodeSpecializationKind);
    // Handle intrinsic functions. Return true if it succeeded, false if we need to plant a call.
    bool handleIntrinsic(int resultOperand, Intrinsic, int registerOffset, int argumentCountIncludingThis, SpeculatedType prediction);
    bool handleTypedArrayConstructor(int resultOperand, InternalFunction*, int registerOffset, int argumentCountIncludingThis, TypedArrayType);
    bool handleConstantInternalFunction(int resultOperand, InternalFunction*, int registerOffset, int argumentCountIncludingThis, SpeculatedType prediction, CodeSpecializationKind);
    Node* handlePutByOffset(Node* base, unsigned identifier, PropertyOffset, Node* value);
    Node* handleGetByOffset(SpeculatedType, Node* base, unsigned identifierNumber, PropertyOffset);
    void handleGetByOffset(
        int destinationOperand, SpeculatedType, Node* base, unsigned identifierNumber,
        PropertyOffset);
    void handleGetById(
        int destinationOperand, SpeculatedType, Node* base, unsigned identifierNumber,
        const GetByIdStatus&);

    Node* getScope(bool skipTop, unsigned skipCount);
    
    // Prepare to parse a block.
    void prepareToParseBlock();
    // Parse a single basic block of bytecode instructions.
    bool parseBlock(unsigned limit);
    // Link block successors.
    void linkBlock(BasicBlock*, Vector<BasicBlock*>& possibleTargets);
    void linkBlocks(Vector<UnlinkedBlock>& unlinkedBlocks, Vector<BasicBlock*>& possibleTargets);
    
    VariableAccessData* newVariableAccessData(VirtualRegister operand, bool isCaptured)
    {
        ASSERT(!operand.isConstant());
        
        m_graph.m_variableAccessData.append(VariableAccessData(operand, isCaptured));
        return &m_graph.m_variableAccessData.last();
    }
    
    // Get/Set the operands/result of a bytecode instruction.
    Node* getDirect(VirtualRegister operand)
    {
        // Is this a constant?
        if (operand.isConstant()) {
            unsigned constant = operand.toConstantIndex();
            ASSERT(constant < m_constants.size());
            return getJSConstant(constant);
        }

        // Is this an argument?
        if (operand.isArgument())
            return getArgument(operand);

        // Must be a local.
        return getLocal(operand);
    }

    Node* get(VirtualRegister operand)
    {
        if (inlineCallFrame()) {
            if (!inlineCallFrame()->isClosureCall) {
                JSFunction* callee = inlineCallFrame()->calleeConstant();
                if (operand.offset() == JSStack::Callee)
                    return cellConstant(callee);
                if (operand.offset() == JSStack::ScopeChain)
                    return cellConstant(callee->scope());
            }
        } else if (operand.offset() == JSStack::Callee)
            return addToGraph(GetCallee);
        else if (operand.offset() == JSStack::ScopeChain)
            return addToGraph(GetMyScope);
        
        return getDirect(m_inlineStackTop->remapOperand(operand));
    }
    
    enum SetMode { NormalSet, ImmediateSet };
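    // With NormalSet, setDirect() queues a DelayedSetLocal on m_setLocalQueue and
    // returns 0; with ImmediateSet, it executes the store right away and returns
    // the resulting SetLocal node. Either way a MovHint is emitted first.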
    Node* setDirect(VirtualRegister operand, Node* value, SetMode setMode = NormalSet)
    {
        addToGraph(MovHint, OpInfo(operand.offset()), value);
        
        DelayedSetLocal delayed = DelayedSetLocal(operand, value);
        
        if (setMode == NormalSet) {
            m_setLocalQueue.append(delayed);
            return 0;
        }
        
        return delayed.execute(this, setMode);
    }

    Node* set(VirtualRegister operand, Node* value, SetMode setMode = NormalSet)
    {
        return setDirect(m_inlineStackTop->remapOperand(operand), value, setMode);
    }
    
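    // Attaches profiling feedback to a freshly created GetLocal: look up the lazy
    // operand value profile for (current bytecode index, operand) under the
    // profiled block's lock, and merge the predicted type into the node's
    // VariableAccessData.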
    Node* injectLazyOperandSpeculation(Node* node)
    {
        ASSERT(node->op() == GetLocal);
        ASSERT(node->codeOrigin.bytecodeIndex == m_currentIndex);
        ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
        LazyOperandValueProfileKey key(m_currentIndex, node->local());
        SpeculatedType prediction = m_inlineStackTop->m_lazyOperands.prediction(locker, key);
        node->variableAccessData()->predict(prediction);
        return node;
    }

    // Used in implementing get/set, above, where the operand is a local variable.
    Node* getLocal(VirtualRegister operand)
    {
        unsigned local = operand.toLocal();

        if (local < m_localWatchpoints.size()) {
            if (VariableWatchpointSet* set = m_localWatchpoints[local]) {
                if (JSValue value = set->inferredValue()) {
                    addToGraph(FunctionReentryWatchpoint, OpInfo(m_codeBlock->symbolTable()));
                    addToGraph(VariableWatchpoint, OpInfo(set));
                    // Note: this is very special from an OSR exit standpoint. We wouldn't be
                    // able to do this for most locals, but it works here because we're dealing
                    // with a flushed local. For most locals we would need to issue a GetLocal
                    // here and ensure that we have uses in DFG IR wherever there would have
                    // been uses in bytecode. Clearly this optimization does not do this. But
                    // that's fine, because we don't need to track liveness for captured
                    // locals, and this optimization only kicks in for captured locals.
                    return inferredConstant(value);
                }
            }
        }

        Node* node = m_currentBlock->variablesAtTail.local(local);
        bool isCaptured = m_codeBlock->isCaptured(operand, inlineCallFrame());
        
        // This has two goals: 1) link together variable access datas, and 2)
        // try to avoid creating redundant GetLocals. (1) is required for
        // correctness - no other phase will ensure that block-local variable
        // access data unification is done correctly. (2) is purely opportunistic
        // and is meant as a compile-time optimization only.
        
        VariableAccessData* variable;
        
        if (node) {
            variable = node->variableAccessData();
            variable->mergeIsCaptured(isCaptured);
            
            if (!isCaptured) {
                switch (node->op()) {
                case GetLocal:
                    return node;
                case SetLocal:
                    return node->child1().node();
                default:
                    break;
                }
            }
        } else
            variable = newVariableAccessData(operand, isCaptured);
        
        node = injectLazyOperandSpeculation(addToGraph(GetLocal, OpInfo(variable)));
        m_currentBlock->variablesAtTail.local(local) = node;
        return node;
    }

    Node* setLocal(VirtualRegister operand, Node* value, SetMode setMode = NormalSet)
    {
        unsigned local = operand.toLocal();
        bool isCaptured = m_codeBlock->isCaptured(operand, inlineCallFrame());
        
        if (setMode == NormalSet) {
            ArgumentPosition* argumentPosition = findArgumentPositionForLocal(operand);
            if (isCaptured || argumentPosition)
                flushDirect(operand, argumentPosition);
        }

        VariableAccessData* variableAccessData = newVariableAccessData(operand, isCaptured);
        variableAccessData->mergeStructureCheckHoistingFailed(
            m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache)
            || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCacheWatchpoint));
        variableAccessData->mergeCheckArrayHoistingFailed(
            m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadIndexingType));
        Node* node = addToGraph(SetLocal, OpInfo(variableAccessData), value);
        m_currentBlock->variablesAtTail.local(local) = node;
        return node;
    }

    // Used in implementing get/set, above, where the operand is an argument.
    Node* getArgument(VirtualRegister operand)
    {
        unsigned argument = operand.toArgument();
        ASSERT(argument < m_numArguments);
        
        Node* node = m_currentBlock->variablesAtTail.argument(argument);
        bool isCaptured = m_codeBlock->isCaptured(operand);

        VariableAccessData* variable;
        
        if (node) {
            variable = node->variableAccessData();
            variable->mergeIsCaptured(isCaptured);
            
            switch (node->op()) {
            case GetLocal:
                return node;
            case SetLocal:
                return node->child1().node();
            default:
                break;
            }
        } else
            variable = newVariableAccessData(operand, isCaptured);
        
        node = injectLazyOperandSpeculation(addToGraph(GetLocal, OpInfo(variable)));
        m_currentBlock->variablesAtTail.argument(argument) = node;
        return node;
    }
    Node* setArgument(VirtualRegister operand, Node* value, SetMode setMode = NormalSet)
    {
        unsigned argument = operand.toArgument();
        ASSERT(argument < m_numArguments);
        
        bool isCaptured = m_codeBlock->isCaptured(operand);

        VariableAccessData* variableAccessData = newVariableAccessData(operand, isCaptured);

        // Always flush arguments, except for 'this'. If 'this' is created by us,
        // then make sure that it's never unboxed.
        if (argument) {
            if (setMode == NormalSet)
                flushDirect(operand);
        } else if (m_codeBlock->specializationKind() == CodeForConstruct)
            variableAccessData->mergeShouldNeverUnbox(true);
        
        variableAccessData->mergeStructureCheckHoistingFailed(
            m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache)
            || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCacheWatchpoint));
        variableAccessData->mergeCheckArrayHoistingFailed(
            m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadIndexingType));
        Node* node = addToGraph(SetLocal, OpInfo(variableAccessData), value);
        m_currentBlock->variablesAtTail.argument(argument) = node;
        return node;
    }
    
    ArgumentPosition* findArgumentPositionForArgument(int argument)
    {
        InlineStackEntry* stack = m_inlineStackTop;
        while (stack->m_inlineCallFrame)
            stack = stack->m_caller;
        return stack->m_argumentPositions[argument];
    }
    
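    // Walks the inline stack outward looking for an inline frame in which the
    // operand names one of that frame's arguments. Operands that fall in a
    // frame's header, in its 'this' slot, or past its last argument make us keep
    // searching in the caller; returns 0 if no enclosing inline frame matches.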
    ArgumentPosition* findArgumentPositionForLocal(VirtualRegister operand)
    {
        for (InlineStackEntry* stack = m_inlineStackTop; ; stack = stack->m_caller) {
            InlineCallFrame* inlineCallFrame = stack->m_inlineCallFrame;
            if (!inlineCallFrame)
                break;
            if (operand.offset() < static_cast<int>(inlineCallFrame->stackOffset + JSStack::CallFrameHeaderSize))
                continue;
            if (operand.offset() == inlineCallFrame->stackOffset + CallFrame::thisArgumentOffset())
                continue;
            if (operand.offset() >= static_cast<int>(inlineCallFrame->stackOffset + CallFrame::thisArgumentOffset() + inlineCallFrame->arguments.size()))
                continue;
            int argument = VirtualRegister(operand.offset() - inlineCallFrame->stackOffset).toArgument();
            return stack->m_argumentPositions[argument];
        }
        return 0;
    }
    
    ArgumentPosition* findArgumentPosition(VirtualRegister operand)
    {
        if (operand.isArgument())
            return findArgumentPositionForArgument(operand.toArgument());
        return findArgumentPositionForLocal(operand);
    }

    void addConstant(JSValue value)
    {
        unsigned constantIndex = m_codeBlock->addConstantLazily();
        initializeLazyWriteBarrierForConstant(
            m_graph.m_plan.writeBarriers,
            m_codeBlock->constants()[constantIndex],
            m_codeBlock,
            constantIndex,
            m_codeBlock->ownerExecutable(),
            value);
    }
    
    void flush(VirtualRegister operand)
    {
        flushDirect(m_inlineStackTop->remapOperand(operand));
    }
    
    void flushDirect(VirtualRegister operand)
    {
        flushDirect(operand, findArgumentPosition(operand));
    }
    
    void flushDirect(VirtualRegister operand, ArgumentPosition* argumentPosition)
    {
        bool isCaptured = m_codeBlock->isCaptured(operand, inlineCallFrame());
        
        ASSERT(!operand.isConstant());
        
        Node* node = m_currentBlock->variablesAtTail.operand(operand);
        
        VariableAccessData* variable;
        
        if (node) {
            variable = node->variableAccessData();
            variable->mergeIsCaptured(isCaptured);
        } else
            variable = newVariableAccessData(operand, isCaptured);
        
        node = addToGraph(Flush, OpInfo(variable));
        m_currentBlock->variablesAtTail.operand(operand) = node;
        if (argumentPosition)
            argumentPosition->addVariable(variable);
    }

    void flush(InlineStackEntry* inlineStackEntry)
    {
        int numArguments;
        if (InlineCallFrame* inlineCallFrame = inlineStackEntry->m_inlineCallFrame) {
            numArguments = inlineCallFrame->arguments.size();
            if (inlineCallFrame->isClosureCall) {
                flushDirect(inlineStackEntry->remapOperand(VirtualRegister(JSStack::Callee)));
                flushDirect(inlineStackEntry->remapOperand(VirtualRegister(JSStack::ScopeChain)));
            }
        } else
            numArguments = inlineStackEntry->m_codeBlock->numParameters();
        for (unsigned argument = numArguments; argument-- > 1;)
            flushDirect(inlineStackEntry->remapOperand(virtualRegisterForArgument(argument)));
        for (int local = 0; local < inlineStackEntry->m_codeBlock->m_numVars; ++local) {
            if (!inlineStackEntry->m_codeBlock->isCaptured(virtualRegisterForLocal(local)))
                continue;
            flushDirect(inlineStackEntry->remapOperand(virtualRegisterForLocal(local)));
        }
    }

    void flushAllArgumentsAndCapturedVariablesInInlineStack()
    {
        for (InlineStackEntry* inlineStackEntry = m_inlineStackTop; inlineStackEntry; inlineStackEntry = inlineStackEntry->m_caller)
            flush(inlineStackEntry);
    }

    void flushArgumentsAndCapturedVariables()
    {
        flush(m_inlineStackTop);
    }

    // NOTE: Only use this to construct constants that arise from non-speculative
    // constant folding. That is, it would be a bad idea to create constants this
    // way if we had constant field inference, since the bytecode parser's folding
    // doesn't handle liveness preservation.
    Node* getJSConstantForValue(JSValue constantValue, NodeFlags flags = NodeIsStaticConstant)
    {
        unsigned constantIndex;
        if (!m_codeBlock->findConstant(constantValue, constantIndex)) {
            addConstant(constantValue);
            m_constants.append(ConstantRecord());
        }
        
        ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
        
        return getJSConstant(constantIndex, flags);
    }

    Node* getJSConstant(unsigned constant, NodeFlags flags = NodeIsStaticConstant)
    {
        Node* node = m_constants[constant].asJSValue;
        if (node)
            return node;

        Node* result = addToGraph(JSConstant, OpInfo(constant));
        result->mergeFlags(flags);
        m_constants[constant].asJSValue = result;
        return result;
    }

    // Helper functions to get/set the this value.
    Node* getThis()
    {
        return get(m_inlineStackTop->m_codeBlock->thisRegister());
    }

    void setThis(Node* value)
    {
        set(m_inlineStackTop->m_codeBlock->thisRegister(), value);
    }

    // Convenience methods for checking nodes for constants.
    bool isJSConstant(Node* node)
    {
        return node->op() == JSConstant;
    }
    bool isInt32Constant(Node* node)
    {
        return isJSConstant(node) && valueOfJSConstant(node).isInt32();
    }
    // Convenience methods for getting constant values.
    JSValue valueOfJSConstant(Node* node)
    {
        ASSERT(isJSConstant(node));
        return m_codeBlock->getConstant(FirstConstantRegisterIndex + node->constantNumber());
    }
    int32_t valueOfInt32Constant(Node* node)
    {
        ASSERT(isInt32Constant(node));
        return valueOfJSConstant(node).asInt32();
    }
    
    // This method returns a JSConstant with the value 'undefined'.
    Node* constantUndefined()
    {
        // Has m_constantUndefined been set up yet?
        if (m_constantUndefined == UINT_MAX) {
            // Search the constant pool for undefined; if we find it, we can just reuse it!
            unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
            for (m_constantUndefined = 0; m_constantUndefined < numberOfConstants; ++m_constantUndefined) {
                JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantUndefined);
                if (testMe.isUndefined())
                    return getJSConstant(m_constantUndefined);
            }

            // Add undefined to the CodeBlock's constants, and add a corresponding slot in m_constants.
            ASSERT(m_constants.size() == numberOfConstants);
            addConstant(jsUndefined());
            m_constants.append(ConstantRecord());
            ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
        }

        // m_constantUndefined must refer to an entry in the CodeBlock's constant pool that has the value 'undefined'.
        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantUndefined).isUndefined());
        return getJSConstant(m_constantUndefined);
    }

    // This method returns a JSConstant with the value 'null'.
    Node* constantNull()
    {
        // Has m_constantNull been set up yet?
        if (m_constantNull == UINT_MAX) {
            // Search the constant pool for null; if we find it, we can just reuse it!
            unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
            for (m_constantNull = 0; m_constantNull < numberOfConstants; ++m_constantNull) {
                JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNull);
                if (testMe.isNull())
                    return getJSConstant(m_constantNull);
            }

            // Add null to the CodeBlock's constants, and add a corresponding slot in m_constants.
            ASSERT(m_constants.size() == numberOfConstants);
            addConstant(jsNull());
            m_constants.append(ConstantRecord());
            ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
        }

        // m_constantNull must refer to an entry in the CodeBlock's constant pool that has the value 'null'.
        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNull).isNull());
        return getJSConstant(m_constantNull);
    }

    // This method returns a JSConstant with the integer value 1.
    Node* one()
    {
        // Has m_constant1 been set up yet?
        if (m_constant1 == UINT_MAX) {
            // Search the constant pool for the value 1; if we find it, we can just reuse it!
            unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
            for (m_constant1 = 0; m_constant1 < numberOfConstants; ++m_constant1) {
                JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constant1);
                if (testMe.isInt32() && testMe.asInt32() == 1)
                    return getJSConstant(m_constant1);
            }

            // Add the value 1 to the CodeBlock's constants, and add a corresponding slot in m_constants.
            ASSERT(m_constants.size() == numberOfConstants);
            addConstant(jsNumber(1));
            m_constants.append(ConstantRecord());
            ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
        }

        // m_constant1 must refer to an entry in the CodeBlock's constant pool that has the integer value 1.
        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constant1).isInt32());
        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constant1).asInt32() == 1);
        return getJSConstant(m_constant1);
    }
    
    // This method returns a JSConstant with the value NaN.
    Node* constantNaN()
    {
        JSValue nan = jsNaN();
        
        // Has m_constantNaN been set up yet?
        if (m_constantNaN == UINT_MAX) {
            // Search the constant pool for the value NaN; if we find it, we can just reuse it!
            unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
            for (m_constantNaN = 0; m_constantNaN < numberOfConstants; ++m_constantNaN) {
                JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNaN);
                if (JSValue::encode(testMe) == JSValue::encode(nan))
                    return getJSConstant(m_constantNaN);
            }

            // Add the value NaN to the CodeBlock's constants, and add a corresponding slot in m_constants.
            ASSERT(m_constants.size() == numberOfConstants);
            addConstant(nan);
            m_constants.append(ConstantRecord());
            ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
        }

        // m_constantNaN must refer to an entry in the CodeBlock's constant pool that has the value NaN.
        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNaN).isDouble());
        ASSERT(std::isnan(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNaN).asDouble()));
        return getJSConstant(m_constantNaN);
    }
    
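    // Returns the memoized WeakJSConstant node for a cell, creating it on first
    // use. This gives each distinct cell exactly one node in the graph.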
    Node* cellConstant(JSCell* cell)
    {
        HashMap<JSCell*, Node*>::AddResult result = m_cellConstantNodes.add(cell, nullptr);
        if (result.isNewEntry)
            result.iterator->value = addToGraph(WeakJSConstant, OpInfo(cell));
        
        return result.iterator->value;
    }
    
    Node* inferredConstant(JSValue value)
    {
        if (value.isCell())
            return cellConstant(value.asCell());
        return getJSConstantForValue(value, 0);
    }
    
    InlineCallFrame* inlineCallFrame()
    {
        return m_inlineStackTop->m_inlineCallFrame;
    }

    CodeOrigin currentCodeOrigin()
    {
        return CodeOrigin(m_currentIndex, inlineCallFrame());
    }
    
    bool canFold(Node* node)
    {
        if (Options::validateFTLOSRExitLiveness()) {
            // The static folding that the bytecode parser does results in the DFG
            // being able to do some DCE that the bytecode liveness analysis would
            // miss. Hence, we disable the static folding if we're validating FTL OSR
            // exit liveness. This may be brutish, but this validator is powerful
            // enough that it's worth it.
            return false;
        }
        
        return node->isStronglyProvedConstantIn(inlineCallFrame());
    }

    // Our codegen for constant strict equality performs a bitwise comparison,
    // so we can only select values that have a consistent bitwise identity.
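    // Booleans, undefined, and null qualify because each has a single canonical
    // bit pattern in the JSValue encoding; numbers do not (the same value can be
    // encoded as an int32 or as a double), and strings compare by content.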
    bool isConstantForCompareStrictEq(Node* node)
    {
        if (!node->isConstant())
            return false;
        JSValue value = valueOfJSConstant(node);
        return value.isBoolean() || value.isUndefinedOrNull();
    }
    
    Node* addToGraph(NodeType op, Node* child1 = 0, Node* child2 = 0, Node* child3 = 0)
    {
        Node* result = m_graph.addNode(
            SpecNone, op, currentCodeOrigin(), Edge(child1), Edge(child2), Edge(child3));
        ASSERT(op != Phi);
        m_currentBlock->append(result);
        return result;
    }
    Node* addToGraph(NodeType op, Edge child1, Edge child2 = Edge(), Edge child3 = Edge())
    {
        Node* result = m_graph.addNode(
            SpecNone, op, currentCodeOrigin(), child1, child2, child3);
        ASSERT(op != Phi);
        m_currentBlock->append(result);
        return result;
    }
    Node* addToGraph(NodeType op, OpInfo info, Node* child1 = 0, Node* child2 = 0, Node* child3 = 0)
    {
        Node* result = m_graph.addNode(
            SpecNone, op, currentCodeOrigin(), info, Edge(child1), Edge(child2), Edge(child3));
        ASSERT(op != Phi);
        m_currentBlock->append(result);
        return result;
    }
    Node* addToGraph(NodeType op, OpInfo info1, OpInfo info2, Node* child1 = 0, Node* child2 = 0, Node* child3 = 0)
    {
        Node* result = m_graph.addNode(
            SpecNone, op, currentCodeOrigin(), info1, info2,
            Edge(child1), Edge(child2), Edge(child3));
        ASSERT(op != Phi);
        m_currentBlock->append(result);
        return result;
    }
    
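    // Var-arg construction protocol: callers first push each child with
    // addVarArgChild() (which appends to m_graph.m_varArgChildren and bumps
    // m_numPassedVarArgs), then call this overload, which adopts the last
    // m_numPassedVarArgs children and resets the counter. addCall() below is a
    // complete example of the pattern.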
    Node* addToGraph(Node::VarArgTag, NodeType op, OpInfo info1, OpInfo info2)
    {
        Node* result = m_graph.addNode(
            SpecNone, Node::VarArg, op, currentCodeOrigin(), info1, info2,
            m_graph.m_varArgChildren.size() - m_numPassedVarArgs, m_numPassedVarArgs);
        ASSERT(op != Phi);
        m_currentBlock->append(result);
        
        m_numPassedVarArgs = 0;
        
        return result;
    }

    void addVarArgChild(Node* child)
    {
        m_graph.m_varArgChildren.append(Edge(child));
        m_numPassedVarArgs++;
    }
    
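    // Builds a var-arg call node straight from a call-family bytecode: operand 1
    // is the result register, operand 2 the callee, operand 3 the argument count
    // (including 'this'), and operand 4 the register offset. When op is not Call
    // (e.g. Construct), the 'this' slot is skipped (dummyThisArgument).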
    Node* addCall(Instruction* currentInstruction, NodeType op)
    {
        SpeculatedType prediction = getPrediction();
        
        addVarArgChild(get(VirtualRegister(currentInstruction[2].u.operand)));
        int argCount = currentInstruction[3].u.operand;
        if (JSStack::ThisArgument + (unsigned)argCount > m_parameterSlots)
            m_parameterSlots = JSStack::ThisArgument + argCount;

        int registerOffset = -currentInstruction[4].u.operand;
        int dummyThisArgument = op == Call ? 0 : 1;
        for (int i = 0 + dummyThisArgument; i < argCount; ++i)
            addVarArgChild(get(virtualRegisterForArgument(i, registerOffset)));

        Node* call = addToGraph(Node::VarArg, op, OpInfo(0), OpInfo(prediction));
        set(VirtualRegister(currentInstruction[1].u.operand), call);
        return call;
    }
    
    Node* cellConstantWithStructureCheck(JSCell* object, Structure* structure)
    {
        Node* objectNode = cellConstant(object);
        addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(structure)), objectNode);
        return objectNode;
    }
    
    Node* cellConstantWithStructureCheck(JSCell* object)
    {
        return cellConstantWithStructureCheck(object, object->structure());
    }

    SpeculatedType getPredictionWithoutOSRExit(unsigned bytecodeIndex)
    {
        ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
        return m_inlineStackTop->m_profiledBlock->valueProfilePredictionForBytecodeOffset(locker, bytecodeIndex);
    }

    SpeculatedType getPrediction(unsigned bytecodeIndex)
    {
        SpeculatedType prediction = getPredictionWithoutOSRExit(bytecodeIndex);
        
        if (prediction == SpecNone) {
            // We have no information about what values this node generates. Give up
            // on executing this code, since we're likely to do more damage than good.
            addToGraph(ForceOSRExit);
        }
        
        return prediction;
    }
    
    SpeculatedType getPredictionWithoutOSRExit()
    {
        return getPredictionWithoutOSRExit(m_currentIndex);
    }
    
    SpeculatedType getPrediction()
    {
        return getPrediction(m_currentIndex);
    }
    
    ArrayMode getArrayMode(ArrayProfile* profile, Array::Action action)
    {
        ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
        profile->computeUpdatedPrediction(locker, m_inlineStackTop->m_profiledBlock);
        return ArrayMode::fromObserved(locker, profile, action, false);