/*
 * Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_VM_OPTO_COMPILE_HPP
#define SHARE_VM_OPTO_COMPILE_HPP

#include "asm/codeBuffer.hpp"
#include "ci/compilerInterface.hpp"
#include "code/debugInfoRec.hpp"
#include "code/exceptionHandlerTable.hpp"
#include "compiler/compilerOracle.hpp"
#include "compiler/compileBroker.hpp"
#include "libadt/dict.hpp"
#include "libadt/port.hpp"
#include "libadt/vectset.hpp"
#include "memory/resourceArea.hpp"
#include "opto/idealGraphPrinter.hpp"
#include "opto/phasetype.hpp"
#include "opto/phase.hpp"
#include "opto/regmask.hpp"
#include "runtime/deoptimization.hpp"
#include "runtime/vmThread.hpp"
#include "trace/tracing.hpp"

class Block;
class Bundle;
class C2Compiler;
class CallGenerator;
class ConnectionGraph;
class InlineTree;
class Int_Array;
class Matcher;
class MachConstantNode;
class MachConstantBaseNode;
class MachNode;
class MachOper;
class MachSafePointNode;
class Node;
class Node_Array;
class Node_Notes;
class OptoReg;
class PhaseCFG;
class PhaseGVN;
class PhaseIterGVN;
class PhaseRegAlloc;
class PhaseCCP;
class PhaseCCP_DCE;
class RootNode;
class relocInfo;
class Scope;
class StartNode;
class SafePointNode;
class JVMState;
class Type;
class TypeData;
class TypePtr;
class TypeOopPtr;
class TypeFunc;
class Unique_Node_List;
class nmethod;
class WarmCallInfo;
class Node_Stack;
struct Final_Reshape_Counts;

//------------------------------Compile----------------------------------------
// This class defines a top-level Compiler invocation.

class Compile : public Phase {
  friend class VMStructs;

 public:
  // Fixed alias indexes.  (See also MergeMemNode.)
  enum {
    AliasIdxTop = 1,  // pseudo-index, aliases to nothing (used as sentinel value)
    AliasIdxBot = 2,  // pseudo-index, aliases to everything
    AliasIdxRaw = 3   // hard-wired index for TypeRawPtr::BOTTOM
  };

  // Variant of TraceTime(NULL, &_t_accumulator, TimeCompiler);
  // Integrated with logging.  If logging is turned on, and dolog is true,
  // then brackets are put into the log, with time stamps and node counts.
  // (The time collection itself is always conditionalized on TimeCompiler.)
  class TracePhase : public TraceTime {
   private:
    Compile*    C;
    CompileLog* _log;
    const char* _phase_name;
    bool _dolog;
   public:
    TracePhase(const char* name, elapsedTimer* accumulator, bool dolog);
    ~TracePhase();
  };
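
  // Illustrative sketch (not part of the original header; the timer variable
  // name is hypothetical): a compiler phase typically brackets its work with
  // a stack-allocated TracePhase so the elapsed time lands in an accumulator
  // and, when logging is on and dolog is true, open/close entries with node
  // counts appear in the compile log:
  //
  //   {
  //     TracePhase tp("examplePhase", &_t_example_phase, TimeCompiler);
  //     // ... phase work ...
  //   } // destructor stops the timer and closes the log bracket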

  // Information per category of alias (memory slice)
  class AliasType {
   private:
    friend class Compile;

    int             _index;         // unique index, used with MergeMemNode
    const TypePtr*  _adr_type;      // normalized address type
    ciField*        _field;         // relevant instance field, or null if none
    const Type*     _element;       // relevant array element type, or null if none
    bool            _is_rewritable; // false if the memory is write-once only
    int             _general_index; // if this type is an instance, the general
                                    // type that this is an instance of

    void Init(int i, const TypePtr* at);

   public:
    int             index()         const { return _index; }
    const TypePtr*  adr_type()      const { return _adr_type; }
    ciField*        field()         const { return _field; }
    const Type*     element()       const { return _element; }
    bool            is_rewritable() const { return _is_rewritable; }
    bool            is_volatile()   const { return (_field ? _field->is_volatile() : false); }
    int             general_index() const { return (_general_index != 0) ? _general_index : _index; }

    void set_rewritable(bool z) { _is_rewritable = z; }
    void set_field(ciField* f) {
      assert(!_field,"");
      _field = f;
      if (f->is_final() || f->is_stable()) {
        // In the case of @Stable, multiple writes are possible but may be assumed to be no-ops.
        _is_rewritable = false;
      }
    }
    void set_element(const Type* e) {
      assert(_element == NULL, "");
      _element = e;
    }

    void print_on(outputStream* st) PRODUCT_RETURN;
  };
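
  // Illustrative sketch (not part of the original header; local variable
  // names are hypothetical): callers usually reach an AliasType by mapping a
  // normalized address type to its memory-slice index via the accessors
  // declared further below:
  //
  //   const TypePtr* at  = adr_type;                   // some flattened address type
  //   int            idx = C->get_alias_index(at);     // e.g. AliasIdxRaw or a field slice
  //   Compile::AliasType* t = C->alias_type(idx);
  //   bool write_once = !t->is_rewritable();           // e.g. final or @Stable field slices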

  enum {
    logAliasCacheSize = 6,
    AliasCacheSize = (1<<logAliasCacheSize)
  };
  struct AliasCacheEntry { const TypePtr* _adr_type; int _index; };  // simple duple type
  enum {
    trapHistLength = MethodData::_trap_hist_limit
  };

  // Constant entry of the constant table.
  class Constant {
  private:
    BasicType _type;
    union {
      jvalue    _value;
      Metadata* _metadata;
    } _v;
    int       _offset;         // offset of this constant (in bytes) relative to the constant table base.
    float     _freq;
    bool      _can_be_reused;  // true (default) if the value can be shared with other users.

  public:
    Constant() : _type(T_ILLEGAL), _offset(-1), _freq(0.0f), _can_be_reused(true) { _v._value.l = 0; }
    Constant(BasicType type, jvalue value, float freq = 0.0f, bool can_be_reused = true) :
      _type(type),
      _offset(-1),
      _freq(freq),
      _can_be_reused(can_be_reused)
    {
      assert(type != T_METADATA, "wrong constructor");
      _v._value = value;
    }
    Constant(Metadata* metadata, bool can_be_reused = true) :
      _type(T_METADATA),
      _offset(-1),
      _freq(0.0f),
      _can_be_reused(can_be_reused)
    {
      _v._metadata = metadata;
    }

    bool operator==(const Constant& other);

    BasicType type()      const    { return _type; }

    jlong   get_jlong()   const    { return _v._value.j; }
    jfloat  get_jfloat()  const    { return _v._value.f; }
    jdouble get_jdouble() const    { return _v._value.d; }
    jobject get_jobject() const    { return _v._value.l; }

    Metadata* get_metadata() const { return _v._metadata; }

    int         offset()  const    { return _offset; }
    void    set_offset(int offset) {        _offset = offset; }

    float       freq()    const    { return _freq;         }
    void    inc_freq(float freq)   {        _freq += freq; }

    bool    can_be_reused() const  { return _can_be_reused; }
  };

  // Constant table.
  class ConstantTable {
  private:
    GrowableArray<Constant> _constants;          // Constants of this table.
    int                     _size;               // Size in bytes the emitted constant table takes (including padding).
    int                     _table_base_offset;  // Offset of the table base that gets added to the constant offsets.
    int                     _nof_jump_tables;    // Number of jump-tables in this constant table.

    static int qsort_comparator(Constant* a, Constant* b);

    // We use negative frequencies to keep the order of the
    // jump-tables in which they were added.  Otherwise we get into
    // trouble with relocation.
    float next_jump_table_freq() { return -1.0f * (++_nof_jump_tables); }

  public:
    ConstantTable() :
      _size(-1),
      _table_base_offset(-1),  // We can use -1 here since the constant table is always bigger than 2 bytes (-(size / 2), see MachConstantBaseNode::emit).
      _nof_jump_tables(0)
    {}

    int size() const { assert(_size != -1, "not calculated yet"); return _size; }

    int calculate_table_base_offset() const;  // AD specific
    void set_table_base_offset(int x)  { assert(_table_base_offset == -1 || x == _table_base_offset, "can't change"); _table_base_offset = x; }
    int      table_base_offset() const { assert(_table_base_offset != -1, "not set yet");                      return _table_base_offset; }

    void emit(CodeBuffer& cb);

    // Returns the offset of the last entry (the top) of the constant table.
    int  top_offset() const { assert(_constants.top().offset() != -1, "not bound yet"); return _constants.top().offset(); }

    void calculate_offsets_and_size();
    int  find_offset(Constant& con) const;

    void     add(Constant& con);
    Constant add(MachConstantNode* n, BasicType type, jvalue value);
    Constant add(Metadata* metadata);
    Constant add(MachConstantNode* n, MachOper* oper);
    Constant add(MachConstantNode* n, jfloat f) {
      jvalue value; value.f = f;
      return add(n, T_FLOAT, value);
    }
    Constant add(MachConstantNode* n, jdouble d) {
      jvalue value; value.d = d;
      return add(n, T_DOUBLE, value);
    }

    // Jump-table
    Constant  add_jump_table(MachConstantNode* n);
    void     fill_jump_table(CodeBuffer& cb, MachConstantNode* n, GrowableArray<Label*> labels) const;
  };
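
  // Illustrative sketch (not part of the original header; variable names are
  // hypothetical, and the displacement arithmetic is an assumption): a machine
  // node with a constant operand would typically register its value during
  // output and read back the offset once calculate_offsets_and_size() has run:
  //
  //   Compile::Constant con = C->constant_table().add(mach_con_node, 2.5f);  // T_FLOAT entry
  //   // ... after offsets have been calculated ...
  //   int disp = C->constant_table().table_base_offset() + con.offset();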

 private:
  // Fixed parameters to this compilation.
  const int             _compile_id;
  const bool            _save_argument_registers; // save/restore arg regs for trampolines
  const bool            _subsume_loads;         // Load can be matched as part of a larger op.
  const bool            _do_escape_analysis;    // Do escape analysis.
  const bool            _eliminate_boxing;      // Do boxing elimination.
  ciMethod*             _method;                // The method being compiled.
  int                   _entry_bci;             // entry bci for osr methods.
  const TypeFunc*       _tf;                    // My kind of signature
  InlineTree*           _ilt;                   // Ditto (temporary).
  address               _stub_function;         // VM entry for stub being compiled, or NULL
  const char*           _stub_name;             // Name of stub or adapter being compiled, or NULL
  address               _stub_entry_point;      // Compile code entry for generated stub, or NULL

  // Control of this compilation.
  int                   _num_loop_opts;         // Number of iterations for doing loop optimizations
  int                   _max_inline_size;       // Max inline size for this compilation
  int                   _freq_inline_size;      // Max hot method inline size for this compilation
  int                   _fixed_slots;           // count of frame slots not allocated by the register
                                                // allocator i.e. locks, original deopt pc, etc.
  // For deopt
  int                   _orig_pc_slot;
  int                   _orig_pc_slot_offset_in_bytes;

  int                   _major_progress;        // Count of something big happening
  bool                  _inlining_progress;     // progress doing incremental inlining?
  bool                  _inlining_incrementally;// Are we doing incremental inlining (post parse)
  bool                  _has_loops;             // True if the method _may_ have some loops
  bool                  _has_split_ifs;         // True if the method _may_ have some split-if
  bool                  _has_unsafe_access;     // True if the method _may_ produce faults in unsafe loads or stores.
  bool                  _has_stringbuilder;     // True if StringBuffers or StringBuilders are allocated
  bool                  _has_boxed_value;       // True if a boxed object is allocated
  int                   _max_vector_size;       // Maximum size of generated vectors
  uint                  _trap_hist[trapHistLength];  // Cumulative traps
  bool                  _trap_can_recompile;    // Have we emitted a recompiling trap?
  uint                  _decompile_count;       // Cumulative decompilation counts.
  bool                  _do_inlining;           // True if we intend to do inlining
  bool                  _do_scheduling;         // True if we intend to do scheduling
  bool                  _do_freq_based_layout;  // True if we intend to do frequency based block layout
  bool                  _do_count_invocations;  // True if we generate code to count invocations
  bool                  _do_method_data_update; // True if we generate code to update MethodData*s
  int                   _AliasLevel;            // Locally-adjusted version of AliasLevel flag.
  bool                  _print_assembly;        // True if we should dump assembly code for this compilation
  bool                  _print_inlining;        // True if we should print inlining for this compilation
  bool                  _print_intrinsics;      // True if we should print intrinsics for this compilation
#ifndef PRODUCT
  bool                  _trace_opto_output;
  bool                  _parsed_irreducible_loop; // True if ciTypeFlow detected irreducible loops during parsing
#endif

  // JSR 292
  bool                  _has_method_handle_invokes; // True if this method has MethodHandle invokes.

  // Compilation environment.
  Arena                 _comp_arena;            // Arena with lifetime equivalent to Compile
  ciEnv*                _env;                   // CI interface
  CompileLog*           _log;                   // from CompilerThread
  const char*           _failure_reason;        // for record_failure/failing pattern
  GrowableArray<CallGenerator*>* _intrinsics;   // List of intrinsics.
  GrowableArray<Node*>* _macro_nodes;           // List of nodes which need to be expanded before matching.
  GrowableArray<Node*>* _predicate_opaqs;       // List of Opaque1 nodes for the loop predicates.
  GrowableArray<Node*>* _expensive_nodes;       // List of nodes that are expensive to compute and that we'd better not let the GVN freely common
  ConnectionGraph*      _congraph;
#ifndef PRODUCT
  IdealGraphPrinter*    _printer;
#endif


  // Node management
  uint                  _unique;                // Counter for unique Node indices
  VectorSet             _dead_node_list;        // Set of dead nodes
  uint                  _dead_node_count;       // Number of dead nodes; VectorSet::Size() is O(N).
                                                // So use this to keep count and make the call O(1).
  debug_only(static int _debug_idx;)            // Monotonic counter (not reset), use -XX:BreakAtNode=<idx>
  Arena                 _node_arena;            // Arena for new-space Nodes
  Arena                 _old_arena;             // Arena for old-space Nodes, lifetime during xform
  RootNode*             _root;                  // Unique root of compilation, or NULL after bail-out.
  Node*                 _top;                   // Unique top node.  (Reset by various phases.)

  Node*                 _immutable_memory;      // Initial memory state

  Node*                 _recent_alloc_obj;
  Node*                 _recent_alloc_ctl;

  // Constant table
  ConstantTable         _constant_table;        // The constant table for this compile.
  MachConstantBaseNode* _mach_constant_base_node;  // Constant table base node singleton.


  // Blocked array of debugging and profiling information,
  // tracked per node.
  enum { _log2_node_notes_block_size = 8,
         _node_notes_block_size = (1<<_log2_node_notes_block_size)
  };
  GrowableArray<Node_Notes*>* _node_note_array;
  Node_Notes*           _default_node_notes;  // default notes for new nodes

  // After parsing and every bulk phase we hang onto the Root instruction.
  // The RootNode instruction is where the whole program begins.  It produces
  // the initial Control and BOTTOM for everybody else.

  // Type management
  Arena                 _Compile_types;         // Arena for all types
  Arena*                _type_arena;            // Alias for _Compile_types except in Initialize_shared()
  Dict*                 _type_dict;             // Intern table
  void*                 _type_hwm;              // Last allocation (see Type::operator new/delete)
  size_t                _type_last_size;        // Last allocation size (see Type::operator new/delete)
  ciMethod*             _last_tf_m;             // Cache for
  const TypeFunc*       _last_tf;               //  TypeFunc::make
  AliasType**           _alias_types;           // List of alias types seen so far.
  int                   _num_alias_types;       // Logical length of _alias_types
  int                   _max_alias_types;       // Physical length of _alias_types
  AliasCacheEntry       _alias_cache[AliasCacheSize]; // Gets aliases w/o data structure walking

  // Parsing, optimization
  PhaseGVN*             _initial_gvn;           // Results of parse-time PhaseGVN
  Unique_Node_List*     _for_igvn;              // Initial work-list for next round of Iterative GVN
  WarmCallInfo*         _warm_calls;            // Sorted work-list for heat-based inlining.

  GrowableArray<CallGenerator*> _late_inlines;        // List of CallGenerators to be revisited after
                                                      // main parsing has finished.
  GrowableArray<CallGenerator*> _string_late_inlines; // same but for string operations

  GrowableArray<CallGenerator*> _boxing_late_inlines; // same but for boxing operations

  int                           _late_inlines_pos;    // Where in the queue should the next late inlining candidate go (emulate depth first inlining)
  uint                          _number_of_mh_late_inlines; // number of method handle late inlining still pending


  // Inlining may not happen in parse order which would make
  // PrintInlining output confusing. Keep track of PrintInlining
  // pieces in order.
  class PrintInliningBuffer : public ResourceObj {
   private:
    CallGenerator* _cg;
    stringStream* _ss;

   public:
    PrintInliningBuffer()
      : _cg(NULL) { _ss = new stringStream(); }

    stringStream* ss() const { return _ss; }
    CallGenerator* cg() const { return _cg; }
    void set_cg(CallGenerator* cg) { _cg = cg; }
  };

  GrowableArray<PrintInliningBuffer>* _print_inlining_list;
  int _print_inlining_idx;

  // Only keep nodes in the expensive node list that need to be optimized
  void cleanup_expensive_nodes(PhaseIterGVN &igvn);
  // Use for sorting expensive nodes to bring similar nodes together
  static int cmp_expensive_nodes(Node** n1, Node** n2);
  // Expensive nodes list already sorted?
  bool expensive_nodes_sorted() const;
  // Remove the speculative part of types and clean up the graph
  void remove_speculative_types(PhaseIterGVN &igvn);

  // Are we within a PreserveJVMState block?
  int _preserve_jvm_state;

 public:

  outputStream* print_inlining_stream() const {
    return _print_inlining_list->adr_at(_print_inlining_idx)->ss();
  }

  void print_inlining_skip(CallGenerator* cg) {
    if (_print_inlining) {
      _print_inlining_list->adr_at(_print_inlining_idx)->set_cg(cg);
      _print_inlining_idx++;
      _print_inlining_list->insert_before(_print_inlining_idx, PrintInliningBuffer());
    }
  }

  void print_inlining_insert(CallGenerator* cg) {
    if (_print_inlining) {
      for (int i = 0; i < _print_inlining_list->length(); i++) {
        if (_print_inlining_list->adr_at(i)->cg() == cg) {
          _print_inlining_list->insert_before(i+1, PrintInliningBuffer());
          _print_inlining_idx = i+1;
          _print_inlining_list->adr_at(i)->set_cg(NULL);
          return;
        }
      }
      ShouldNotReachHere();
    }
  }

  void print_inlining(ciMethod* method, int inline_level, int bci, const char* msg = NULL) {
    stringStream ss;
    CompileTask::print_inlining(&ss, method, inline_level, bci, msg);
    print_inlining_stream()->print(ss.as_string());
  }
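
  // Illustrative sketch (not part of the original header; the arguments are
  // hypothetical): inlining decisions are reported through the buffer above
  // rather than directly to tty, so the output stays in parse order:
  //
  //   C->print_inlining(callee, inline_level, bci, "inline (hot)");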

 private:
  // Matching, CFG layout, allocation, code generation
  PhaseCFG*             _cfg;                   // Results of CFG finding
  bool                  _select_24_bit_instr;   // We selected an instruction with a 24-bit result
  bool                  _in_24_bit_fp_mode;     // We are emitting instructions with 24-bit results
  int                   _java_calls;            // Number of java calls in the method
  int                   _inner_loops;           // Number of inner loops in the method
  Matcher*              _matcher;               // Engine to map ideal to machine instructions
  PhaseRegAlloc*        _regalloc;              // Results of register allocation.
  int                   _frame_slots;           // Size of total frame in stack slots
  CodeOffsets           _code_offsets;          // Offsets into the code for various interesting entries
  RegMask               _FIRST_STACK_mask;      // All stack slots usable for spills (depends on frame layout)
  Arena*                _indexSet_arena;        // control IndexSet allocation within PhaseChaitin
  void*                 _indexSet_free_block_list; // free list of IndexSet bit blocks

  uint                  _node_bundling_limit;
  Bundle*               _node_bundling_base;    // Information for instruction bundling

  // Instruction bits passed off to the VM
  int                   _method_size;           // Size of nmethod code segment in bytes
  CodeBuffer            _code_buffer;           // Where the code is assembled
  int                   _first_block_size;      // Size of unvalidated entry point code / OSR poison code
  ExceptionHandlerTable _handler_table;         // Table of native-code exception handlers
  ImplicitExceptionTable _inc_table;            // Table of implicit null checks in native code
  OopMapSet*            _oop_map_set;           // Table of oop maps (one for each safepoint location)
  static int            _CompiledZap_count;     // counter compared against CompileZap[First/Last]
  BufferBlob*           _scratch_buffer_blob;   // For temporary code buffers.
  relocInfo*            _scratch_locs_memory;   // For temporary code buffers.
  int                   _scratch_const_size;    // For temporary code buffers.
  bool                  _in_scratch_emit_size;  // true when in scratch_emit_size.

 public:
  // Accessors

  // The Compile instance currently active in this (compiler) thread.
  static Compile* current() {
    return (Compile*) ciEnv::current()->compiler_data();
  }

  // ID for this compilation.  Useful for setting breakpoints in the debugger.
  int               compile_id() const          { return _compile_id; }

  // Does this compilation allow instructions to subsume loads?  User
  // instructions that subsume a load may result in an unschedulable
  // instruction sequence.
  bool              subsume_loads() const       { return _subsume_loads; }
  /** Do escape analysis. */
  bool              do_escape_analysis() const  { return _do_escape_analysis; }
  /** Do boxing elimination. */
  bool              eliminate_boxing() const    { return _eliminate_boxing; }
  /** Do aggressive boxing elimination. */
  bool              aggressive_unboxing() const { return _eliminate_boxing && AggressiveUnboxing; }
  bool              save_argument_registers() const { return _save_argument_registers; }


  // Other fixed compilation parameters.
  ciMethod*         method() const              { return _method; }
  int               entry_bci() const           { return _entry_bci; }
  bool              is_osr_compilation() const  { return _entry_bci != InvocationEntryBci; }
  bool              is_method_compilation() const { return (_method != NULL && !_method->flags().is_native()); }
  const TypeFunc*   tf() const                  { assert(_tf!=NULL, ""); return _tf; }
  void         init_tf(const TypeFunc* tf)      { assert(_tf==NULL, ""); _tf = tf; }
  InlineTree*       ilt() const                 { return _ilt; }
  address           stub_function() const       { return _stub_function; }
  const char*       stub_name() const           { return _stub_name; }
  address           stub_entry_point() const    { return _stub_entry_point; }

  // Control of this compilation.
  int               fixed_slots() const         { assert(_fixed_slots >= 0, "");         return _fixed_slots; }
  void          set_fixed_slots(int n)          { _fixed_slots = n; }
  int               major_progress() const      { return _major_progress; }
  void          set_inlining_progress(bool z)   { _inlining_progress = z; }
  int               inlining_progress() const   { return _inlining_progress; }
  void          set_inlining_incrementally(bool z) { _inlining_incrementally = z; }
  int               inlining_incrementally() const { return _inlining_incrementally; }
  void          set_major_progress()            { _major_progress++; }
  void        clear_major_progress()            { _major_progress = 0; }
  int               num_loop_opts() const       { return _num_loop_opts; }
  void          set_num_loop_opts(int n)        { _num_loop_opts = n; }
  int               max_inline_size() const     { return _max_inline_size; }
  void          set_freq_inline_size(int n)     { _freq_inline_size = n; }
  int               freq_inline_size() const    { return _freq_inline_size; }
  void          set_max_inline_size(int n)      { _max_inline_size = n; }
  bool              has_loops() const           { return _has_loops; }
  void          set_has_loops(bool z)           { _has_loops = z; }
  bool              has_split_ifs() const       { return _has_split_ifs; }
  void          set_has_split_ifs(bool z)       { _has_split_ifs = z; }
  bool              has_unsafe_access() const   { return _has_unsafe_access; }
  void          set_has_unsafe_access(bool z)   { _has_unsafe_access = z; }
  bool              has_stringbuilder() const   { return _has_stringbuilder; }
  void          set_has_stringbuilder(bool z)   { _has_stringbuilder = z; }
  bool              has_boxed_value() const     { return _has_boxed_value; }
  void          set_has_boxed_value(bool z)     { _has_boxed_value = z; }
  int               max_vector_size() const     { return _max_vector_size; }
  void          set_max_vector_size(int s)      { _max_vector_size = s; }
  void          set_trap_count(uint r, uint c)  { assert(r < trapHistLength, "oob");        _trap_hist[r] = c; }
  uint              trap_count(uint r) const    { assert(r < trapHistLength, "oob"); return _trap_hist[r]; }
  bool              trap_can_recompile() const  { return _trap_can_recompile; }
  void          set_trap_can_recompile(bool z)  { _trap_can_recompile = z; }
  uint              decompile_count() const     { return _decompile_count; }
  void          set_decompile_count(uint c)     { _decompile_count = c; }
  bool              allow_range_check_smearing() const;
  bool              do_inlining() const         { return _do_inlining; }
  void          set_do_inlining(bool z)         { _do_inlining = z; }
  bool              do_scheduling() const       { return _do_scheduling; }
  void          set_do_scheduling(bool z)       { _do_scheduling = z; }
  bool              do_freq_based_layout() const{ return _do_freq_based_layout; }
  void          set_do_freq_based_layout(bool z){ _do_freq_based_layout = z; }
  bool              do_count_invocations() const{ return _do_count_invocations; }
  void          set_do_count_invocations(bool z){ _do_count_invocations = z; }
  bool              do_method_data_update() const { return _do_method_data_update; }
  void          set_do_method_data_update(bool z) { _do_method_data_update = z; }
  int               AliasLevel() const          { return _AliasLevel; }
  bool              print_assembly() const       { return _print_assembly; }
  void          set_print_assembly(bool z)       { _print_assembly = z; }
  bool              print_inlining() const       { return _print_inlining; }
  void          set_print_inlining(bool z)       { _print_inlining = z; }
  bool              print_intrinsics() const     { return _print_intrinsics; }
  void          set_print_intrinsics(bool z)     { _print_intrinsics = z; }
  // check the CompilerOracle for special behaviours for this compile
  bool          method_has_option(const char * option) {
    return method() != NULL && method()->has_option(option);
  }
#ifndef PRODUCT
  bool          trace_opto_output() const       { return _trace_opto_output; }
  bool              parsed_irreducible_loop() const { return _parsed_irreducible_loop; }
  void          set_parsed_irreducible_loop(bool z) { _parsed_irreducible_loop = z; }
  int _in_dump_cnt;  // Required for dumping ir nodes.
#endif

  // JSR 292
  bool              has_method_handle_invokes() const { return _has_method_handle_invokes;     }
  void          set_has_method_handle_invokes(bool z) {        _has_method_handle_invokes = z; }

  jlong _latest_stage_start_counter;

  void begin_method() {
#ifndef PRODUCT
    if (_printer) _printer->begin_method(this);
#endif
    C->_latest_stage_start_counter = os::elapsed_counter();
  }

  void print_method(CompilerPhaseType cpt, int level = 1) {
    EventCompilerPhase event(UNTIMED);
    if (event.should_commit()) {
      event.set_starttime(C->_latest_stage_start_counter);
      event.set_endtime(os::elapsed_counter());
      event.set_phase((u1) cpt);
      event.set_compileID(C->_compile_id);
      event.set_phaseLevel(level);
      event.commit();
    }


#ifndef PRODUCT
    if (_printer) _printer->print_method(this, CompilerPhaseTypeHelper::to_string(cpt), level);
#endif
    C->_latest_stage_start_counter = os::elapsed_counter();
  }

  void end_method(int level = 1) {
    EventCompilerPhase event(UNTIMED);
    if (event.should_commit()) {
      event.set_starttime(C->_latest_stage_start_counter);
      event.set_endtime(os::elapsed_counter());
      event.set_phase((u1) PHASE_END);
      event.set_compileID(C->_compile_id);
      event.set_phaseLevel(level);
      event.commit();
    }
#ifndef PRODUCT
    if (_printer) _printer->end_method();
#endif
  }

  int           macro_count()             const { return _macro_nodes->length(); }
  int           predicate_count()         const { return _predicate_opaqs->length();}
  int           expensive_count()         const { return _expensive_nodes->length(); }
  Node*         macro_node(int idx)       const { return _macro_nodes->at(idx); }
  Node*         predicate_opaque1_node(int idx) const { return _predicate_opaqs->at(idx);}
  Node*         expensive_node(int idx)   const { return _expensive_nodes->at(idx); }
  ConnectionGraph* congraph()                   { return _congraph;}
  void set_congraph(ConnectionGraph* congraph)  { _congraph = congraph;}
  void add_macro_node(Node * n) {
    //assert(n->is_macro(), "must be a macro node");
    assert(!_macro_nodes->contains(n), " duplicate entry in expand list");
    _macro_nodes->append(n);
  }
  void remove_macro_node(Node * n) {
    // this function may be called twice for a node so check
    // that the node is in the array before attempting to remove it
    if (_macro_nodes->contains(n))
      _macro_nodes->remove(n);
    // remove from _predicate_opaqs list also if it is there
    if (predicate_count() > 0 && _predicate_opaqs->contains(n)){
      _predicate_opaqs->remove(n);
    }
  }
  void add_expensive_node(Node * n);
  void remove_expensive_node(Node * n) {
    if (_expensive_nodes->contains(n)) {
      _expensive_nodes->remove(n);
    }
  }
  void add_predicate_opaq(Node * n) {
    assert(!_predicate_opaqs->contains(n), " duplicate entry in predicate opaque1");
    assert(_macro_nodes->contains(n), "should have already been in macro list");
    _predicate_opaqs->append(n);
  }
  // remove the opaque nodes that protect the predicates so that the unused checks and
  // uncommon traps will be eliminated from the graph.
  void cleanup_loop_predicates(PhaseIterGVN &igvn);
  bool is_predicate_opaq(Node * n) {
    return _predicate_opaqs->contains(n);
  }

  // Are there candidate expensive nodes for optimization?
  bool should_optimize_expensive_nodes(PhaseIterGVN &igvn);
  // Check whether n1 and n2 are similar
  static int cmp_expensive_nodes(Node* n1, Node* n2);
  // Sort expensive nodes to locate similar expensive nodes
  void sort_expensive_nodes();

  // Compilation environment.
  Arena*            comp_arena()                { return &_comp_arena; }
  ciEnv*            env() const                 { return _env; }
  CompileLog*       log() const                 { return _log; }
  bool              failing() const             { return _env->failing() || _failure_reason != NULL; }
  const char*       failure_reason() { return _failure_reason; }
  bool              failure_reason_is(const char* r) { return (r==_failure_reason) || (r!=NULL && _failure_reason!=NULL && strcmp(r, _failure_reason)==0); }

  void record_failure(const char* reason);
  void record_method_not_compilable(const char* reason, bool all_tiers = false) {
    // All bailouts cover "all_tiers" when TieredCompilation is off.
    if (!TieredCompilation) all_tiers = true;
    env()->record_method_not_compilable(reason, all_tiers);
    // Record failure reason.
    record_failure(reason);
  }
  void record_method_not_compilable_all_tiers(const char* reason) {
    record_method_not_compilable(reason, true);
  }
  bool check_node_count(uint margin, const char* reason) {
    if (live_nodes() + margin > (uint)MaxNodeLimit) {
      record_method_not_compilable(reason);
      return true;
    } else {
      return false;
    }
  }
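
  // Illustrative sketch (not part of the original header; the margin and
  // message are hypothetical): a phase that is about to create many nodes can
  // bail out gracefully first:
  //
  //   if (C->check_node_count(2000, "out of nodes in example phase")) {
  //     return;  // compilation has been recorded as not compilable
  //   }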

  // Node management
  uint         unique() const              { return _unique; }
  uint         next_unique()               { return _unique++; }
  void         set_unique(uint i)          { _unique = i; }
  static int   debug_idx()                 { return debug_only(_debug_idx)+0; }
  static void  set_debug_idx(int i)        { debug_only(_debug_idx = i); }
  Arena*       node_arena()                { return &_node_arena; }
  Arena*       old_arena()                 { return &_old_arena; }
  RootNode*    root() const                { return _root; }
  void         set_root(RootNode* r)       { _root = r; }
  StartNode*   start() const;              // (Derived from root.)
  void         init_start(StartNode* s);
  Node*        immutable_memory();

  Node*        recent_alloc_ctl() const    { return _recent_alloc_ctl; }
  Node*        recent_alloc_obj() const    { return _recent_alloc_obj; }
  void         set_recent_alloc(Node* ctl, Node* obj) {
                                                  _recent_alloc_ctl = ctl;
                                                  _recent_alloc_obj = obj;
                                           }
  void         record_dead_node(uint idx)  { if (_dead_node_list.test_set(idx)) return;
                                             _dead_node_count++;
                                           }
  bool         is_dead_node(uint idx)      { return _dead_node_list.test(idx) != 0; }
  uint         dead_node_count()           { return _dead_node_count; }
  void         reset_dead_node_list()      { _dead_node_list.Reset();
                                             _dead_node_count = 0;
                                           }
  uint          live_nodes() const         {
    int  val = _unique - _dead_node_count;
    assert(val >= 0, err_msg_res("number of tracked dead nodes %d more than created nodes %d", _dead_node_count, _unique));
    return (uint) val;
                                           }
#ifdef ASSERT
  uint         count_live_nodes_by_graph_walk();
  void         print_missing_nodes();
#endif
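
  // Illustrative sketch (not part of the original header): a pass that
  // disconnects a node records its index so live_nodes() stays O(1):
  //
  //   C->record_dead_node(dead->_idx);   // marks the index, bumps _dead_node_count
  //   uint live = C->live_nodes();       // _unique - _dead_node_count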

  // Constant table
  ConstantTable&   constant_table() { return _constant_table; }

  MachConstantBaseNode*     mach_constant_base_node();
  bool                  has_mach_constant_base_node() const { return _mach_constant_base_node != NULL; }

  // Handy undefined Node
  Node*             top() const                 { return _top; }

  // these are used by guys who need to know about creation and transformation of top:
  Node*             cached_top_node()           { return _top; }
  void          set_cached_top_node(Node* tn);

  GrowableArray<Node_Notes*>* node_note_array() const { return _node_note_array; }
  void set_node_note_array(GrowableArray<Node_Notes*>* arr) { _node_note_array = arr; }
  Node_Notes* default_node_notes() const        { return _default_node_notes; }
  void    set_default_node_notes(Node_Notes* n) { _default_node_notes = n; }

  Node_Notes*       node_notes_at(int idx) {
    return locate_node_notes(_node_note_array, idx, false);
  }
  inline bool   set_node_notes_at(int idx, Node_Notes* value);

  // Copy notes from source to dest, if they exist.
  // Overwrite dest only if source provides something.
  // Return true if information was moved.
  bool copy_node_notes_to(Node* dest, Node* source);

  // Workhorse function to sort out the blocked Node_Notes array:
  inline Node_Notes* locate_node_notes(GrowableArray<Node_Notes*>* arr,
                                       int idx, bool can_grow = false);

  void grow_node_notes(GrowableArray<Node_Notes*>* arr, int grow_by);
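
  // Illustrative sketch (not part of the original header; the usage pattern
  // is an assumption): notes are looked up by node index through the blocked
  // array above, and a slot can be filled in from the default notes:
  //
  //   Node_Notes* nn = C->node_notes_at(n->_idx);        // NULL if nothing recorded
  //   if (nn == NULL) C->set_node_notes_at(n->_idx, C->default_node_notes());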

  // Type management
  Arena*            type_arena()                { return _type_arena; }
  Dict*             type_dict()                 { return _type_dict; }
  void*             type_hwm()                  { return _type_hwm; }
  size_t            type_last_size()            { return _type_last_size; }
  int               num_alias_types()           { return _num_alias_types; }

  void          init_type_arena()                       { _type_arena = &_Compile_types; }
  void          set_type_arena(Arena* a)                { _type_arena = a; }
  void          set_type_dict(Dict* d)                  { _type_dict = d; }
  void          set_type_hwm(void* p)                   { _type_hwm = p; }
  void          set_type_last_size(size_t sz)           { _type_last_size = sz; }

  const TypeFunc* last_tf(ciMethod* m) {
    return (m == _last_tf_m) ? _last_tf : NULL;
  }
  void set_last_tf(ciMethod* m, const TypeFunc* tf) {
    assert(m != NULL || tf == NULL, "");
    _last_tf_m = m;
    _last_tf = tf;
  }

  AliasType*        alias_type(int                idx)  { assert(idx < num_alias_types(), "oob"); return _alias_types[idx]; }
  AliasType*        alias_type(const TypePtr* adr_type, ciField* field = NULL) { return find_alias_type(adr_type, false, field); }
  bool         have_alias_type(const TypePtr* adr_type);
  AliasType*        alias_type(ciField*         field);

  int               get_alias_index(const TypePtr* at)  { return alias_type(at)->index(); }
  const TypePtr*    get_adr_type(uint aidx)             { return alias_type(aidx)->adr_type(); }
  int               get_general_index(uint aidx)        { return alias_type(aidx)->general_index(); }

  // Building nodes
  void              rethrow_exceptions(JVMState* jvms);
  void              return_values(JVMState* jvms);
  JVMState*         build_start_state(StartNode* start, const TypeFunc* tf);

  // Decide how to build a call.
  // The profile factor is a discount to apply to this site's interp. profile.
829
  CallGenerator*    call_generator(ciMethod* call_method, int vtable_index, bool call_does_dispatch,
830 831
                                   JVMState* jvms, bool allow_inline, float profile_factor, ciKlass* speculative_receiver_type = NULL,
                                   bool allow_intrinsics = true, bool delayed_forbidden = false);
832 833 834 835 836 837
  bool should_delay_inlining(ciMethod* call_method, JVMState* jvms) {
    return should_delay_string_inlining(call_method, jvms) ||
           should_delay_boxing_inlining(call_method, jvms);
  }
  bool should_delay_string_inlining(ciMethod* call_method, JVMState* jvms);
  bool should_delay_boxing_inlining(ciMethod* call_method, JVMState* jvms);

  // Helper functions to identify inlining potential at call-site
  ciMethod* optimize_virtual_call(ciMethod* caller, int bci, ciInstanceKlass* klass,
                                  ciMethod* callee, const TypeOopPtr* receiver_type,
                                  bool is_virtual,
                                  bool &call_does_dispatch, int &vtable_index);
  ciMethod* optimize_inlining(ciMethod* caller, int bci, ciInstanceKlass* klass,
                              ciMethod* callee, const TypeOopPtr* receiver_type);

  // Report if there were too many traps at a current method and bci.
  // Report if a trap was recorded, and/or PerMethodTrapLimit was exceeded.
  // If there is no MDO at all, report no trap unless told to assume it.
  bool too_many_traps(ciMethod* method, int bci, Deoptimization::DeoptReason reason);
  // This version, unspecific to a particular bci, asks if
  // PerMethodTrapLimit was exceeded for all inlined methods seen so far.
  bool too_many_traps(Deoptimization::DeoptReason reason,
                      // Privately used parameter for logging:
                      ciMethodData* logmd = NULL);
  // Report if there were too many recompiles at a method and bci.
  bool too_many_recompiles(ciMethod* method, int bci, Deoptimization::DeoptReason reason);

  // Parsing, optimization
  PhaseGVN*         initial_gvn()               { return _initial_gvn; }
  Unique_Node_List* for_igvn()                  { return _for_igvn; }
  inline void       record_for_igvn(Node* n);   // Body is after class Unique_Node_List.
  void          set_initial_gvn(PhaseGVN *gvn)           { _initial_gvn = gvn; }
  void          set_for_igvn(Unique_Node_List *for_igvn) { _for_igvn = for_igvn; }

  // Replace n by nn using initial_gvn, calling hash_delete and
  // record_for_igvn as needed.
  void gvn_replace_by(Node* n, Node* nn);


  void              identify_useful_nodes(Unique_Node_List &useful);
  void              update_dead_node_list(Unique_Node_List &useful);
  void              remove_useless_nodes (Unique_Node_List &useful);

  WarmCallInfo*     warm_calls() const          { return _warm_calls; }
  void          set_warm_calls(WarmCallInfo* l) { _warm_calls = l; }
  WarmCallInfo* pop_warm_call();

  // Record this CallGenerator for inlining at the end of parsing.
  void              add_late_inline(CallGenerator* cg)        {
    _late_inlines.insert_before(_late_inlines_pos, cg);
    _late_inlines_pos++;
  }

  void              prepend_late_inline(CallGenerator* cg)    {
    _late_inlines.insert_before(0, cg);
  }

  void              add_string_late_inline(CallGenerator* cg) {
    _string_late_inlines.push(cg);
  }

  void              add_boxing_late_inline(CallGenerator* cg) {
    _boxing_late_inlines.push(cg);
  }

  void remove_useless_late_inlines(GrowableArray<CallGenerator*>* inlines, Unique_Node_List &useful);

  void dump_inlining();

  bool over_inlining_cutoff() const {
    if (!inlining_incrementally()) {
      return unique() > (uint)NodeCountInliningCutoff;
    } else {
      return live_nodes() > (uint)LiveNodeCountInliningCutoff;
    }
  }
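
  // Illustrative sketch (not part of the original header; the call generator
  // variable is hypothetical): incremental inlining queues a delayed call and
  // drains the queue later, respecting the cutoff above:
  //
  //   C->add_late_inline(cg);                    // revisit after parsing
  //   // ... later, unless over_inlining_cutoff():
  //   //     inline_incrementally(igvn) walks _late_inlines in order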

  void inc_number_of_mh_late_inlines() { _number_of_mh_late_inlines++; }
  void dec_number_of_mh_late_inlines() { assert(_number_of_mh_late_inlines > 0, "_number_of_mh_late_inlines < 0 !"); _number_of_mh_late_inlines--; }
  bool has_mh_late_inlines() const     { return _number_of_mh_late_inlines > 0; }

  void inline_incrementally_one(PhaseIterGVN& igvn);
  void inline_incrementally(PhaseIterGVN& igvn);
  void inline_string_calls(bool parse_time);
  void inline_boxing_calls(PhaseIterGVN& igvn);

  // Matching, CFG layout, allocation, code generation
  PhaseCFG*         cfg()                       { return _cfg; }
  bool              select_24_bit_instr() const { return _select_24_bit_instr; }
  bool              in_24_bit_fp_mode() const   { return _in_24_bit_fp_mode; }
  bool              has_java_calls() const      { return _java_calls > 0; }
  int               java_calls() const          { return _java_calls; }
  int               inner_loops() const         { return _inner_loops; }
  Matcher*          matcher()                   { return _matcher; }
  PhaseRegAlloc*    regalloc()                  { return _regalloc; }
  int               frame_slots() const         { return _frame_slots; }
  int               frame_size_in_words() const; // frame_slots in units of the polymorphic 'words'
  RegMask&          FIRST_STACK_mask()          { return _FIRST_STACK_mask; }
  Arena*            indexSet_arena()            { return _indexSet_arena; }
  void*             indexSet_free_block_list()  { return _indexSet_free_block_list; }
  uint              node_bundling_limit()       { return _node_bundling_limit; }
  Bundle*           node_bundling_base()        { return _node_bundling_base; }
  void          set_node_bundling_limit(uint n) { _node_bundling_limit = n; }
  void          set_node_bundling_base(Bundle* b) { _node_bundling_base = b; }
  bool          starts_bundle(const Node *n) const;
  bool          need_stack_bang(int frame_size_in_bytes) const;
  bool          need_register_stack_bang() const;

  void          set_matcher(Matcher* m)                 { _matcher = m; }
//void          set_regalloc(PhaseRegAlloc* ra)           { _regalloc = ra; }
  void          set_indexSet_arena(Arena* a)            { _indexSet_arena = a; }
  void          set_indexSet_free_block_list(void* p)   { _indexSet_free_block_list = p; }

  // Remember if this compilation changes hardware mode to 24-bit precision
  void set_24_bit_selection_and_mode(bool selection, bool mode) {
    _select_24_bit_instr = selection;
    _in_24_bit_fp_mode   = mode;
  }

  void  set_java_calls(int z) { _java_calls  = z; }
  void set_inner_loops(int z) { _inner_loops = z; }

  // Instruction bits passed off to the VM
  int               code_size()                 { return _method_size; }
  CodeBuffer*       code_buffer()               { return &_code_buffer; }
  int               first_block_size()          { return _first_block_size; }
  void              set_frame_complete(int off) { _code_offsets.set_value(CodeOffsets::Frame_Complete, off); }
  ExceptionHandlerTable*  handler_table()       { return &_handler_table; }
  ImplicitExceptionTable* inc_table()           { return &_inc_table; }
  OopMapSet*        oop_map_set()               { return _oop_map_set; }
  DebugInformationRecorder* debug_info()        { return env()->debug_info(); }
  Dependencies*     dependencies()              { return env()->dependencies(); }
  static int        CompiledZap_count()         { return _CompiledZap_count; }
  BufferBlob*       scratch_buffer_blob()       { return _scratch_buffer_blob; }
  void         init_scratch_buffer_blob(int const_size);
  void        clear_scratch_buffer_blob();
  void          set_scratch_buffer_blob(BufferBlob* b) { _scratch_buffer_blob = b; }
  relocInfo*        scratch_locs_memory()       { return _scratch_locs_memory; }
  void          set_scratch_locs_memory(relocInfo* b)  { _scratch_locs_memory = b; }

  // emit to scratch blob, report resulting size
  uint              scratch_emit_size(const Node* n);
  void       set_in_scratch_emit_size(bool x)   {        _in_scratch_emit_size = x; }
  bool           in_scratch_emit_size() const   { return _in_scratch_emit_size;     }

  enum ScratchBufferBlob {
    MAX_inst_size       = 1024,
    MAX_locs_size       = 128, // number of relocInfo elements
    MAX_const_size      = 128,
    MAX_stubs_size      = 128
  };

  // Major entry point.  Given a Scope, compile the associated method.
  // For normal compilations, entry_bci is InvocationEntryBci.  For on stack
  // replacement, entry_bci indicates the bytecode for which to compile a
  // continuation.
  Compile(ciEnv* ci_env, C2Compiler* compiler, ciMethod* target,
          int entry_bci, bool subsume_loads, bool do_escape_analysis,
          bool eliminate_boxing);

  // Second major entry point.  From the TypeFunc signature, generate code
  // to pass arguments from the Java calling convention to the C calling
  // convention.
  Compile(ciEnv* ci_env, const TypeFunc *(*gen)(),
          address stub_function, const char *stub_name,
          int is_fancy_jump, bool pass_tls,
          bool save_arg_registers, bool return_pc);

  // From the TypeFunc signature, generate code to pass arguments
  // from Compiled calling convention to Interpreter's calling convention
  void Generate_Compiled_To_Interpreter_Graph(const TypeFunc *tf, address interpreter_entry);

  // From the TypeFunc signature, generate code to pass arguments
  // from Interpreter's calling convention to Compiler's calling convention
  void Generate_Interpreter_To_Compiled_Graph(const TypeFunc *tf);

  // Are we compiling a method?
  bool has_method() { return method() != NULL; }

  // Maybe print some information about this compile.
  void print_compile_messages();

  // Final graph reshaping, a post-pass after the regular optimizer is done.
  bool final_graph_reshaping();

  // returns true if adr is completely contained in the given alias category
  bool must_alias(const TypePtr* adr, int alias_idx);

  // returns true if adr overlaps with the given alias category
  bool can_alias(const TypePtr* adr, int alias_idx);

  // Driver for converting compiler's IR into machine code bits
  void Output();

  // Accessors for node bundling info.
  Bundle* node_bundling(const Node *n);
  bool valid_bundle_info(const Node *n);

  // Schedule and Bundle the instructions
  void ScheduleAndBundle();

  // Build OopMaps for each GC point
  void BuildOopMaps();

  // Append debug info for the node "local" at safepoint node "sfpt" to the
  // "array".  May also consult and add to "objs", which describes the
  // scalar-replaced objects.
  void FillLocArray( int idx, MachSafePointNode* sfpt,
                     Node *local, GrowableArray<ScopeValue*> *array,
                     GrowableArray<ScopeValue*> *objs );

  // If "objs" contains an ObjectValue whose id is "id", returns it, else NULL.
  static ObjectValue* sv_for_node_id(GrowableArray<ScopeValue*> *objs, int id);
  // Requires that "objs" does not contain an ObjectValue whose id matches
  // that of "sv".  Appends "sv".
  static void set_sv_for_object_node(GrowableArray<ScopeValue*> *objs,
                                     ObjectValue* sv );

  // Process an OopMap Element while emitting nodes
  void Process_OopMap_Node(MachNode *mach, int code_offset);

  // Initialize code buffer
  CodeBuffer* init_buffer(uint* blk_starts);

  // Write out basic block data to code buffer
  void fill_buffer(CodeBuffer* cb, uint* blk_starts);

  // Determine which variable sized branches can be shortened
  void shorten_branches(uint* blk_starts, int& code_size, int& reloc_size, int& stub_size);

  // Compute the size of first NumberOfLoopInstrToAlign instructions
  // at the head of a loop.
  void compute_loop_first_inst_sizes();

  // Compute the information for the exception tables
  void FillExceptionTables(uint cnt, uint *call_returns, uint *inct_starts, Label *blk_labels);

  // Stack slots that may be unused by the calling convention but must
  // otherwise be preserved.  On Intel this includes the return address.
  // On PowerPC it includes the 4 words holding the old TOC & LR glue.
  uint in_preserve_stack_slots();

  // "Top of Stack" slots that may be unused by the calling convention but must
  // otherwise be preserved.
  // On Intel these are not necessary and the value can be zero.
  // On Sparc this describes the words reserved for storing a register window
  // when an interrupt occurs.
  static uint out_preserve_stack_slots();

  // Number of outgoing stack slots killed above the out_preserve_stack_slots
  // for calls to C.  Supports the var-args backing area for register parms.
  uint varargs_C_out_slots_killed() const;

  // Number of Stack Slots consumed by a synchronization entry
  int sync_stack_slots() const;

  // Compute the name of old_SP.  See <arch>.ad for frame layout.
  OptoReg::Name compute_old_SP();

#ifdef ENABLE_ZAP_DEAD_LOCALS
  static bool is_node_getting_a_safepoint(Node*);
  void Insert_zap_nodes();
  Node* call_zap_node(MachSafePointNode* n, int block_no);
#endif

 private:
  // Phase control:
  void Init(int aliaslevel);                     // Prepare for a single compilation
  int  Inline_Warm();                            // Find more inlining work.
  void Finish_Warm();                            // Give up on further inlines.
  void Optimize();                               // Given a graph, optimize it
  void Code_Gen();                               // Generate code from a graph

  // Management of the AliasType table.
  void grow_alias_types();
  AliasCacheEntry* probe_alias_cache(const TypePtr* adr_type);
  const TypePtr *flatten_alias_type(const TypePtr* adr_type) const;
  AliasType* find_alias_type(const TypePtr* adr_type, bool no_create, ciField* field);

  void verify_top(Node*) const PRODUCT_RETURN;

  // Intrinsic setup.
  void           register_library_intrinsics();                            // initializer
  CallGenerator* make_vm_intrinsic(ciMethod* m, bool is_virtual);          // constructor
  int            intrinsic_insertion_index(ciMethod* m, bool is_virtual);  // helper
  CallGenerator* find_intrinsic(ciMethod* m, bool is_virtual);             // query fn
  void           register_intrinsic(CallGenerator* cg);                    // update fn

#ifndef PRODUCT
  static juint  _intrinsic_hist_count[vmIntrinsics::ID_LIMIT];
  static jubyte _intrinsic_hist_flags[vmIntrinsics::ID_LIMIT];
#endif
  // Function calls made by the public function final_graph_reshaping.
  // No need to be made public as they are not called elsewhere.
  void final_graph_reshaping_impl( Node *n, Final_Reshape_Counts &frc);
  void final_graph_reshaping_walk( Node_Stack &nstack, Node *root, Final_Reshape_Counts &frc );
  void eliminate_redundant_card_marks(Node* n);

 public:

  // Note:  Histogram array size is about 1 Kb.
  enum {                        // flag bits:
    _intrinsic_worked = 1,      // succeeded at least once
    _intrinsic_failed = 2,      // tried it but it failed
    _intrinsic_disabled = 4,    // was requested but disabled (e.g., -XX:-InlineUnsafeOps)
    _intrinsic_virtual = 8,     // was seen in the virtual form (rare)
    _intrinsic_both = 16        // was seen in the non-virtual form (usual)
  };
  // Update histogram.  Return boolean if this is a first-time occurrence.
  static bool gather_intrinsic_statistics(vmIntrinsics::ID id,
                                          bool is_virtual, int flags) PRODUCT_RETURN0;
  static void print_intrinsic_statistics() PRODUCT_RETURN;

  // Graph verification code
  // Walk the node list, verifying that there is a one-to-one
  // correspondence between Use-Def edges and Def-Use edges
  // The option no_dead_code enables stronger checks that the
  // graph is strongly connected from root in both directions.
  void verify_graph_edges(bool no_dead_code = false) PRODUCT_RETURN;

  // Verify GC barrier patterns
  void verify_barriers() PRODUCT_RETURN;

  // End-of-run dumps.
  static void print_statistics() PRODUCT_RETURN;

  // Dump formatted assembly
  void dump_asm(int *pcs = NULL, uint pc_limit = 0) PRODUCT_RETURN;
  void dump_pc(int *pcs, int pc_limit, Node *n);

  // Verify ADLC assumptions during startup
  static void adlc_verification() PRODUCT_RETURN;

  // Definitions of pd methods
  static void pd_compiler2_init();

  // Auxiliary method for randomized fuzzing/stressing
  static bool randomized_select(int count);

  // enter a PreserveJVMState block
  void inc_preserve_jvm_state() {
    _preserve_jvm_state++;
  }

  // exit a PreserveJVMState block
  void dec_preserve_jvm_state() {
    _preserve_jvm_state--;
    assert(_preserve_jvm_state >= 0, "_preserve_jvm_state shouldn't be negative");
  }

  bool has_preserve_jvm_state() const {
    return _preserve_jvm_state > 0;
  }
};

#endif // SHARE_VM_OPTO_COMPILE_HPP