
Analyzing deadlock during the class initialization phase: a problem walkthrough


Let's look at the code first.

![image.png](https://img-blog.csdnimg.cn/img_convert/f6ef8a951f08fd78d1a7693f98c0d1d2.png)

package test;

/**
 * @author Ye-Ma
 * @Description TODO
 * @date 2022/1/7-11:13
 */
public class InitDeadLock {
    public static void main(String[] args) {
        new Thread(() -> new AA()).start();
        new Thread(() -> new BB()).start();
    }


}

class AA {
    static {
        System.out.println("Class A init");
        new BB();
    }
}

class BB {
    static {
        System.out.println("Class B init");
        new AA();
    }
}
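Depending on scheduling, this program may even finish normally; the deadlock appears when the two threads interleave so that each marks its own class as being-initialized and then, from inside that class's static initializer, waits for the other class's initialization to finish. In that case both "Class A init" and "Class B init" are printed and the process hangs. Below is a small probe sketch of mine (not from the original post; the thread names and the 2-second sleep are arbitrary choices) that reproduces the hang and then dumps the two threads' states and stacks:

package test;

import java.util.Map;

// Probe sketch: start the two initializer threads, wait a moment, then dump
// their states and stack traces once the hang has had time to form.
public class InitDeadLockProbe {
    public static void main(String[] args) throws InterruptedException {
        Thread t1 = new Thread(() -> new AA(), "init-AA");
        Thread t2 = new Thread(() -> new BB(), "init-BB");
        t1.start();
        t2.start();

        Thread.sleep(2000); // give both <clinit> blocks time to block on each other

        for (Map.Entry<Thread, StackTraceElement[]> entry : Thread.getAllStackTraces().entrySet()) {
            Thread t = entry.getKey();
            if (t == t1 || t == t2) {
                // Threads waiting on a class initialization lock are usually
                // still reported as RUNNABLE, which is part of why tools miss this.
                System.out.println(t.getName() + " state=" + t.getState());
                for (StackTraceElement frame : entry.getValue()) {
                    System.out.println("    at " + frame);
                }
            }
        }
    }
}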

Bytecode
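The dump that follows looks like javap output; something along the lines of javap -v -p test.InitDeadLock (run from the compiled-classes directory) should reproduce it, where -v prints the constant pool, bytecode and line-number tables, and -p is needed to include the private synthetic lambda$main$* methods.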

Classfile /D:/JavaClass/jucDemo/target/classes/test/InitDeadLock.class
  Last modified 2022-1-7; size 1183 bytes
  MD5 checksum b2b640931bdf6721e880f1f024e0a39b
  Compiled from "InitDeadLock.java"
public class test.InitDeadLock
  minor version: 0
  major version: 52
  flags: ACC_PUBLIC, ACC_SUPER
Constant pool:
   #1 = Methodref          #12.#28        // java/lang/Object."<init>":()V
   #2 = Class              #29            // java/lang/Thread
   #3 = InvokeDynamic      #0:#34         // #0:run:()Ljava/lang/Runnable;
   #4 = Methodref          #2.#35         // java/lang/Thread."<init>":(Ljava/lang/Runnable;)V
   #5 = Methodref          #2.#36         // java/lang/Thread.start:()V
   #6 = InvokeDynamic      #1:#34         // #1:run:()Ljava/lang/Runnable;
   #7 = Class              #38            // test/BB
   #8 = Methodref          #7.#28         // test/BB."<init>":()V
   #9 = Class              #39            // test/AA
  #10 = Methodref          #9.#28         // test/AA."<init>":()V
  #11 = Class              #40            // test/InitDeadLock
  #12 = Class              #41            // java/lang/Object
  #13 = Utf8               <init>
  #14 = Utf8               ()V
  #15 = Utf8               Code
  #16 = Utf8               LineNumberTable
  #17 = Utf8               LocalVariableTable
  #18 = Utf8               this
  #19 = Utf8               Ltest/InitDeadLock;
  #20 = Utf8               main
  #21 = Utf8               ([Ljava/lang/String;)V
  #22 = Utf8               args
  #23 = Utf8               [Ljava/lang/String;
  #24 = Utf8               lambda$main$1
  #25 = Utf8               lambda$main$0
  #26 = Utf8               SourceFile
  #27 = Utf8               InitDeadLock.java
  #28 = NameAndType        #13:#14        // "<init>":()V
  #29 = Utf8               java/lang/Thread
  #30 = Utf8               BootstrapMethods
  #31 = MethodHandle       #6:#42         // invokestatic java/lang/invoke/LambdaMetafactory.metafactory:(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodHandle;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite;
  #32 = MethodType         #14            //  ()V
  #33 = MethodHandle       #6:#43         // invokestatic test/InitDeadLock.lambda$main$0:()V
  #34 = NameAndType        #44:#45        // run:()Ljava/lang/Runnable;
  #35 = NameAndType        #13:#46        // "<init>":(Ljava/lang/Runnable;)V
  #36 = NameAndType        #47:#14        // start:()V
  #37 = MethodHandle       #6:#48         // invokestatic test/InitDeadLock.lambda$main$1:()V
  #38 = Utf8               test/BB
  #39 = Utf8               test/AA
  #40 = Utf8               test/InitDeadLock
  #41 = Utf8               java/lang/Object
  #42 = Methodref          #49.#50        // java/lang/invoke/LambdaMetafactory.metafactory:(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodHandle;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite;
  #43 = Methodref          #11.#51        // test/InitDeadLock.lambda$main$0:()V
  #44 = Utf8               run
  #45 = Utf8               ()Ljava/lang/Runnable;
  #46 = Utf8               (Ljava/lang/Runnable;)V
  #47 = Utf8               start
  #48 = Methodref          #11.#52        // test/InitDeadLock.lambda$main$1:()V
  #49 = Class              #53            // java/lang/invoke/LambdaMetafactory
  #50 = NameAndType        #54:#58        // metafactory:(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodHandle;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite;
  #51 = NameAndType        #25:#14        // lambda$main$0:()V
  #52 = NameAndType        #24:#14        // lambda$main$1:()V
  #53 = Utf8               java/lang/invoke/LambdaMetafactory
  #54 = Utf8               metafactory
  #55 = Class              #60            // java/lang/invoke/MethodHandles$Lookup
  #56 = Utf8               Lookup
  #57 = Utf8               InnerClasses
  #58 = Utf8               (Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodHandle;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite;
  #59 = Class              #61            // java/lang/invoke/MethodHandles
  #60 = Utf8               java/lang/invoke/MethodHandles$Lookup
  #61 = Utf8               java/lang/invoke/MethodHandles
{
  public test.InitDeadLock();
    descriptor: ()V
    flags: ACC_PUBLIC
    Code:
      stack=1, locals=1, args_size=1
         0: aload_0
         1: invokespecial #1                  // Method java/lang/Object."<init>":()V
         4: return
      LineNumberTable:
        line 8: 0
      LocalVariableTable:
        Start  Length  Slot  Name   Signature
            0       5     0  this   Ltest/InitDeadLock;

  public static void main(java.lang.String[]);
    descriptor: ([Ljava/lang/String;)V
    flags: ACC_PUBLIC, ACC_STATIC
    Code:
      stack=3, locals=1, args_size=1
         0: new           #2                  // class java/lang/Thread
         3: dup
         4: invokedynamic #3,  0              // InvokeDynamic #0:run:()Ljava/lang/Runnable;
         9: invokespecial #4                  // Method java/lang/Thread."<init>":(Ljava/lang/Runnable;)V
        12: invokevirtual #5                  // Method java/lang/Thread.start:()V
        15: new           #2                  // class java/lang/Thread
        18: dup
        19: invokedynamic #6,  0              // InvokeDynamic #1:run:()Ljava/lang/Runnable;
        24: invokespecial #4                  // Method java/lang/Thread."<init>":(Ljava/lang/Runnable;)V
        27: invokevirtual #5                  // Method java/lang/Thread.start:()V
        30: return
      LineNumberTable:
        line 10: 0
        line 11: 15
        line 12: 30
      LocalVariableTable:
        Start  Length  Slot  Name   Signature
            0      31     0  args   [Ljava/lang/String;

  private static void lambda$main$1();
    descriptor: ()V
    flags: ACC_PRIVATE, ACC_STATIC, ACC_SYNTHETIC
    Code:
      stack=2, locals=0, args_size=0
         0: new           #7                  // class test/BB
         3: dup
         4: invokespecial #8                  // Method test/BB."<init>":()V
         7: pop
         8: return
      LineNumberTable:
        line 11: 0

  private static void lambda$main$0();
    descriptor: ()V
    flags: ACC_PRIVATE, ACC_STATIC, ACC_SYNTHETIC
    Code:
      stack=2, locals=0, args_size=0
         0: new           #9                  // class test/AA
         3: dup
         4: invokespecial #10                 // Method test/AA."<init>":()V
         7: pop
         8: return
      LineNumberTable:
        line 10: 0
}
SourceFile: "InitDeadLock.java"
InnerClasses:
     public static final #56= #55 of #59; //Lookup=class java/lang/invoke/MethodHandles$Lookup of class java/lang/invoke/MethodHandles
BootstrapMethods:
  0: #31 invokestatic java/lang/invoke/LambdaMetafactory.metafactory:(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodHandle;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite;
    Method arguments:
      #32 ()V
      #33 invokestatic test/InitDeadLock.lambda$main$0:()V
      #32 ()V
  1: #31 invokestatic java/lang/invoke/LambdaMetafactory.metafactory:(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodHandle;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite;
    Method arguments:
      #32 ()V
      #37 invokestatic test/InitDeadLock.lambda$main$1:()V
      #32 ()V

Name: RMI TCP Connection(3)-192.168.196.138
State: RUNNABLE
Blocked count: 1, Waited count: 0

Stack trace:
sun.management.ThreadImpl.dumpThreads0(Native Method)
sun.management.ThreadImpl.dumpAllThreads(ThreadImpl.java:496)
sun.management.ThreadImpl.dumpAllThreads(ThreadImpl.java:484)
sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
java.lang.reflect.Method.invoke(Method.java:498)
sun.reflect.misc.Trampoline.invoke(MethodUtil.java:72)
sun.reflect.GeneratedMethodAccessor8.invoke(Unknown Source)
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
java.lang.reflect.Method.invoke(Method.java:498)
sun.reflect.misc.MethodUtil.invoke(MethodUtil.java:276)
com.sun.jmx.mbeanserver.ConvertingMethod.invokeWithOpenReturn(ConvertingMethod.java:193)
com.sun.jmx.mbeanserver.ConvertingMethod.invokeWithOpenReturn(ConvertingMethod.java:175)
com.sun.jmx.mbeanserver.MXBeanIntrospector.invokeM2(MXBeanIntrospector.java:117)
com.sun.jmx.mbeanserver.MXBeanIntrospector.invokeM2(MXBeanIntrospector.java:54)
com.sun.jmx.mbeanserver.MBeanIntrospector.invokeM(MBeanIntrospector.java:237)
com.sun.jmx.mbeanserver.PerInterface.invoke(PerInterface.java:138)
com.sun.jmx.mbeanserver.MBeanSupport.invoke(MBeanSupport.java:252)
javax.management.StandardMBean.invoke(StandardMBean.java:405)
com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.invoke(DefaultMBeanServerInterceptor.java:819)
com.sun.jmx.mbeanserver.JmxMBeanServer.invoke(JmxMBeanServer.java:801)
javax.management.remote.rmi.RMIConnectionImpl.doOperation(RMIConnectionImpl.java:1468)
javax.management.remote.rmi.RMIConnectionImpl.access$300(RMIConnectionImpl.java:76)
javax.management.remote.rmi.RMIConnectionImpl$PrivilegedOperation.run(RMIConnectionImpl.java:1309)
javax.management.remote.rmi.RMIConnectionImpl.doPrivilegedOperation(RMIConnectionImpl.java:1401)
javax.management.remote.rmi.RMIConnectionImpl.invoke(RMIConnectionImpl.java:829)
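Note that the trace above belongs to the JMX RMI connection thread that services the monitoring tool (it is the one calling ThreadImpl.dumpAllThreads), not to either of the two initializer threads. To inspect those, take a full dump of the target JVM, for example with jstack <pid>; threads waiting on a class initialization lock are typically still reported as RUNNABLE, with the top Java frame being the <clinit> that triggered the other class's initialization.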

The deadlock cannot be detected

(screenshot from the original post)
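Graphical tools such as JConsole detect deadlocks through ThreadMXBean, which only looks for cycles of threads waiting to acquire object monitors or ownable synchronizers; the class initialization lock used during <clinit> is neither, so no cycle is reported here. A small check of mine (it must run in the same JVM that reproduces the hang; the 2-second sleep is arbitrary):

package test;

import java.lang.management.ManagementFactory;
import java.lang.management.ThreadMXBean;

// Sketch: reproduce the hang, then ask the in-process ThreadMXBean whether it
// sees a deadlock. It is expected to report none, because the two threads are
// waiting on class initialization, not on monitors or j.u.c locks.
public class DeadlockCheck {
    public static void main(String[] args) throws InterruptedException {
        new Thread(() -> new AA()).start();
        new Thread(() -> new BB()).start();

        Thread.sleep(2000);

        ThreadMXBean mx = ManagementFactory.getThreadMXBean();
        long[] monitorCycle = mx.findMonitorDeadlockedThreads();
        long[] anyCycle = mx.findDeadlockedThreads();
        System.out.println("findMonitorDeadlockedThreads: "
                + (monitorCycle == null ? "none" : monitorCycle.length + " thread(s)"));
        System.out.println("findDeadlockedThreads: "
                + (anyCycle == null ? "none" : anyCycle.length + " thread(s)"));
    }
}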


A quick look at javac, which turns source code into bytecode

(screenshot from the original post)


The execution of a Java program can be divided into two stages. First, javac compiles the source code into bytecode; during this step it performs lexical analysis, syntax analysis, and semantic analysis, which in compiler terminology is the front end of compilation. The bytecode is then executed directly by the interpreter, one instruction at a time, with no further compilation needed. While interpreting, the virtual machine collects profiling information about the running program, and on the basis of that information the JIT compiler gradually comes into play: it performs back-end compilation, turning bytecode into machine code. Not all code gets compiled, though; only code that the JVM identifies as hot code is eligible.
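To watch this interpret-first, compile-when-hot behaviour, it is enough to run a small program with a hot method under the standard HotSpot flag -XX:+PrintCompilation. The sketch below is mine; the class name and loop counts are arbitrary.

// Run with:  java -XX:+PrintCompilation HotLoop
// The JIT log should eventually show work(int) being compiled once it is hot.
public class HotLoop {
    static long blackhole;

    static long work(int n) {
        long s = 0;
        for (int i = 0; i < n; i++) {
            s += i * 31L;
        }
        return s;
    }

    public static void main(String[] args) {
        // Enough invocations to cross the hot-code threshold.
        for (int i = 0; i < 200_000; i++) {
            blackhole += work(1_000);
        }
        System.out.println(blackhole);
    }
}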

Compilers

The JVM integrates two compilers, the Client Compiler and the Server Compiler.
Client Compiler: focuses on startup speed and on local optimizations.
HotSpot ships with a Client Compiler called C1. It compiles quickly, but the code it produces performs worse than the Server Compiler's; C1's work falls into three main parts.
Server Compiler: focuses on global optimizations and delivers better performance.
The Server Compiler concentrates on global optimizations that take longer to perform, and it may even apply speculative, aggressive optimizations based on the profiling information gathered at run time. It takes longer to warm up, which suits long-running server applications, and its peak performance is usually more than 30% higher than the Client Compiler's. HotSpot currently has two Server Compilers: C2 and Graal.

src\share\classes\sun\tools\javac
(screenshot from the original post)





Static variables
(screenshot from the original post)

The Java object model: OOP-Klass

The JVM does not map Java objects directly onto C++ objects. Instead it uses the OOP-Klass model, mainly so that every object does not have to carry its own virtual function table:
**OOP (Ordinary Object Pointer)** represents an object's instance data.
**Klass** is the C++ representation of a Java class and describes the class's metadata.
Put simply, a Java class is split into two parts inside the JVM, data and description, corresponding to the OOP and the Klass respectively.
In the HotSpot source, loading a class creates an InstanceKlass, and each instantiated object corresponds to an InstanceOopDesc; the InstanceKlass lives in the metaspace while the InstanceOopDesc lives on the heap.
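One way to see this split from Java code is the OpenJDK JOL tool (a sketch of mine, assuming org.openjdk.jol:jol-core is on the classpath): the printed layout starts with the object header, whose klass word is what links the heap oop back to its InstanceKlass in the metaspace.

import org.openjdk.jol.info.ClassLayout;

// Sketch using JOL to print an instance's layout: mark word, klass pointer,
// then the instance fields (the OOP side of the oop-klass split).
public class OopKlassDemo {
    static class Point { int x; int y; }

    public static void main(String[] args) {
        Point p = new Point();
        System.out.println(ClassLayout.parseInstance(p).toPrintable());
    }
}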

The HotSpot class model: InstanceKlass

instanceKlass.cpp

/*
 * Copyright (c) 1997, 2019, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "classfile/javaClasses.hpp"
#include "classfile/systemDictionary.hpp"
#include "classfile/systemDictionaryShared.hpp"
#include "classfile/verifier.hpp"
#include "classfile/vmSymbols.hpp"
#include "compiler/compileBroker.hpp"
#include "gc_implementation/shared/markSweep.inline.hpp"
#include "gc_interface/collectedHeap.inline.hpp"
#include "interpreter/oopMapCache.hpp"
#include "interpreter/rewriter.hpp"
#include "jvmtifiles/jvmti.h"
#include "memory/genOopClosures.inline.hpp"
#include "memory/heapInspection.hpp"
#include "memory/iterator.inline.hpp"
#include "memory/metadataFactory.hpp"
#include "memory/oopFactory.hpp"
#include "oops/fieldStreams.hpp"
#include "oops/instanceClassLoaderKlass.hpp"
#include "oops/instanceKlass.hpp"
#include "oops/instanceMirrorKlass.hpp"
#include "oops/instanceOop.hpp"
#include "oops/klass.inline.hpp"
#include "oops/method.hpp"
#include "oops/oop.inline.hpp"
#include "oops/symbol.hpp"
#include "prims/jvmtiExport.hpp"
#include "prims/jvmtiRedefineClassesTrace.hpp"
#include "prims/jvmtiRedefineClasses.hpp"
#include "prims/jvmtiThreadState.hpp"
#include "prims/methodComparator.hpp"
#include "runtime/fieldDescriptor.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/javaCalls.hpp"
#include "runtime/mutexLocker.hpp"
#include "runtime/orderAccess.inline.hpp"
#include "runtime/thread.inline.hpp"
#include "services/classLoadingService.hpp"
#include "services/threadService.hpp"
#include "utilities/dtrace.hpp"
#include "utilities/macros.hpp"
#if INCLUDE_ALL_GCS
#include "gc_implementation/concurrentMarkSweep/cmsOopClosures.inline.hpp"
#include "gc_implementation/g1/g1CollectedHeap.inline.hpp"
#include "gc_implementation/g1/g1OopClosures.inline.hpp"
#include "gc_implementation/g1/g1RemSet.inline.hpp"
#include "gc_implementation/g1/heapRegionManager.inline.hpp"
#include "gc_implementation/parNew/parOopClosures.inline.hpp"
#include "gc_implementation/parallelScavenge/parallelScavengeHeap.inline.hpp"
#include "gc_implementation/parallelScavenge/psPromotionManager.inline.hpp"
#include "gc_implementation/parallelScavenge/psScavenge.inline.hpp"
#include "oops/oop.pcgc.inline.hpp"
#endif // INCLUDE_ALL_GCS
#ifdef COMPILER1
#include "c1/c1_Compiler.hpp"
#endif
#if INCLUDE_JFR
#include "jfr/jfrEvents.hpp"
#endif


PRAGMA_FORMAT_MUTE_WARNINGS_FOR_GCC

#ifdef DTRACE_ENABLED

#ifndef USDT2

HS_DTRACE_PROBE_DECL4(hotspot, class__initialization__required,
  char*, intptr_t, oop, intptr_t);
HS_DTRACE_PROBE_DECL5(hotspot, class__initialization__recursive,
  char*, intptr_t, oop, intptr_t, int);
HS_DTRACE_PROBE_DECL5(hotspot, class__initialization__concurrent,
  char*, intptr_t, oop, intptr_t, int);
HS_DTRACE_PROBE_DECL5(hotspot, class__initialization__erroneous,
  char*, intptr_t, oop, intptr_t, int);
HS_DTRACE_PROBE_DECL5(hotspot, class__initialization__super__failed,
  char*, intptr_t, oop, intptr_t, int);
HS_DTRACE_PROBE_DECL5(hotspot, class__initialization__clinit,
  char*, intptr_t, oop, intptr_t, int);
HS_DTRACE_PROBE_DECL5(hotspot, class__initialization__error,
  char*, intptr_t, oop, intptr_t, int);
HS_DTRACE_PROBE_DECL5(hotspot, class__initialization__end,
  char*, intptr_t, oop, intptr_t, int);

#define DTRACE_CLASSINIT_PROBE(type, clss, thread_type)          \
  {                                                              \
    char* data = NULL;                                           \
    int len = 0;                                                 \
    Symbol* name = (clss)->name();                               \
    if (name != NULL) {                                          \
      data = (char*)name->bytes();                               \
      len = name->utf8_length();                                 \
    }                                                            \
    HS_DTRACE_PROBE4(hotspot, class__initialization__##type,     \
      data, len, (void *)(clss)->class_loader(), thread_type);           \
  }

#define DTRACE_CLASSINIT_PROBE_WAIT(type, clss, thread_type, wait) \
  {                                                              \
    char* data = NULL;                                           \
    int len = 0;                                                 \
    Symbol* name = (clss)->name();                               \
    if (name != NULL) {                                          \
      data = (char*)name->bytes();                               \
      len = name->utf8_length();                                 \
    }                                                            \
    HS_DTRACE_PROBE5(hotspot, class__initialization__##type,     \
      data, len, (void *)(clss)->class_loader(), thread_type, wait);     \
  }
#else /* USDT2 */

#define HOTSPOT_CLASS_INITIALIZATION_required HOTSPOT_CLASS_INITIALIZATION_REQUIRED
#define HOTSPOT_CLASS_INITIALIZATION_recursive HOTSPOT_CLASS_INITIALIZATION_RECURSIVE
#define HOTSPOT_CLASS_INITIALIZATION_concurrent HOTSPOT_CLASS_INITIALIZATION_CONCURRENT
#define HOTSPOT_CLASS_INITIALIZATION_erroneous HOTSPOT_CLASS_INITIALIZATION_ERRONEOUS
#define HOTSPOT_CLASS_INITIALIZATION_super__failed HOTSPOT_CLASS_INITIALIZATION_SUPER_FAILED
#define HOTSPOT_CLASS_INITIALIZATION_clinit HOTSPOT_CLASS_INITIALIZATION_CLINIT
#define HOTSPOT_CLASS_INITIALIZATION_error HOTSPOT_CLASS_INITIALIZATION_ERROR
#define HOTSPOT_CLASS_INITIALIZATION_end HOTSPOT_CLASS_INITIALIZATION_END
#define DTRACE_CLASSINIT_PROBE(type, clss, thread_type)          \
  {                                                              \
    char* data = NULL;                                           \
    int len = 0;                                                 \
    Symbol* name = (clss)->name();                               \
    if (name != NULL) {                                          \
      data = (char*)name->bytes();                               \
      len = name->utf8_length();                                 \
    }                                                            \
    HOTSPOT_CLASS_INITIALIZATION_##type(                         \
      data, len, (void *)(clss)->class_loader(), thread_type); \
  }

#define DTRACE_CLASSINIT_PROBE_WAIT(type, clss, thread_type, wait) \
  {                                                              \
    char* data = NULL;                                           \
    int len = 0;                                                 \
    Symbol* name = (clss)->name();                               \
    if (name != NULL) {                                          \
      data = (char*)name->bytes();                               \
      len = name->utf8_length();                                 \
    }                                                            \
    HOTSPOT_CLASS_INITIALIZATION_##type(                         \
      data, len, (void *)(clss)->class_loader(), thread_type, wait); \
  }
#endif /* USDT2 */

#else //  ndef DTRACE_ENABLED

#define DTRACE_CLASSINIT_PROBE(type, clss, thread_type)
#define DTRACE_CLASSINIT_PROBE_WAIT(type, clss, thread_type, wait)

#endif //  ndef DTRACE_ENABLED

volatile int InstanceKlass::_total_instanceKlass_count = 0;

InstanceKlass* InstanceKlass::allocate_instance_klass(
                                              ClassLoaderData* loader_data,
                                              int vtable_len,
                                              int itable_len,
                                              int static_field_size,
                                              int nonstatic_oop_map_size,
                                              ReferenceType rt,
                                              AccessFlags access_flags,
                                              Symbol* name,
                                              Klass* super_klass,
                                              bool is_anonymous,
                                              TRAPS) {

  int size = InstanceKlass::size(vtable_len, itable_len, nonstatic_oop_map_size,
                                 access_flags.is_interface(), is_anonymous);

  // Allocation
  InstanceKlass* ik;
  if (rt == REF_NONE) {
    if (name == vmSymbols::java_lang_Class()) {
      ik = new (loader_data, size, THREAD) InstanceMirrorKlass(
        vtable_len, itable_len, static_field_size, nonstatic_oop_map_size, rt,
        access_flags, is_anonymous);
    } else if (name == vmSymbols::java_lang_ClassLoader() ||
          (SystemDictionary::ClassLoader_klass_loaded() &&
          super_klass != NULL &&
          super_klass->is_subtype_of(SystemDictionary::ClassLoader_klass()))) {
      ik = new (loader_data, size, THREAD) InstanceClassLoaderKlass(
        vtable_len, itable_len, static_field_size, nonstatic_oop_map_size, rt,
        access_flags, is_anonymous);
    } else {
      // normal class
      ik = new (loader_data, size, THREAD) InstanceKlass(
        vtable_len, itable_len, static_field_size, nonstatic_oop_map_size, rt,
        access_flags, is_anonymous);
    }
  } else {
    // reference klass
    ik = new (loader_data, size, THREAD) InstanceRefKlass(
        vtable_len, itable_len, static_field_size, nonstatic_oop_map_size, rt,
        access_flags, is_anonymous);
  }

  // Check for pending exception before adding to the loader data and incrementing
  // class count.  Can get OOM here.
  if (HAS_PENDING_EXCEPTION) {
    return NULL;
  }

  // Add all classes to our internal class loader list here,
  // including classes in the bootstrap (NULL) class loader.
  loader_data->add_class(ik);

  Atomic::inc(&_total_instanceKlass_count);
  return ik;
}


// copy method ordering from resource area to Metaspace
void InstanceKlass::copy_method_ordering(intArray* m, TRAPS) {
  if (m != NULL) {
    // allocate a new array and copy contents (memcpy?)
    _method_ordering = MetadataFactory::new_array<int>(class_loader_data(), m->length(), CHECK);
    for (int i = 0; i < m->length(); i++) {
      _method_ordering->at_put(i, m->at(i));
    }
  } else {
    _method_ordering = Universe::the_empty_int_array();
  }
}

// create a new array of vtable_indices for default methods
Array<int>* InstanceKlass::create_new_default_vtable_indices(int len, TRAPS) {
  Array<int>* vtable_indices = MetadataFactory::new_array<int>(class_loader_data(), len, CHECK_NULL);
  assert(default_vtable_indices() == NULL, "only create once");
  set_default_vtable_indices(vtable_indices);
  return vtable_indices;
}

InstanceKlass::InstanceKlass(int vtable_len,
                             int itable_len,
                             int static_field_size,
                             int nonstatic_oop_map_size,
                             ReferenceType rt,
                             AccessFlags access_flags,
                             bool is_anonymous) {
  No_Safepoint_Verifier no_safepoint; // until k becomes parsable

  int iksize = InstanceKlass::size(vtable_len, itable_len, nonstatic_oop_map_size,
                                   access_flags.is_interface(), is_anonymous);

  set_vtable_length(vtable_len);
  set_itable_length(itable_len);
  set_static_field_size(static_field_size);
  set_nonstatic_oop_map_size(nonstatic_oop_map_size);
  set_access_flags(access_flags);
  _misc_flags = 0;  // initialize to zero
  set_is_anonymous(is_anonymous);
  assert(size() == iksize, "wrong size for object");

  set_array_klasses(NULL);
  set_methods(NULL);
  set_method_ordering(NULL);
  set_default_methods(NULL);
  set_default_vtable_indices(NULL);
  set_local_interfaces(NULL);
  set_transitive_interfaces(NULL);
  init_implementor();
  set_fields(NULL, 0);
  set_constants(NULL);
  set_class_loader_data(NULL);
  set_source_file_name_index(0);
  set_source_debug_extension(NULL, 0);
  set_array_name(NULL);
  set_inner_classes(NULL);
  set_static_oop_field_count(0);
  set_nonstatic_field_size(0);
  set_is_marked_dependent(false);
  set_has_unloaded_dependent(false);
  set_init_state(InstanceKlass::allocated);
  set_init_thread(NULL);
  set_init_state(allocated);
  set_reference_type(rt);
  set_oop_map_cache(NULL);
  set_jni_ids(NULL);
  set_osr_nmethods_head(NULL);
  set_breakpoints(NULL);
  init_previous_versions();
  set_generic_signature_index(0);
  release_set_methods_jmethod_ids(NULL);
  set_annotations(NULL);
  set_jvmti_cached_class_field_map(NULL);
  set_initial_method_idnum(0);
  _dependencies = NULL;
  set_jvmti_cached_class_field_map(NULL);
  set_cached_class_file(NULL);
  set_initial_method_idnum(0);
  set_minor_version(0);
  set_major_version(0);
  NOT_PRODUCT(_verify_count = 0;)

  // initialize the non-header words to zero
  intptr_t* p = (intptr_t*)this;
  for (int index = InstanceKlass::header_size(); index < iksize; index++) {
    p[index] = NULL_WORD;
  }

  // Set temporary value until parseClassFile updates it with the real instance
  // size.
  set_layout_helper(Klass::instance_layout_helper(0, true));
}


void InstanceKlass::deallocate_methods(ClassLoaderData* loader_data,
                                       Array<Method*>* methods) {
  if (methods != NULL && methods != Universe::the_empty_method_array() &&
      !methods->is_shared()) {
    for (int i = 0; i < methods->length(); i++) {
      Method* method = methods->at(i);
      if (method == NULL) continue;  // maybe null if error processing
      // Only want to delete methods that are not executing for RedefineClasses.
      // The previous version will point to them so they're not totally dangling
      assert (!method->on_stack(), "shouldn't be called with methods on stack");
      MetadataFactory::free_metadata(loader_data, method);
    }
    MetadataFactory::free_array<Method*>(loader_data, methods);
  }
}

void InstanceKlass::deallocate_interfaces(ClassLoaderData* loader_data,
                                          Klass* super_klass,
                                          Array<Klass*>* local_interfaces,
                                          Array<Klass*>* transitive_interfaces) {
  // Only deallocate transitive interfaces if not empty, same as super class
  // or same as local interfaces.  See code in parseClassFile.
  Array<Klass*>* ti = transitive_interfaces;
  if (ti != Universe::the_empty_klass_array() && ti != local_interfaces) {
    // check that the interfaces don't come from super class
    Array<Klass*>* sti = (super_klass == NULL) ? NULL :
                    InstanceKlass::cast(super_klass)->transitive_interfaces();
    if (ti != sti && ti != NULL && !ti->is_shared()) {
      MetadataFactory::free_array<Klass*>(loader_data, ti);
    }
  }

  // local interfaces can be empty
  if (local_interfaces != Universe::the_empty_klass_array() &&
      local_interfaces != NULL && !local_interfaces->is_shared()) {
    MetadataFactory::free_array<Klass*>(loader_data, local_interfaces);
  }
}

// This function deallocates the metadata and C heap pointers that the
// InstanceKlass points to.
void InstanceKlass::deallocate_contents(ClassLoaderData* loader_data) {

  // Orphan the mirror first, CMS thinks it's still live.
  if (java_mirror() != NULL) {
    java_lang_Class::set_klass(java_mirror(), NULL);
  }

  // Need to take this class off the class loader data list.
  loader_data->remove_class(this);

  // The array_klass for this class is created later, after error handling.
  // For class redefinition, we keep the original class so this scratch class
  // doesn't have an array class.  Either way, assert that there is nothing
  // to deallocate.
  assert(array_klasses() == NULL, "array classes shouldn't be created for this class yet");

  // Release C heap allocated data that this might point to, which includes
  // reference counting symbol names.
  release_C_heap_structures();

  deallocate_methods(loader_data, methods());
  set_methods(NULL);

  if (method_ordering() != NULL &&
      method_ordering() != Universe::the_empty_int_array() &&
      !method_ordering()->is_shared()) {
    MetadataFactory::free_array<int>(loader_data, method_ordering());
  }
  set_method_ordering(NULL);

  // default methods can be empty
  if (default_methods() != NULL &&
      default_methods() != Universe::the_empty_method_array() &&
      !default_methods()->is_shared()) {
    MetadataFactory::free_array<Method*>(loader_data, default_methods());
  }
  // Do NOT deallocate the default methods, they are owned by superinterfaces.
  set_default_methods(NULL);

  // default methods vtable indices can be empty
  if (default_vtable_indices() != NULL &&
      !default_vtable_indices()->is_shared()) {
    MetadataFactory::free_array<int>(loader_data, default_vtable_indices());
  }
  set_default_vtable_indices(NULL);


  // This array is in Klass, but remove it with the InstanceKlass since
  // this place would be the only caller and it can share memory with transitive
  // interfaces.
  if (secondary_supers() != NULL &&
      secondary_supers() != Universe::the_empty_klass_array() &&
      secondary_supers() != transitive_interfaces() &&
      !secondary_supers()->is_shared()) {
    MetadataFactory::free_array<Klass*>(loader_data, secondary_supers());
  }
  set_secondary_supers(NULL);

  deallocate_interfaces(loader_data, super(), local_interfaces(), transitive_interfaces());
  set_transitive_interfaces(NULL);
  set_local_interfaces(NULL);

  if (fields() != NULL && !fields()->is_shared()) {
    MetadataFactory::free_array<jushort>(loader_data, fields());
  }
  set_fields(NULL, 0);

  // If a method from a redefined class is using this constant pool, don't
  // delete it, yet.  The new class's previous version will point to this.
  if (constants() != NULL) {
    assert (!constants()->on_stack(), "shouldn't be called if anything is onstack");
    if (!constants()->is_shared()) {
      MetadataFactory::free_metadata(loader_data, constants());
    }
    // Delete any cached resolution errors for the constant pool
    SystemDictionary::delete_resolution_error(constants());

    set_constants(NULL);
  }

  if (inner_classes() != NULL &&
      inner_classes() != Universe::the_empty_short_array() &&
      !inner_classes()->is_shared()) {
    MetadataFactory::free_array<jushort>(loader_data, inner_classes());
  }
  set_inner_classes(NULL);

  // We should deallocate the Annotations instance if it's not in shared spaces.
  if (annotations() != NULL && !annotations()->is_shared()) {
    MetadataFactory::free_metadata(loader_data, annotations());
  }
  set_annotations(NULL);
}

bool InstanceKlass::should_be_initialized() const {
  return !is_initialized();
}

klassVtable* InstanceKlass::vtable() const {
  return new klassVtable(this, start_of_vtable(), vtable_length() / vtableEntry::size());
}

klassItable* InstanceKlass::itable() const {
  return new klassItable(instanceKlassHandle(this));
}

void InstanceKlass::eager_initialize(Thread *thread) {
  if (!EagerInitialization) return;

  if (this->is_not_initialized()) {
    // abort if the the class has a class initializer
    if (this->class_initializer() != NULL) return;

    // abort if it is java.lang.Object (initialization is handled in genesis)
    Klass* super = this->super();
    if (super == NULL) return;

    // abort if the super class should be initialized
    if (!InstanceKlass::cast(super)->is_initialized()) return;

    // call body to expose the this pointer
    instanceKlassHandle this_oop(thread, this);
    eager_initialize_impl(this_oop);
  }
}

// JVMTI spec thinks there are signers and protection domain in the
// instanceKlass.  These accessors pretend these fields are there.
// The hprof specification also thinks these fields are in InstanceKlass.
oop InstanceKlass::protection_domain() const {
  // return the protection_domain from the mirror
  return java_lang_Class::protection_domain(java_mirror());
}

// To remove these from requires an incompatible change and CCC request.
objArrayOop InstanceKlass::signers() const {
  // return the signers from the mirror
  return java_lang_Class::signers(java_mirror());
}

oop InstanceKlass::init_lock() const {
  // return the init lock from the mirror
  oop lock = java_lang_Class::init_lock(java_mirror());
  // Prevent reordering with any access of initialization state
  OrderAccess::loadload();
  assert((oop)lock != NULL || !is_not_initialized(), // initialized or in_error state
         "only fully initialized state can have a null lock");
  return lock;
}

// Set the initialization lock to null so the object can be GC'ed.  Any racing
// threads to get this lock will see a null lock and will not lock.
// That's okay because they all check for initialized state after getting
// the lock and return.
void InstanceKlass::fence_and_clear_init_lock() {
  // make sure previous stores are all done, notably the init_state.
  OrderAccess::storestore();
  java_lang_Class::set_init_lock(java_mirror(), NULL);
  assert(!is_not_initialized(), "class must be initialized now");
}

void InstanceKlass::eager_initialize_impl(instanceKlassHandle this_oop) {
  EXCEPTION_MARK;
  oop init_lock = this_oop->init_lock();
  ObjectLocker ol(init_lock, THREAD, init_lock != NULL);

  // abort if someone beat us to the initialization
  if (!this_oop->is_not_initialized()) return;  // note: not equivalent to is_initialized()

  ClassState old_state = this_oop->init_state();
  link_class_impl(this_oop, true, THREAD);
  if (HAS_PENDING_EXCEPTION) {
    CLEAR_PENDING_EXCEPTION;
    // Abort if linking the class throws an exception.

    // Use a test to avoid redundantly resetting the state if there's
    // no change.  Set_init_state() asserts that state changes make
    // progress, whereas here we might just be spinning in place.
    if( old_state != this_oop->_init_state )
      this_oop->set_init_state (old_state);
  } else {
    // linking successfull, mark class as initialized
    this_oop->set_init_state (fully_initialized);
    this_oop->fence_and_clear_init_lock();
    // trace
    if (TraceClassInitialization) {
      ResourceMark rm(THREAD);
      tty->print_cr("[Initialized %s without side effects]", this_oop->external_name());
    }
  }
}


// See "The Virtual Machine Specification" section 2.16.5 for a detailed explanation of the class initialization
// process. The step comments refers to the procedure described in that section.
// Note: implementation moved to static method to expose the this pointer.
void InstanceKlass::initialize(TRAPS) {
  if (this->should_be_initialized()) {
    HandleMark hm(THREAD);
    instanceKlassHandle this_oop(THREAD, this);
    initialize_impl(this_oop, CHECK);
    // Note: at this point the class may be initialized
    //       OR it may be in the state of being initialized
    //       in case of recursive initialization!
  } else {
    assert(is_initialized(), "sanity check");
  }
}


bool InstanceKlass::verify_code(
    instanceKlassHandle this_oop, bool throw_verifyerror, TRAPS) {
  // 1) Verify the bytecodes
  Verifier::Mode mode =
    throw_verifyerror ? Verifier::ThrowException : Verifier::NoException;
  return Verifier::verify(this_oop, mode, this_oop->should_verify_class(), THREAD);
}


// Used exclusively by the shared spaces dump mechanism to prevent
// classes mapped into the shared regions in new VMs from appearing linked.

void InstanceKlass::unlink_class() {
  assert(is_linked(), "must be linked");
  _init_state = loaded;
}

void InstanceKlass::link_class(TRAPS) {
  assert(is_loaded(), "must be loaded");
  if (!is_linked()) {
    HandleMark hm(THREAD);
    instanceKlassHandle this_oop(THREAD, this);
    link_class_impl(this_oop, true, CHECK);
  }
}

// Called to verify that a class can link during initialization, without
// throwing a VerifyError.
bool InstanceKlass::link_class_or_fail(TRAPS) {
  assert(is_loaded(), "must be loaded");
  if (!is_linked()) {
    HandleMark hm(THREAD);
    instanceKlassHandle this_oop(THREAD, this);
    link_class_impl(this_oop, false, CHECK_false);
  }
  return is_linked();
}

bool InstanceKlass::link_class_impl(
    instanceKlassHandle this_oop, bool throw_verifyerror, TRAPS) {
  // check for error state.
  // This is checking for the wrong state.  If the state is initialization_error,
  // then this class *was* linked.  The CDS code does a try_link_class and uses
  // initialization_error to mark classes to not include in the archive during
  // DumpSharedSpaces.  This should be removed when the CDS bug is fixed.
  if (this_oop->is_in_error_state()) {
    ResourceMark rm(THREAD);
    THROW_MSG_(vmSymbols::java_lang_NoClassDefFoundError(),
               this_oop->external_name(), false);
  }
  // return if already verified
  if (this_oop->is_linked()) {
    return true;
  }

  // Timing
  // timer handles recursion
  assert(THREAD->is_Java_thread(), "non-JavaThread in link_class_impl");
  JavaThread* jt = (JavaThread*)THREAD;

  // link super class before linking this class
  instanceKlassHandle super(THREAD, this_oop->super());
  if (super.not_null()) {
    if (super->is_interface()) {  // check if super class is an interface
      ResourceMark rm(THREAD);
      Exceptions::fthrow(
        THREAD_AND_LOCATION,
        vmSymbols::java_lang_IncompatibleClassChangeError(),
        "class %s has interface %s as super class",
        this_oop->external_name(),
        super->external_name()
      );
      return false;
    }

    link_class_impl(super, throw_verifyerror, CHECK_false);
  }

  // link all interfaces implemented by this class before linking this class
  Array<Klass*>* interfaces = this_oop->local_interfaces();
  int num_interfaces = interfaces->length();
  for (int index = 0; index < num_interfaces; index++) {
    HandleMark hm(THREAD);
    instanceKlassHandle ih(THREAD, interfaces->at(index));
    link_class_impl(ih, throw_verifyerror, CHECK_false);
  }

  // in case the class is linked in the process of linking its superclasses
  if (this_oop->is_linked()) {
    return true;
  }

  // trace only the link time for this klass that includes
  // the verification time
  PerfClassTraceTime vmtimer(ClassLoader::perf_class_link_time(),
                             ClassLoader::perf_class_link_selftime(),
                             ClassLoader::perf_classes_linked(),
                             jt->get_thread_stat()->perf_recursion_counts_addr(),
                             jt->get_thread_stat()->perf_timers_addr(),
                             PerfClassTraceTime::CLASS_LINK);

  // verification & rewriting
  {
    oop init_lock = this_oop->init_lock();
    ObjectLocker ol(init_lock, THREAD, init_lock != NULL);
    // rewritten will have been set if loader constraint error found
    // on an earlier link attempt
    // don't verify or rewrite if already rewritten

    if (!this_oop->is_linked()) {
      if (!this_oop->is_rewritten()) {
        {
          // Timer includes any side effects of class verification (resolution,
          // etc), but not recursive entry into verify_code().
          PerfClassTraceTime timer(ClassLoader::perf_class_verify_time(),
                                   ClassLoader::perf_class_verify_selftime(),
                                   ClassLoader::perf_classes_verified(),
                                   jt->get_thread_stat()->perf_recursion_counts_addr(),
                                   jt->get_thread_stat()->perf_timers_addr(),
                                   PerfClassTraceTime::CLASS_VERIFY);
          bool verify_ok = verify_code(this_oop, throw_verifyerror, THREAD);
          if (!verify_ok) {
            return false;
          }
        }

        // Just in case a side-effect of verify linked this class already
        // (which can sometimes happen since the verifier loads classes
        // using custom class loaders, which are free to initialize things)
        if (this_oop->is_linked()) {
          return true;
        }

        // also sets rewritten
        this_oop->rewrite_class(CHECK_false);
      } else if (this_oop()->is_shared()) {
        ResourceMark rm(THREAD);
        char* message_buffer; // res-allocated by check_verification_dependencies
        Handle loader = this_oop()->class_loader();
        Handle pd     = this_oop()->protection_domain();
        bool verified = SystemDictionaryShared::check_verification_dependencies(this_oop(),
                        loader, pd, &message_buffer, THREAD);
        if (!verified) {
          THROW_MSG_(vmSymbols::java_lang_VerifyError(), message_buffer, false);
        }
      }

      // relocate jsrs and link methods after they are all rewritten
      this_oop->link_methods(CHECK_false);

      // Initialize the vtable and interface table after
      // methods have been rewritten since rewrite may
      // fabricate new Method*s.
      // also does loader constraint checking
      //
      // Initialize_vtable and initialize_itable need to be rerun for
      // a shared class if the class is not loaded by the NULL classloader.
      ClassLoaderData * loader_data = this_oop->class_loader_data();
      if (!(this_oop()->is_shared() &&
            loader_data->is_the_null_class_loader_data())) {
        ResourceMark rm(THREAD);
        this_oop->vtable()->initialize_vtable(true, CHECK_false);
        this_oop->itable()->initialize_itable(true, CHECK_false);
      }
#ifdef ASSERT
      else {
        ResourceMark rm(THREAD);
        this_oop->vtable()->verify(tty, true);
        // In case itable verification is ever added.
        // this_oop->itable()->verify(tty, true);
      }
#endif
      this_oop->set_init_state(linked);
      if (JvmtiExport::should_post_class_prepare()) {
        Thread *thread = THREAD;
        assert(thread->is_Java_thread(), "thread->is_Java_thread()");
        JvmtiExport::post_class_prepare((JavaThread *) thread, this_oop());
      }
    }
  }
  return true;
}


// Rewrite the byte codes of all of the methods of a class.
// The rewriter must be called exactly once. Rewriting must happen after
// verification but before the first method of the class is executed.
void InstanceKlass::rewrite_class(TRAPS) {
  assert(is_loaded(), "must be loaded");
  instanceKlassHandle this_oop(THREAD, this);
  if (this_oop->is_rewritten()) {
    assert(this_oop()->is_shared(), "rewriting an unshared class?");
    return;
  }
  Rewriter::rewrite(this_oop, CHECK);
  this_oop->set_rewritten();
}

// Now relocate and link method entry points after class is rewritten.
// This is outside is_rewritten flag. In case of an exception, it can be
// executed more than once.
void InstanceKlass::link_methods(TRAPS) {
  int len = methods()->length();
  for (int i = len-1; i >= 0; i--) {
    methodHandle m(THREAD, methods()->at(i));

    // Set up method entry points for compiler and interpreter    .
    m->link_method(m, CHECK);

    // This is for JVMTI and unrelated to relocator but the last thing we do
#ifdef ASSERT
    if (StressMethodComparator) {
      ResourceMark rm(THREAD);
      static int nmc = 0;
      for (int j = i; j >= 0 && j >= i-4; j--) {
        if ((++nmc % 1000) == 0)  tty->print_cr("Have run MethodComparator %d times...", nmc);
        bool z = MethodComparator::methods_EMCP(m(),
                   methods()->at(j));
        if (j == i && !z) {
          tty->print("MethodComparator FAIL: "); m->print(); m->print_codes();
          assert(z, "method must compare equal to itself");
        }
      }
    }
#endif //ASSERT
  }
}

// Eagerly initialize superinterfaces that declare default methods (concrete instance: any access)
void InstanceKlass::initialize_super_interfaces(instanceKlassHandle this_k, TRAPS) {
  assert (this_k->has_default_methods(), "caller should have checked this");
  for (int i = 0; i < this_k->local_interfaces()->length(); ++i) {
    Klass* iface = this_k->local_interfaces()->at(i);
    InstanceKlass* ik = InstanceKlass::cast(iface);

    // Initialization is depth first search ie. we start with top of the inheritance tree
    // has_default_methods drives searching superinterfaces since it
    // means has_default_methods in its superinterface hierarchy
    if (ik->has_default_methods()) {
      ik->initialize_super_interfaces(ik, CHECK);
    }

    // Only initialize() interfaces that "declare" concrete methods.
    if (ik->should_be_initialized() && ik->declares_default_methods()) {
      ik->initialize(CHECK);
    }
  }
}

void InstanceKlass::initialize_impl(instanceKlassHandle this_oop, TRAPS) {
  // Make sure klass is linked (verified) before initialization
  // A class could already be verified, since it has been reflected upon.
  this_oop->link_class(CHECK);

  DTRACE_CLASSINIT_PROBE(required, InstanceKlass::cast(this_oop()), -1);

  bool wait = false;

  // refer to the JVM book page 47 for description of steps
  // Step 1
  {
    oop init_lock = this_oop->init_lock();
    ObjectLocker ol(init_lock, THREAD, init_lock != NULL);

    Thread *self = THREAD; // it's passed the current thread

    // Step 2
    // If we were to use wait() instead of waitInterruptibly() then
    // we might end up throwing IE from link/symbol resolution sites
    // that aren't expected to throw.  This would wreak havoc.  See 6320309.
    while(this_oop->is_being_initialized() && !this_oop->is_reentrant_initialization(self)) {
        wait = true;
      ol.waitUninterruptibly(CHECK);
    }
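    // (Annotation added for this article, not part of the HotSpot source: in the
    //  InitDeadLock example, the thread initializing AA reaches this loop while
    //  trying to initialize BB, which is being_initialized by the other thread,
    //  and vice versa. Each thread therefore parks in waitUninterruptibly() on
    //  the other class's init lock; since neither <clinit> can complete,
    //  set_initialization_state_and_notify() never runs for either class and the
    //  notify_all() that would wake the waiters never happens.)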

    // Step 3
    if (this_oop->is_being_initialized() && this_oop->is_reentrant_initialization(self)) {
      DTRACE_CLASSINIT_PROBE_WAIT(recursive, InstanceKlass::cast(this_oop()), -1,wait);
      return;
    }

    // Step 4
    if (this_oop->is_initialized()) {
      DTRACE_CLASSINIT_PROBE_WAIT(concurrent, InstanceKlass::cast(this_oop()), -1,wait);
      return;
    }

    // Step 5
    if (this_oop->is_in_error_state()) {
      DTRACE_CLASSINIT_PROBE_WAIT(erroneous, InstanceKlass::cast(this_oop()), -1,wait);
      ResourceMark rm(THREAD);
      const char* desc = "Could not initialize class ";
      const char* className = this_oop->external_name();
      size_t msglen = strlen(desc) + strlen(className) + 1;
      char* message = NEW_RESOURCE_ARRAY(char, msglen);
      if (NULL == message) {
        // Out of memory: can't create detailed error message
        THROW_MSG(vmSymbols::java_lang_NoClassDefFoundError(), className);
      } else {
        jio_snprintf(message, msglen, "%s%s", desc, className);
        THROW_MSG(vmSymbols::java_lang_NoClassDefFoundError(), message);
      }
    }

    // Step 6
    this_oop->set_init_state(being_initialized);
    this_oop->set_init_thread(self);
  }

  // Step 7
  // Next, if C is a class rather than an interface, initialize its super class and super
  // interfaces.
  if (!this_oop->is_interface()) {
    Klass* super_klass = this_oop->super();
    if (super_klass != NULL && super_klass->should_be_initialized()) {
      super_klass->initialize(THREAD);
    }
    // If C implements any interfaces that declares a non-abstract, non-static method,
    // the initialization of C triggers initialization of its super interfaces.
    // Only need to recurse if has_default_methods which includes declaring and
    // inheriting default methods
    if (!HAS_PENDING_EXCEPTION && this_oop->has_default_methods()) {
      this_oop->initialize_super_interfaces(this_oop, THREAD);
    }

    // If any exceptions, complete abruptly, throwing the same exception as above.
    if (HAS_PENDING_EXCEPTION) {
      Handle e(THREAD, PENDING_EXCEPTION);
      CLEAR_PENDING_EXCEPTION;
      {
        EXCEPTION_MARK;
        // Locks object, set state, and notify all waiting threads
        this_oop->set_initialization_state_and_notify(initialization_error, THREAD);
        CLEAR_PENDING_EXCEPTION;
      }
      DTRACE_CLASSINIT_PROBE_WAIT(super__failed, InstanceKlass::cast(this_oop()), -1,wait);
      THROW_OOP(e());
    }
  }

  // Step 8
  {
    assert(THREAD->is_Java_thread(), "non-JavaThread in initialize_impl");
    JavaThread* jt = (JavaThread*)THREAD;
    DTRACE_CLASSINIT_PROBE_WAIT(clinit, InstanceKlass::cast(this_oop()), -1,wait);
    // Timer includes any side effects of class initialization (resolution,
    // etc), but not recursive entry into call_class_initializer().
    PerfClassTraceTime timer(ClassLoader::perf_class_init_time(),
                             ClassLoader::perf_class_init_selftime(),
                             ClassLoader::perf_classes_inited(),
                             jt->get_thread_stat()->perf_recursion_counts_addr(),
                             jt->get_thread_stat()->perf_timers_addr(),
                             PerfClassTraceTime::CLASS_CLINIT);
    this_oop->call_class_initializer(THREAD);
  }

  // Step 9
  if (!HAS_PENDING_EXCEPTION) {
    this_oop->set_initialization_state_and_notify(fully_initialized, CHECK);
    { ResourceMark rm(THREAD);
      debug_only(this_oop->vtable()->verify(tty, true);)
    }
  }
  else {
    // Step 10 and 11
    Handle e(THREAD, PENDING_EXCEPTION);
    CLEAR_PENDING_EXCEPTION;
    // JVMTI has already reported the pending exception
    // JVMTI internal flag reset is needed in order to report ExceptionInInitializerError
    JvmtiExport::clear_detected_exception((JavaThread*)THREAD);
    {
      EXCEPTION_MARK;
      this_oop->set_initialization_state_and_notify(initialization_error, THREAD);
      CLEAR_PENDING_EXCEPTION;   // ignore any exception thrown, class initialization error is thrown below
      // JVMTI has already reported the pending exception
      // JVMTI internal flag reset is needed in order to report ExceptionInInitializerError
      JvmtiExport::clear_detected_exception((JavaThread*)THREAD);
    }
    DTRACE_CLASSINIT_PROBE_WAIT(error, InstanceKlass::cast(this_oop()), -1,wait);
    if (e->is_a(SystemDictionary::Error_klass())) {
      THROW_OOP(e());
    } else {
      JavaCallArguments args(e);
      THROW_ARG(vmSymbols::java_lang_ExceptionInInitializerError(),
                vmSymbols::throwable_void_signature(),
                &args);
    }
  }
  DTRACE_CLASSINIT_PROBE_WAIT(end, InstanceKlass::cast(this_oop()), -1,wait);
}


// Note: implementation moved to static method to expose the this pointer.
void InstanceKlass::set_initialization_state_and_notify(ClassState state, TRAPS) {
  instanceKlassHandle kh(THREAD, this);
  set_initialization_state_and_notify_impl(kh, state, CHECK);
}

void InstanceKlass::set_initialization_state_and_notify_impl(instanceKlassHandle this_oop, ClassState state, TRAPS) {
  oop init_lock = this_oop->init_lock();
  if (init_lock != NULL) {
    ObjectLocker ol(init_lock, THREAD);
    this_oop->set_init_thread(NULL); // reset _init_thread before changing _init_state
    this_oop->set_init_state(state);
    this_oop->fence_and_clear_init_lock();
    ol.notify_all(CHECK);
  } else {
    assert(init_lock != NULL, "The initialization state should never be set twice");
    this_oop->set_init_thread(NULL); // reset _init_thread before changing _init_state
    this_oop->set_init_state(state);
  }
}

// The embedded _implementor field can only record one implementor.
// When there are more than one implementors, the _implementor field
// is set to the interface Klass* itself. Following are the possible
// values for the _implementor field:
//   NULL                  - no implementor
//   implementor Klass*    - one implementor
//   self                  - more than one implementor
//
// The _implementor field only exists for interfaces.
void InstanceKlass::add_implementor(Klass* k) {
  assert(Compile_lock->owned_by_self(), "");
  assert(is_interface(), "not interface");
  // Filter out my subinterfaces.
  // (Note: Interfaces are never on the subklass list.)
  if (InstanceKlass::cast(k)->is_interface()) return;

  // Filter out subclasses whose supers already implement me.
  // (Note: CHA must walk subclasses of direct implementors
  // in order to locate indirect implementors.)
  Klass* sk = InstanceKlass::cast(k)->super();
  if (sk != NULL && InstanceKlass::cast(sk)->implements_interface(this))
    // We only need to check one immediate superclass, since the
    // implements_interface query looks at transitive_interfaces.
    // Any supers of the super have the same (or fewer) transitive_interfaces.
    return;

  Klass* ik = implementor();
  if (ik == NULL) {
    set_implementor(k);
  } else if (ik != this) {
    // There is already an implementor. Use itself as an indicator of
    // more than one implementors.
    set_implementor(this);
  }

  // The implementor also implements the transitive_interfaces
  for (int index = 0; index < local_interfaces()->length(); index++) {
    InstanceKlass::cast(local_interfaces()->at(index))->add_implementor(k);
  }
}

void InstanceKlass::init_implementor() {
  if (is_interface()) {
    set_implementor(NULL);
  }
}


void InstanceKlass::process_interfaces(Thread *thread) {
  // link this class into the implementors list of every interface it implements
  Klass* this_as_klass_oop = this;
  for (int i = local_interfaces()->length() - 1; i >= 0; i--) {
    assert(local_interfaces()->at(i)->is_klass(), "must be a klass");
    InstanceKlass* interf = InstanceKlass::cast(local_interfaces()->at(i));
    assert(interf->is_interface(), "expected interface");
    interf->add_implementor(this_as_klass_oop);
  }
}

bool InstanceKlass::can_be_primary_super_slow() const {
  if (is_interface())
    return false;
  else
    return Klass::can_be_primary_super_slow();
}

GrowableArray<Klass*>* InstanceKlass::compute_secondary_supers(int num_extra_slots) {
  // The secondaries are the implemented interfaces.
  InstanceKlass* ik = InstanceKlass::cast(this);
  Array<Klass*>* interfaces = ik->transitive_interfaces();
  int num_secondaries = num_extra_slots + interfaces->length();
  if (num_secondaries == 0) {
    // Must share this for correct bootstrapping!
    set_secondary_supers(Universe::the_empty_klass_array());
    return NULL;
  } else if (num_extra_slots == 0) {
    // The secondary super list is exactly the same as the transitive interfaces.
    // Redefine classes has to be careful not to delete this!
    set_secondary_supers(interfaces);
    return NULL;
  } else {
    // Copy transitive interfaces to a temporary growable array to be constructed
    // into the secondary super list with extra slots.
    GrowableArray<Klass*>* secondaries = new GrowableArray<Klass*>(interfaces->length());
    for (int i = 0; i < interfaces->length(); i++) {
      secondaries->push(interfaces->at(i));
    }
    return secondaries;
  }
}
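
// Note: the "secondary supers" of an instance class are its transitive
// interfaces. The fast primary-super check cannot handle interfaces (see
// can_be_primary_super_slow above), so subtype checks against an interface
// fall back to a scan of this secondary-supers array.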

bool InstanceKlass::compute_is_subtype_of(Klass* k) {
  if (k->is_interface()) {
    return implements_interface(k);
  } else {
    return Klass::compute_is_subtype_of(k);
  }
}

bool InstanceKlass::implements_interface(Klass* k) const {
  if (this == k) return true;
  assert(k->is_interface(), "should be an interface class");
  for (int i = 0; i < transitive_interfaces()->length(); i++) {
    if (transitive_interfaces()->at(i) == k) {
      return true;
    }
  }
  return false;
}

bool InstanceKlass::is_same_or_direct_interface(Klass *k) const {
  // Verify direct super interface
  if (this == k) return true;
  assert(k->is_interface(), "should be an interface class");
  for (int i = 0; i < local_interfaces()->length(); i++) {
    if (local_interfaces()->at(i) == k) {
      return true;
    }
  }
  return false;
}

objArrayOop InstanceKlass::allocate_objArray(int n, int length, TRAPS) {
  if (length < 0) THROW_0(vmSymbols::java_lang_NegativeArraySizeException());
  if (length > arrayOopDesc::max_array_length(T_OBJECT)) {
    report_java_out_of_memory("Requested array size exceeds VM limit");
    JvmtiExport::post_array_size_exhausted();
    THROW_OOP_0(Universe::out_of_memory_error_array_size());
  }
  int size = objArrayOopDesc::object_size(length);
  Klass* ak = array_klass(n, CHECK_NULL);
  KlassHandle h_ak (THREAD, ak);
  objArrayOop o =
    (objArrayOop)CollectedHeap::array_allocate(h_ak, size, length, CHECK_NULL);
  return o;
}

instanceOop InstanceKlass::register_finalizer(instanceOop i, TRAPS) {
  if (TraceFinalizerRegistration) {
    tty->print("Registered ");
    i->print_value_on(tty);
    tty->print_cr(" (" INTPTR_FORMAT ") as finalizable", (address)i);
  }
  instanceHandle h_i(THREAD, i);
  // Pass the handle as argument, JavaCalls::call expects oop as jobjects
  JavaValue result(T_VOID);
  JavaCallArguments args(h_i);
  methodHandle mh (THREAD, Universe::finalizer_register_method());
  JavaCalls::call(&result, mh, &args, CHECK_NULL);
  return h_i();
}

instanceOop InstanceKlass::allocate_instance(TRAPS) {
  bool has_finalizer_flag = has_finalizer(); // Query before possible GC
  int size = size_helper();  // Query before forming handle.

  KlassHandle h_k(THREAD, this);

  instanceOop i;

  i = (instanceOop)CollectedHeap::obj_allocate(h_k, size, CHECK_NULL);
  if (has_finalizer_flag && !RegisterFinalizersAtInit) {
    i = register_finalizer(i, CHECK_NULL);
  }
  return i;
}
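
// Note: this is the allocation path behind the 'new' bytecode. The runtime
// first ensures the class is initialized (or is being initialized by the
// current thread) and only then calls allocate_instance(). With
// -XX:-RegisterFinalizersAtInit, instances of classes that override
// finalize() are registered for finalization here at allocation time rather
// than in Object.<init>.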

void InstanceKlass::check_valid_for_instantiation(bool throwError, TRAPS) {
  if (is_interface() || is_abstract()) {
    ResourceMark rm(THREAD);
    THROW_MSG(throwError ? vmSymbols::java_lang_InstantiationError()
              : vmSymbols::java_lang_InstantiationException(), external_name());
  }
  if (this == SystemDictionary::Class_klass()) {
    ResourceMark rm(THREAD);
    THROW_MSG(throwError ? vmSymbols::java_lang_IllegalAccessError()
              : vmSymbols::java_lang_IllegalAccessException(), external_name());
  }
}

Klass* InstanceKlass::array_klass_impl(bool or_null, int n, TRAPS) {
  instanceKlassHandle this_oop(THREAD, this);
  return array_klass_impl(this_oop, or_null, n, THREAD);
}

Klass* InstanceKlass::array_klass_impl(instanceKlassHandle this_oop, bool or_null, int n, TRAPS) {
  if (this_oop->array_klasses() == NULL) {
    if (or_null) return NULL;

    ResourceMark rm;
    JavaThread *jt = (JavaThread *)THREAD;
    {
      // Atomic creation of array_klasses
      MutexLocker mc(Compile_lock, THREAD);   // for vtables
      MutexLocker ma(MultiArray_lock, THREAD);

      // Check if update has already taken place
      if (this_oop->array_klasses() == NULL) {
        Klass*    k = ObjArrayKlass::allocate_objArray_klass(this_oop->class_loader_data(), 1, this_oop, CHECK_NULL);
        this_oop->set_array_klasses(k);
      }
    }
  }
  // array_klasses() will always be set at this point
  ObjArrayKlass* oak = (ObjArrayKlass*)this_oop->array_klasses();
  if (or_null) {
    return oak->array_klass_or_null(n);
  }
  return oak->array_klass(n, THREAD);
}
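
// Note: the array klass for T[] is created lazily on first use; the pattern
// above is the usual check / lock (Compile_lock + MultiArray_lock) / re-check
// sequence, so concurrent requests create the ObjArrayKlass only once.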

Klass* InstanceKlass::array_klass_impl(bool or_null, TRAPS) {
  return array_klass_impl(or_null, 1, THREAD);
}

void InstanceKlass::call_class_initializer(TRAPS) {
  instanceKlassHandle ik (THREAD, this);
  call_class_initializer_impl(ik, THREAD);
}

static int call_class_initializer_impl_counter = 0;   // for debugging

Method* InstanceKlass::class_initializer() {
  Method* clinit = find_method(
      vmSymbols::class_initializer_name(), vmSymbols::void_method_signature());
  if (clinit != NULL && clinit->has_valid_initializer_flags()) {
    return clinit;
  }
  return NULL;
}

void InstanceKlass::call_class_initializer_impl(instanceKlassHandle this_oop, TRAPS) {
  if (ReplayCompiles &&
      (ReplaySuppressInitializers == 1 ||
       ReplaySuppressInitializers >= 2 && this_oop->class_loader() != NULL)) {
    // Hide the existence of the initializer for the purpose of replaying the compile
    return;
  }

  methodHandle h_method(THREAD, this_oop->class_initializer());
  assert(!this_oop->is_initialized(), "we cannot initialize twice");
  if (TraceClassInitialization) {
    tty->print("%d Initializing ", call_class_initializer_impl_counter++);
    this_oop->name()->print_value();
    tty->print_cr("%s (" INTPTR_FORMAT ")", h_method() == NULL ? "(no method)" : "", (address)this_oop());
  }
  if (h_method() != NULL) {
    JavaCallArguments args; // No arguments
    JavaValue result(T_VOID);
    JavaCalls::call(&result, h_method, &args, CHECK); // Static call (no args)
  }
}
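
// Note: this is where a class's <clinit> -- i.e. the static {} block of the
// article's AA and BB -- is finally executed as a plain Java call. The caller
// (initialize_impl, earlier in this file) has already marked the class as
// being_initialized and released the init lock, so while <clinit> runs, any
// other thread that needs this class blocks until initialization completes.
// That is exactly the cross-wait in the example: thread 1 runs AA.<clinit>
// and needs BB, thread 2 runs BB.<clinit> and needs AA, and both wait
// forever. The "%d Initializing <name>" trace above is printed when
// TraceClassInitialization is enabled.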


void InstanceKlass::mask_for(methodHandle method, int bci,
  InterpreterOopMap* entry_for) {
  // Dirty read, then double-check under a lock.
  if (_oop_map_cache == NULL) {
    // Otherwise, allocate a new one.
    MutexLocker x(OopMapCacheAlloc_lock);
    // First time use. Allocate a cache in C heap
    if (_oop_map_cache == NULL) {
      // Release stores from OopMapCache constructor before assignment
      // to _oop_map_cache. C++ compilers on ppc do not emit the
      // required memory barrier only because of the volatile
      // qualifier of _oop_map_cache.
      OrderAccess::release_store_ptr(&_oop_map_cache, new OopMapCache());
    }
  }
  // _oop_map_cache is constant after init; lookup below does its own locking.
  _oop_map_cache->lookup(method, bci, entry_for);
}
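
// Note: textbook double-checked locking -- the unsynchronized NULL check is
// repeated under OopMapCacheAlloc_lock, and release_store_ptr publishes the
// fully constructed OopMapCache before other threads can see the pointer.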


bool InstanceKlass::find_local_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const {
  for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
    Symbol* f_name = fs.name();
    Symbol* f_sig  = fs.signature();
    if (f_name == name && f_sig == sig) {
      fd->reinitialize(const_cast<InstanceKlass*>(this), fs.index());
      return true;
    }
  }
  return false;
}


Klass* InstanceKlass::find_interface_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const {
  const int n = local_interfaces()->length();
  for (int i = 0; i < n; i++) {
    Klass* intf1 = local_interfaces()->at(i);
    assert(intf1->is_interface(), "just checking type");
    // search for field in current interface
    if (InstanceKlass::cast(intf1)->find_local_field(name, sig, fd)) {
      assert(fd->is_static(), "interface field must be static");
      return intf1;
    }
    // search for field in direct superinterfaces
    Klass* intf2 = InstanceKlass::cast(intf1)->find_interface_field(name, sig, fd);
    if (intf2 != NULL) return intf2;
  }
  // otherwise field lookup fails
  return NULL;
}


Klass* InstanceKlass::find_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const {
  // search order according to newest JVM spec (5.4.3.2, p.167).
  // 1) search for field in current klass
  if (find_local_field(name, sig, fd)) {
    return const_cast<InstanceKlass*>(this);
  }
  // 2) search for field recursively in direct superinterfaces
  { Klass* intf = find_interface_field(name, sig, fd);
    if (intf != NULL) return intf;
  }
  // 3) apply field lookup recursively if superclass exists
  { Klass* supr = super();
    if (supr != NULL) return InstanceKlass::cast(supr)->find_field(name, sig, fd);
  }
  // 4) otherwise field lookup fails
  return NULL;
}


Klass* InstanceKlass::find_field(Symbol* name, Symbol* sig, bool is_static, fieldDescriptor* fd) const {
  // search order according to newest JVM spec (5.4.3.2, p.167).
  // 1) search for field in current klass
  if (find_local_field(name, sig, fd)) {
    if (fd->is_static() == is_static) return const_cast<InstanceKlass*>(this);
  }
  // 2) search for field recursively in direct superinterfaces
  if (is_static) {
    Klass* intf = find_interface_field(name, sig, fd);
    if (intf != NULL) return intf;
  }
  // 3) apply field lookup recursively if superclass exists
  { Klass* supr = super();
    if (supr != NULL) return InstanceKlass::cast(supr)->find_field(name, sig, is_static, fd);
  }
  // 4) otherwise field lookup fails
  return NULL;
}


bool InstanceKlass::find_local_field_from_offset(int offset, bool is_static, fieldDescriptor* fd) const {
  for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
    if (fs.offset() == offset) {
      fd->reinitialize(const_cast<InstanceKlass*>(this), fs.index());
      if (fd->is_static() == is_static) return true;
    }
  }
  return false;
}


bool InstanceKlass::find_field_from_offset(int offset, bool is_static, fieldDescriptor* fd) const {
  Klass* klass = const_cast<InstanceKlass*>(this);
  while (klass != NULL) {
    if (InstanceKlass::cast(klass)->find_local_field_from_offset(offset, is_static, fd)) {
      return true;
    }
    klass = klass->super();
  }
  return false;
}


void InstanceKlass::methods_do(void f(Method* method)) {
  int len = methods()->length();
  for (int index = 0; index < len; index++) {
    Method* m = methods()->at(index);
    assert(m->is_method(), "must be method");
    f(m);
  }
}


void InstanceKlass::do_local_static_fields(FieldClosure* cl) {
  for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
    if (fs.access_flags().is_static()) {
      fieldDescriptor& fd = fs.field_descriptor();
      cl->do_field(&fd);
    }
  }
}


void InstanceKlass::do_local_static_fields(void f(fieldDescriptor*, Handle, TRAPS), Handle mirror, TRAPS) {
  instanceKlassHandle h_this(THREAD, this);
  do_local_static_fields_impl(h_this, f, mirror, CHECK);
}


void InstanceKlass::do_local_static_fields_impl(instanceKlassHandle this_k,
                             void f(fieldDescriptor* fd, Handle mirror, TRAPS), Handle mirror, TRAPS) {
  for (JavaFieldStream fs(this_k()); !fs.done(); fs.next()) {
    if (fs.access_flags().is_static()) {
      fieldDescriptor& fd = fs.field_descriptor();
      f(&fd, mirror, CHECK);
    }
  }
}


static int compare_fields_by_offset(int* a, int* b) {
  return a[0] - b[0];
}

void InstanceKlass::do_nonstatic_fields(FieldClosure* cl) {
  InstanceKlass* super = superklass();
  if (super != NULL) {
    super->do_nonstatic_fields(cl);
  }
  fieldDescriptor fd;
  int length = java_fields_count();
  // In DebugInfo nonstatic fields are sorted by offset.
  int* fields_sorted = NEW_C_HEAP_ARRAY(int, 2*(length+1), mtClass);
  int j = 0;
  for (int i = 0; i < length; i += 1) {
    fd.reinitialize(this, i);
    if (!fd.is_static()) {
      fields_sorted[j + 0] = fd.offset();
      fields_sorted[j + 1] = i;
      j += 2;
    }
  }
  if (j > 0) {
    length = j;
    // _sort_Fn is defined in growableArray.hpp.
    qsort(fields_sorted, length/2, 2*sizeof(int), (_sort_Fn)compare_fields_by_offset);
    for (int i = 0; i < length; i += 2) {
      fd.reinitialize(this, fields_sorted[i + 1]);
      assert(!fd.is_static() && fd.offset() == fields_sorted[i], "only nonstatic fields");
      cl->do_field(&fd);
    }
  }
  FREE_C_HEAP_ARRAY(int, fields_sorted, mtClass);
}


void InstanceKlass::array_klasses_do(void f(Klass* k, TRAPS), TRAPS) {
  if (array_klasses() != NULL)
    ArrayKlass::cast(array_klasses())->array_klasses_do(f, THREAD);
}

void InstanceKlass::array_klasses_do(void f(Klass* k)) {
  if (array_klasses() != NULL)
    ArrayKlass::cast(array_klasses())->array_klasses_do(f);
}

#ifdef ASSERT
static int linear_search(Array<Method*>* methods, Symbol* name, Symbol* signature) {
  int len = methods->length();
  for (int index = 0; index < len; index++) {
    Method* m = methods->at(index);
    assert(m->is_method(), "must be method");
    if (m->signature() == signature && m->name() == name) {
       return index;
    }
  }
  return -1;
}
#endif

static int binary_search(Array<Method*>* methods, Symbol* name) {
  int len = methods->length();
  // methods are sorted, so do binary search
  int l = 0;
  int h = len - 1;
  while (l <= h) {
    int mid = (l + h) >> 1;
    Method* m = methods->at(mid);
    assert(m->is_method(), "must be method");
    int res = m->name()->fast_compare(name);
    if (res == 0) {
      return mid;
    } else if (res < 0) {
      l = mid + 1;
    } else {
      h = mid - 1;
    }
  }
  return -1;
}
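
// Note: a klass's methods array is sorted by method name only, so the binary
// search above narrows the range by name; the callers below
// (find_method_index, find_method_by_name) then walk the neighbouring
// entries to match the signature and the overpass/static/private filters.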

// find_method looks up the name/signature in the local methods array
Method* InstanceKlass::find_method(Symbol* name, Symbol* signature) const {
  return find_method_impl(name, signature, find_overpass, find_static, find_private);
}

Method* InstanceKlass::find_method_impl(Symbol* name, Symbol* signature,
                                        OverpassLookupMode overpass_mode,
                                        StaticLookupMode static_mode,
                                        PrivateLookupMode private_mode) const {
  return InstanceKlass::find_method_impl(methods(), name, signature, overpass_mode, static_mode, private_mode);
}

// find_instance_method looks up the name/signature in the local methods array
// and skips over static methods
Method* InstanceKlass::find_instance_method(
    Array<Method*>* methods, Symbol* name, Symbol* signature) {
  Method* meth = InstanceKlass::find_method_impl(methods, name, signature,
                                                 find_overpass, skip_static, find_private);
  assert(((meth == NULL) || !meth->is_static()), "find_instance_method should have skipped statics");
  return meth;
}

// find_instance_method looks up the name/signature in the local methods array
// and skips over static methods
Method* InstanceKlass::find_instance_method(Symbol* name, Symbol* signature) {
    return InstanceKlass::find_instance_method(methods(), name, signature);
}

// Find looks up the name/signature in the local methods array
// and filters on the overpass, static and private flags
// This returns the first one found
// note that the local methods array can have up to one overpass, one static
// and one instance (private or not) with the same name/signature
Method* InstanceKlass::find_local_method(Symbol* name, Symbol* signature,
                                        OverpassLookupMode overpass_mode,
                                        StaticLookupMode static_mode,
                                        PrivateLookupMode private_mode) const {
  return InstanceKlass::find_method_impl(methods(), name, signature, overpass_mode, static_mode, private_mode);
}

// Find looks up the name/signature in the local methods array
// and filters on the overpass, static and private flags
// This returns the first one found
// note that the local methods array can have up to one overpass, one static
// and one instance (private or not) with the same name/signature
Method* InstanceKlass::find_local_method(Array<Method*>* methods,
                                        Symbol* name, Symbol* signature,
                                        OverpassLookupMode overpass_mode,
                                        StaticLookupMode static_mode,
                                        PrivateLookupMode private_mode) {
  return InstanceKlass::find_method_impl(methods, name, signature, overpass_mode, static_mode, private_mode);
}


// find_method looks up the name/signature in the local methods array
Method* InstanceKlass::find_method(
    Array<Method*>* methods, Symbol* name, Symbol* signature) {
  return InstanceKlass::find_method_impl(methods, name, signature, find_overpass, find_static, find_private);
}

Method* InstanceKlass::find_method_impl(
    Array<Method*>* methods, Symbol* name, Symbol* signature,
    OverpassLookupMode overpass_mode, StaticLookupMode static_mode,
    PrivateLookupMode private_mode) {
  int hit = find_method_index(methods, name, signature, overpass_mode, static_mode, private_mode);
  return hit >= 0 ? methods->at(hit): NULL;
}

bool InstanceKlass::method_matches(Method* m, Symbol* signature, bool skipping_overpass, bool skipping_static, bool skipping_private) {
    return  ((m->signature() == signature) &&
            (!skipping_overpass || !m->is_overpass()) &&
            (!skipping_static || !m->is_static()) &&
            (!skipping_private || !m->is_private()));
}

// Used directly for default_methods to find the index into the
// default_vtable_indices, and indirectly by find_method
// find_method_index looks in the local methods array to return the index
// of the matching name/signature. If overpass methods are being ignored,
// the search continues to find a potential non-overpass match.  This capability
// is important during method resolution to prefer a static method, for example,
// over an overpass method.
// Any methods array may contain a static method, an overpass method and a
// local instance method that all share the same name/signature.
// To correctly catch a given method, the search criteria may need
// to explicitly skip the other two. For local instance methods, it
// is often necessary to skip private methods.
int InstanceKlass::find_method_index(
    Array<Method*>* methods, Symbol* name, Symbol* signature,
    OverpassLookupMode overpass_mode, StaticLookupMode static_mode,
    PrivateLookupMode private_mode) {
  bool skipping_overpass = (overpass_mode == skip_overpass);
  bool skipping_static = (static_mode == skip_static);
  bool skipping_private = (private_mode == skip_private);
  int hit = binary_search(methods, name);
  if (hit != -1) {
    Method* m = methods->at(hit);

    // Do linear search to find matching signature.  First, quick check
    // for common case, ignoring overpasses if requested.
    if (method_matches(m, signature, skipping_overpass, skipping_static, skipping_private)) return hit;

    // search downwards through overloaded methods
    int i;
    for (i = hit - 1; i >= 0; --i) {
        Method* m = methods->at(i);
        assert(m->is_method(), "must be method");
        if (m->name() != name) break;
        if (method_matches(m, signature, skipping_overpass, skipping_static, skipping_private)) return i;
    }
    // search upwards
    for (i = hit + 1; i < methods->length(); ++i) {
        Method* m = methods->at(i);
        assert(m->is_method(), "must be method");
        if (m->name() != name) break;
        if (method_matches(m, signature, skipping_overpass, skipping_static, skipping_private)) return i;
    }
    // not found
#ifdef ASSERT
    int index = (skipping_overpass || skipping_static || skipping_private) ? -1 : linear_search(methods, name, signature);
    assert(index == -1, err_msg("binary search should have found entry %d", index));
#endif
  }
  return -1;
}
int InstanceKlass::find_method_by_name(Symbol* name, int* end) {
  return find_method_by_name(methods(), name, end);
}

int InstanceKlass::find_method_by_name(
    Array<Method*>* methods, Symbol* name, int* end_ptr) {
  assert(end_ptr != NULL, "just checking");
  int start = binary_search(methods, name);
  int end = start + 1;
  if (start != -1) {
    while (start - 1 >= 0 && (methods->at(start - 1))->name() == name) --start;
    while (end < methods->length() && (methods->at(end))->name() == name) ++end;
    *end_ptr = end;
    return start;
  }
  return -1;
}
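
// Note: find_method_by_name returns the index of the first method with the
// given name and stores the exclusive end of that run in *end_ptr, so callers
// can iterate over every overload of one name in a single pass.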

// uncached_lookup_method searches both the local class methods array and all
// superclasses methods arrays, skipping any overpass methods in superclasses.
Method* InstanceKlass::uncached_lookup_method(Symbol* name, Symbol* signature, OverpassLookupMode overpass_mode) const {
  OverpassLookupMode overpass_local_mode = overpass_mode;
  Klass* klass = const_cast<InstanceKlass*>(this);
  while (klass != NULL) {
    Method* method = InstanceKlass::cast(klass)->find_method_impl(name, signature, overpass_local_mode, find_static, find_private);
    if (method != NULL) {
      return method;
    }
    klass = InstanceKlass::cast(klass)->super();
    overpass_local_mode = skip_overpass;   // Always ignore overpass methods in superclasses
  }
  return NULL;
}

#ifdef ASSERT
// search through class hierarchy and return true if this class or
// one of the superclasses was redefined
bool InstanceKlass::has_redefined_this_or_super() const {
  const InstanceKlass* klass = this;
  while (klass != NULL) {
    if (klass->has_been_redefined()) {
      return true;
    }
    klass = InstanceKlass::cast(klass->super());
  }
  return false;
}
#endif

// lookup a method in the default methods list then in all transitive interfaces
// Do NOT return private or static methods
Method* InstanceKlass::lookup_method_in_ordered_interfaces(Symbol* name,
                                                         Symbol* signature) const {
  Method* m = NULL;
  if (default_methods() != NULL) {
    m = find_method(default_methods(), name, signature);
  }
  // Look up interfaces
  if (m == NULL) {
    m = lookup_method_in_all_interfaces(name, signature, find_defaults);
  }
  return m;
}

// lookup a method in all the interfaces that this class implements
// Do NOT return private or static methods, new in JDK8 which are not externally visible
// They should only be found in the initial InterfaceMethodRef
Method* InstanceKlass::lookup_method_in_all_interfaces(Symbol* name,
                                                       Symbol* signature,
                                                       DefaultsLookupMode defaults_mode) const {
  Array<Klass*>* all_ifs = transitive_interfaces();
  int num_ifs = all_ifs->length();
  InstanceKlass *ik = NULL;
  for (int i = 0; i < num_ifs; i++) {
    ik = InstanceKlass::cast(all_ifs->at(i));
    Method* m = ik->lookup_method(name, signature);
    if (m != NULL && m->is_public() && !m->is_static() &&
        ((defaults_mode != skip_defaults) || !m->is_default_method())) {
      return m;
    }
  }
  return NULL;
}

/* jni_id_for_impl for jfieldIds only */
JNIid* InstanceKlass::jni_id_for_impl(instanceKlassHandle this_oop, int offset) {
  MutexLocker ml(JfieldIdCreation_lock);
  // Retry lookup after we got the lock
  JNIid* probe = this_oop->jni_ids() == NULL ? NULL : this_oop->jni_ids()->find(offset);
  if (probe == NULL) {
    // Slow case, allocate new static field identifier
    probe = new JNIid(this_oop(), offset, this_oop->jni_ids());
    this_oop->set_jni_ids(probe);
  }
  return probe;
}


/* jni_id_for for jfieldIds only */
JNIid* InstanceKlass::jni_id_for(int offset) {
  JNIid* probe = jni_ids() == NULL ? NULL : jni_ids()->find(offset);
  if (probe == NULL) {
    probe = jni_id_for_impl(this, offset);
  }
  return probe;
}

u2 InstanceKlass::enclosing_method_data(int offset) {
  Array<jushort>* inner_class_list = inner_classes();
  if (inner_class_list == NULL) {
    return 0;
  }
  int length = inner_class_list->length();
  if (length % inner_class_next_offset == 0) {
    return 0;
  } else {
    int index = length - enclosing_method_attribute_size;
    assert(offset < enclosing_method_attribute_size, "invalid offset");
    return inner_class_list->at(index + offset);
  }
}

void InstanceKlass::set_enclosing_method_indices(u2 class_index,
                                                 u2 method_index) {
  Array<jushort>* inner_class_list = inner_classes();
  assert (inner_class_list != NULL, "_inner_classes list is not set up");
  int length = inner_class_list->length();
  if (length % inner_class_next_offset == enclosing_method_attribute_size) {
    int index = length - enclosing_method_attribute_size;
    inner_class_list->at_put(
      index + enclosing_method_class_index_offset, class_index);
    inner_class_list->at_put(
      index + enclosing_method_method_index_offset, method_index);
  }
}

// Lookup or create a jmethodID.
// This code is called by the VMThread and JavaThreads so the
// locking has to be done very carefully to avoid deadlocks
// and/or other cache consistency problems.
//
jmethodID InstanceKlass::get_jmethod_id(instanceKlassHandle ik_h, methodHandle method_h) {
  size_t idnum = (size_t)method_h->method_idnum();
  jmethodID* jmeths = ik_h->methods_jmethod_ids_acquire();
  size_t length = 0;
  jmethodID id = NULL;

  // We use a double-check locking idiom here because this cache is
  // performance sensitive. In the normal system, this cache only
  // transitions from NULL to non-NULL which is safe because we use
  // release_set_methods_jmethod_ids() to advertise the new cache.
  // A partially constructed cache should never be seen by a racing
  // thread. We also use release_store_ptr() to save a new jmethodID
  // in the cache so a partially constructed jmethodID should never be
  // seen either. Cache reads of existing jmethodIDs proceed without a
  // lock, but cache writes of a new jmethodID requires uniqueness and
  // creation of the cache itself requires no leaks so a lock is
  // generally acquired in those two cases.
  //
  // If the RedefineClasses() API has been used, then this cache can
  // grow and we'll have transitions from non-NULL to bigger non-NULL.
  // Cache creation requires no leaks and we require safety between all
  // cache accesses and freeing of the old cache so a lock is generally
  // acquired when the RedefineClasses() API has been used.

  if (jmeths != NULL) {
    // the cache already exists
    if (!ik_h->idnum_can_increment()) {
      // the cache can't grow so we can just get the current values
      get_jmethod_id_length_value(jmeths, idnum, &length, &id);
    } else {
      // cache can grow so we have to be more careful
      if (Threads::number_of_threads() == 0 ||
          SafepointSynchronize::is_at_safepoint()) {
        // we're single threaded or at a safepoint - no locking needed
        get_jmethod_id_length_value(jmeths, idnum, &length, &id);
      } else {
        MutexLocker ml(JmethodIdCreation_lock);
        get_jmethod_id_length_value(jmeths, idnum, &length, &id);
      }
    }
  }
  // implied else:
  // we need to allocate a cache so default length and id values are good

  if (jmeths == NULL ||   // no cache yet
      length <= idnum ||  // cache is too short
      id == NULL) {       // cache doesn't contain entry

    // This function can be called by the VMThread so we have to do all
    // things that might block on a safepoint before grabbing the lock.
    // Otherwise, we can deadlock with the VMThread or have a cache
    // consistency issue. These vars keep track of what we might have
    // to free after the lock is dropped.
    jmethodID  to_dealloc_id     = NULL;
    jmethodID* to_dealloc_jmeths = NULL;

    // may not allocate new_jmeths or use it if we allocate it
    jmethodID* new_jmeths = NULL;
    if (length <= idnum) {
      // allocate a new cache that might be used
      size_t size = MAX2(idnum+1, (size_t)ik_h->idnum_allocated_count());
      new_jmeths = NEW_C_HEAP_ARRAY(jmethodID, size+1, mtClass);
      memset(new_jmeths, 0, (size+1)*sizeof(jmethodID));
      // cache size is stored in element[0], other elements offset by one
      new_jmeths[0] = (jmethodID)size;
    }

    // allocate a new jmethodID that might be used
    jmethodID new_id = NULL;
    if (method_h->is_old() && !method_h->is_obsolete()) {
      // The method passed in is old (but not obsolete), we need to use the current version
      Method* current_method = ik_h->method_with_idnum((int)idnum);
      assert(current_method != NULL, "old but not obsolete, so should exist");
      new_id = Method::make_jmethod_id(ik_h->class_loader_data(), current_method);
    } else {
      // It is the current version of the method or an obsolete method,
      // use the version passed in
      new_id = Method::make_jmethod_id(ik_h->class_loader_data(), method_h());
    }

    if (Threads::number_of_threads() == 0 ||
        SafepointSynchronize::is_at_safepoint()) {
      // we're single threaded or at a safepoint - no locking needed
      id = get_jmethod_id_fetch_or_update(ik_h, idnum, new_id, new_jmeths,
                                          &to_dealloc_id, &to_dealloc_jmeths);
    } else {
      MutexLocker ml(JmethodIdCreation_lock);
      id = get_jmethod_id_fetch_or_update(ik_h, idnum, new_id, new_jmeths,
                                          &to_dealloc_id, &to_dealloc_jmeths);
    }

    // The lock has been dropped so we can free resources.
    // Free up either the old cache or the new cache if we allocated one.
    if (to_dealloc_jmeths != NULL) {
      FreeHeap(to_dealloc_jmeths);
    }
    // free up the new ID since it wasn't needed
    if (to_dealloc_id != NULL) {
      Method::destroy_jmethod_id(ik_h->class_loader_data(), to_dealloc_id);
    }
  }
  return id;
}


// Common code to fetch the jmethodID from the cache or update the
// cache with the new jmethodID. This function should never do anything
// that causes the caller to go to a safepoint or we can deadlock with
// the VMThread or have cache consistency issues.
//
jmethodID InstanceKlass::get_jmethod_id_fetch_or_update(
            instanceKlassHandle ik_h, size_t idnum, jmethodID new_id,
            jmethodID* new_jmeths, jmethodID* to_dealloc_id_p,
            jmethodID** to_dealloc_jmeths_p) {
  assert(new_id != NULL, "sanity check");
  assert(to_dealloc_id_p != NULL, "sanity check");
  assert(to_dealloc_jmeths_p != NULL, "sanity check");
  assert(Threads::number_of_threads() == 0 ||
         SafepointSynchronize::is_at_safepoint() ||
         JmethodIdCreation_lock->owned_by_self(), "sanity check");

  // reacquire the cache - we are locked, single threaded or at a safepoint
  jmethodID* jmeths = ik_h->methods_jmethod_ids_acquire();
  jmethodID  id     = NULL;
  size_t     length = 0;

  if (jmeths == NULL ||                         // no cache yet
      (length = (size_t)jmeths[0]) <= idnum) {  // cache is too short
    if (jmeths != NULL) {
      // copy any existing entries from the old cache
      for (size_t index = 0; index < length; index++) {
        new_jmeths[index+1] = jmeths[index+1];
      }
      *to_dealloc_jmeths_p = jmeths;  // save old cache for later delete
    }
    ik_h->release_set_methods_jmethod_ids(jmeths = new_jmeths);
  } else {
    // fetch jmethodID (if any) from the existing cache
    id = jmeths[idnum+1];
    *to_dealloc_jmeths_p = new_jmeths;  // save new cache for later delete
  }
  if (id == NULL) {
    // No matching jmethodID in the existing cache or we have a new
    // cache or we just grew the cache. This cache write is done here
    // by the first thread to win the foot race because a jmethodID
    // needs to be unique once it is generally available.
    id = new_id;

    // The jmethodID cache can be read while unlocked so we have to
    // make sure the new jmethodID is complete before installing it
    // in the cache.
    OrderAccess::release_store_ptr(&jmeths[idnum+1], id);
  } else {
    *to_dealloc_id_p = new_id; // save new id for later delete
  }
  return id;
}


// Common code to get the jmethodID cache length and the jmethodID
// value at index idnum if there is one.
//
void InstanceKlass::get_jmethod_id_length_value(jmethodID* cache,
       size_t idnum, size_t *length_p, jmethodID* id_p) {
  assert(cache != NULL, "sanity check");
  assert(length_p != NULL, "sanity check");
  assert(id_p != NULL, "sanity check");

  // cache size is stored in element[0], other elements offset by one
  *length_p = (size_t)cache[0];
  if (*length_p <= idnum) {  // cache is too short
    *id_p = NULL;
  } else {
    *id_p = cache[idnum+1];  // fetch jmethodID (if any)
  }
}


// Lookup a jmethodID, NULL if not found.  Do no blocking, no allocations, no handles
jmethodID InstanceKlass::jmethod_id_or_null(Method* method) {
  size_t idnum = (size_t)method->method_idnum();
  jmethodID* jmeths = methods_jmethod_ids_acquire();
  size_t length;                                // length assigned as debugging crumb
  jmethodID id = NULL;
  if (jmeths != NULL &&                         // If there is a cache
      (length = (size_t)jmeths[0]) > idnum) {   // and if it is long enough,
    id = jmeths[idnum+1];                       // Look up the id (may be NULL)
  }
  return id;
}

int nmethodBucket::decrement() {
  return Atomic::add(-1, (volatile int *)&_count);
}

//
// Walk the list of dependent nmethods searching for nmethods which
// are dependent on the changes that were passed in and mark them for
// deoptimization.  Returns the number of nmethods found.
//
int InstanceKlass::mark_dependent_nmethods(DepChange& changes) {
  assert_locked_or_safepoint(CodeCache_lock);
  int found = 0;
  nmethodBucket* b = _dependencies;
  while (b != NULL) {
    nmethod* nm = b->get_nmethod();
    // since dependencies aren't removed until an nmethod becomes a zombie,
    // the dependency list may contain nmethods which aren't alive.
    if (b->count() > 0 && nm->is_alive() && !nm->is_marked_for_deoptimization() && nm->check_dependency_on(changes)) {
      if (TraceDependencies) {
        ResourceMark rm;
        tty->print_cr("Marked for deoptimization");
        tty->print_cr("  context = %s", this->external_name());
        changes.print();
        nm->print();
        nm->print_dependencies();
      }
      nm->mark_for_deoptimization();
      found++;
    }
    b = b->next();
  }
  return found;
}
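
// Note: nmethods record dependencies on the class hierarchy (e.g. a call site
// devirtualized because a method currently has a single implementor); when a
// newly loaded class invalidates such an assumption, the affected nmethods
// are found here and marked for deoptimization.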

void InstanceKlass::clean_dependent_nmethods() {
  assert_locked_or_safepoint(CodeCache_lock);

  if (has_unloaded_dependent()) {
    nmethodBucket* b = _dependencies;
    nmethodBucket* last = NULL;
    while (b != NULL) {
      assert(b->count() >= 0, err_msg("bucket count: %d", b->count()));

      nmethodBucket* next = b->next();

      if (b->count() == 0) {
        if (last == NULL) {
          _dependencies = next;
        } else {
          last->set_next(next);
        }
        delete b;
        // last stays the same.
      } else {
        last = b;
      }

      b = next;
    }
    set_has_unloaded_dependent(false);
  }
#ifdef ASSERT
  else {
    // Verification
    for (nmethodBucket* b = _dependencies; b != NULL; b = b->next()) {
      assert(b->count() >= 0, err_msg("bucket count: %d", b->count()));
      assert(b->count() != 0, "empty buckets need to be cleaned");
    }
  }
#endif
}

//
// Add an nmethodBucket to the list of dependencies for this nmethod.
// It's possible that an nmethod has multiple dependencies on this klass
// so a count is kept for each bucket to guarantee that creation and
// deletion of dependencies is consistent.
//
void InstanceKlass::add_dependent_nmethod(nmethod* nm) {
  assert_locked_or_safepoint(CodeCache_lock);
  nmethodBucket* b = _dependencies;
  nmethodBucket* last = NULL;
  while (b != NULL) {
    if (nm == b->get_nmethod()) {
      b->increment();
      return;
    }
    b = b->next();
  }
  _dependencies = new nmethodBucket(nm, _dependencies);
}


//
// Decrement count of the nmethod in the dependency list and remove
// the bucket completely when the count goes to 0.  This method must
// find a corresponding bucket otherwise there's a bug in the
// recording of dependencies.
//
void InstanceKlass::remove_dependent_nmethod(nmethod* nm, bool delete_immediately) {
  assert_locked_or_safepoint(CodeCache_lock);
  nmethodBucket* b = _dependencies;
  nmethodBucket* last = NULL;
  while (b != NULL) {
    if (nm == b->get_nmethod()) {
      int val = b->decrement();
      guarantee(val >= 0, err_msg("Underflow: %d", val));
      if (val == 0) {
        if (delete_immediately) {
          if (last == NULL) {
            _dependencies = b->next();
          } else {
            last->set_next(b->next());
          }
          delete b;
        } else {
          // The deletion of this entry is deferred until a later, potentially parallel GC phase.
          set_has_unloaded_dependent(true);
        }
      }
      return;
    }
    last = b;
    b = b->next();
  }
#ifdef ASSERT
  tty->print_cr("### %s can't find dependent nmethod:", this->external_name());
  nm->print();
#endif // ASSERT
  ShouldNotReachHere();
}


#ifndef PRODUCT
void InstanceKlass::print_dependent_nmethods(bool verbose) {
  nmethodBucket* b = _dependencies;
  int idx = 0;
  while (b != NULL) {
    nmethod* nm = b->get_nmethod();
    tty->print("[%d] count=%d { ", idx++, b->count());
    if (!verbose) {
      nm->print_on(tty, "nmethod");
      tty->print_cr(" } ");
    } else {
      nm->print();
      nm->print_dependencies();
      tty->print_cr("--- } ");
    }
    b = b->next();
  }
}


bool InstanceKlass::is_dependent_nmethod(nmethod* nm) {
  nmethodBucket* b = _dependencies;
  while (b != NULL) {
    if (nm == b->get_nmethod()) {
#ifdef ASSERT
      int count = b->count();
      assert(count >= 0, err_msg("count shouldn't be negative: %d", count));
#endif
      return true;
    }
    b = b->next();
  }
  return false;
}
#endif //PRODUCT


// Garbage collection

#ifdef ASSERT
template <class T> void assert_is_in(T *p) {
  T heap_oop = oopDesc::load_heap_oop(p);
  if (!oopDesc::is_null(heap_oop)) {
    oop o = oopDesc::decode_heap_oop_not_null(heap_oop);
    assert(Universe::heap()->is_in(o), "should be in heap");
  }
}
template <class T> void assert_is_in_closed_subset(T *p) {
  T heap_oop = oopDesc::load_heap_oop(p);
  if (!oopDesc::is_null(heap_oop)) {
    oop o = oopDesc::decode_heap_oop_not_null(heap_oop);
    assert(Universe::heap()->is_in_closed_subset(o),
           err_msg("should be in closed *p " INTPTR_FORMAT " " INTPTR_FORMAT, (address)p, (address)o));
  }
}
template <class T> void assert_is_in_reserved(T *p) {
  T heap_oop = oopDesc::load_heap_oop(p);
  if (!oopDesc::is_null(heap_oop)) {
    oop o = oopDesc::decode_heap_oop_not_null(heap_oop);
    assert(Universe::heap()->is_in_reserved(o), "should be in reserved");
  }
}
template <class T> void assert_nothing(T *p) {}

#else
template <class T> void assert_is_in(T *p) {}
template <class T> void assert_is_in_closed_subset(T *p) {}
template <class T> void assert_is_in_reserved(T *p) {}
template <class T> void assert_nothing(T *p) {}
#endif // ASSERT

//
// Macros that iterate over areas of oops which are specialized on type of
// oop pointer either narrow or wide, depending on UseCompressedOops
//
// Parameters are:
//   T         - type of oop to point to (either oop or narrowOop)
//   start_p   - starting pointer for region to iterate over
//   count     - number of oops or narrowOops to iterate over
//   do_oop    - action to perform on each oop (it's arbitrary C code which
//               makes it more efficient to put in a macro rather than making
//               it a template function)
//   assert_fn - assert function which is template function because performance
//               doesn't matter when enabled.
#define InstanceKlass_SPECIALIZED_OOP_ITERATE( \
  T, start_p, count, do_oop,                \
  assert_fn)                                \
{                                           \
  T* p         = (T*)(start_p);             \
  T* const end = p + (count);               \
  while (p < end) {                         \
    (assert_fn)(p);                         \
    do_oop;                                 \
    ++p;                                    \
  }                                         \
}

#define InstanceKlass_SPECIALIZED_OOP_REVERSE_ITERATE( \
  T, start_p, count, do_oop,                \
  assert_fn)                                \
{                                           \
  T* const start = (T*)(start_p);           \
  T*       p     = start + (count);         \
  while (start < p) {                       \
    --p;                                    \
    (assert_fn)(p);                         \
    do_oop;                                 \
  }                                         \
}

#define InstanceKlass_SPECIALIZED_BOUNDED_OOP_ITERATE( \
  T, start_p, count, low, high,             \
  do_oop, assert_fn)                        \
{                                           \
  T* const l = (T*)(low);                   \
  T* const h = (T*)(high);                  \
  assert(mask_bits((intptr_t)l, sizeof(T)-1) == 0 && \
         mask_bits((intptr_t)h, sizeof(T)-1) == 0,   \
         "bounded region must be properly aligned"); \
  T* p       = (T*)(start_p);               \
  T* end     = p + (count);                 \
  if (p < l) p = l;                         \
  if (end > h) end = h;                     \
  while (p < end) {                         \
    (assert_fn)(p);                         \
    do_oop;                                 \
    ++p;                                    \
  }                                         \
}


// The following macros call specialized macros, passing either oop or
// narrowOop as the specialization type.  These test the UseCompressedOops
// flag.
#define InstanceKlass_OOP_MAP_ITERATE(obj, do_oop, assert_fn)            \
{                                                                        \
  /* Compute oopmap block range. The common case                         \
     is nonstatic_oop_map_size == 1. */                                  \
  OopMapBlock* map           = start_of_nonstatic_oop_maps();            \
  OopMapBlock* const end_map = map + nonstatic_oop_map_count();          \
  if (UseCompressedOops) {                                               \
    while (map < end_map) {                                              \
      InstanceKlass_SPECIALIZED_OOP_ITERATE(narrowOop,                   \
        obj->obj_field_addr<narrowOop>(map->offset()), map->count(),     \
        do_oop, assert_fn)                                               \
      ++map;                                                             \
    }                                                                    \
  } else {                                                               \
    while (map < end_map) {                                              \
      InstanceKlass_SPECIALIZED_OOP_ITERATE(oop,                         \
        obj->obj_field_addr<oop>(map->offset()), map->count(),           \
        do_oop, assert_fn)                                               \
      ++map;                                                             \
    }                                                                    \
  }                                                                      \
}

#define InstanceKlass_OOP_MAP_REVERSE_ITERATE(obj, do_oop, assert_fn)    \
{                                                                        \
  OopMapBlock* const start_map = start_of_nonstatic_oop_maps();          \
  OopMapBlock* map             = start_map + nonstatic_oop_map_count();  \
  if (UseCompressedOops) {                                               \
    while (start_map < map) {                                            \
      --map;                                                             \
      InstanceKlass_SPECIALIZED_OOP_REVERSE_ITERATE(narrowOop,           \
        obj->obj_field_addr<narrowOop>(map->offset()), map->count(),     \
        do_oop, assert_fn)                                               \
    }                                                                    \
  } else {                                                               \
    while (start_map < map) {                                            \
      --map;                                                             \
      InstanceKlass_SPECIALIZED_OOP_REVERSE_ITERATE(oop,                 \
        obj->obj_field_addr<oop>(map->offset()), map->count(),           \
        do_oop, assert_fn)                                               \
    }                                                                    \
  }                                                                      \
}

#define InstanceKlass_BOUNDED_OOP_MAP_ITERATE(obj, low, high, do_oop,    \
                                              assert_fn)                 \
{                                                                        \
  /* Compute oopmap block range. The common case is                      \
     nonstatic_oop_map_size == 1, so we accept the                       \
     usually non-existent extra overhead of examining                    \
     all the maps. */                                                    \
  OopMapBlock* map           = start_of_nonstatic_oop_maps();            \
  OopMapBlock* const end_map = map + nonstatic_oop_map_count();          \
  if (UseCompressedOops) {                                               \
    while (map < end_map) {                                              \
      InstanceKlass_SPECIALIZED_BOUNDED_OOP_ITERATE(narrowOop,           \
        obj->obj_field_addr<narrowOop>(map->offset()), map->count(),     \
        low, high,                                                       \
        do_oop, assert_fn)                                               \
      ++map;                                                             \
    }                                                                    \
  } else {                                                               \
    while (map < end_map) {                                              \
      InstanceKlass_SPECIALIZED_BOUNDED_OOP_ITERATE(oop,                 \
        obj->obj_field_addr<oop>(map->offset()), map->count(),           \
        low, high,                                                       \
        do_oop, assert_fn)                                               \
      ++map;                                                             \
    }                                                                    \
  }                                                                      \
}

void InstanceKlass::oop_follow_contents(oop obj) {
  assert(obj != NULL, "can't follow the content of NULL object");
  MarkSweep::follow_klass(obj->klass());
  InstanceKlass_OOP_MAP_ITERATE( \
    obj, \
    MarkSweep::mark_and_push(p), \
    assert_is_in_closed_subset)
}

#if INCLUDE_ALL_GCS
void InstanceKlass::oop_follow_contents(ParCompactionManager* cm,
                                        oop obj) {
  assert(obj != NULL, "can't follow the content of NULL object");
  PSParallelCompact::follow_klass(cm, obj->klass());
  // Only mark the header and let the scan of the meta-data mark
  // everything else.
  InstanceKlass_OOP_MAP_ITERATE( \
    obj, \
    PSParallelCompact::mark_and_push(cm, p), \
    assert_is_in)
}
#endif // INCLUDE_ALL_GCS

// closure's do_metadata() method dictates whether the given closure should be
// applied to the klass ptr in the object header.

#define InstanceKlass_OOP_OOP_ITERATE_DEFN(OopClosureType, nv_suffix)        \
                                                                             \
int InstanceKlass::oop_oop_iterate##nv_suffix(oop obj, OopClosureType* closure) { \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::ik);\
  /* header */                                                          \
  if_do_metadata_checked(closure, nv_suffix) {                          \
    closure->do_klass##nv_suffix(obj->klass());                         \
  }                                                                     \
  InstanceKlass_OOP_MAP_ITERATE(                                        \
    obj,                                                                \
    SpecializationStats::                                               \
      record_do_oop_call##nv_suffix(SpecializationStats::ik);           \
    (closure)->do_oop##nv_suffix(p),                                    \
    assert_is_in_closed_subset)                                         \
  return size_helper();                                                 \
}

#if INCLUDE_ALL_GCS
#define InstanceKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN(OopClosureType, nv_suffix) \
                                                                                \
int InstanceKlass::oop_oop_iterate_backwards##nv_suffix(oop obj,                \
                                              OopClosureType* closure) {        \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::ik); \
                                                                                \
  assert_should_ignore_metadata(closure, nv_suffix);                            \
                                                                                \
  /* instance variables */                                                      \
  InstanceKlass_OOP_MAP_REVERSE_ITERATE(                                        \
    obj,                                                                        \
    SpecializationStats::record_do_oop_call##nv_suffix(SpecializationStats::ik);\
    (closure)->do_oop##nv_suffix(p),                                            \
    assert_is_in_closed_subset)                                                 \
   return size_helper();                                                        \
}
#endif // INCLUDE_ALL_GCS

#define InstanceKlass_OOP_OOP_ITERATE_DEFN_m(OopClosureType, nv_suffix) \
                                                                        \
int InstanceKlass::oop_oop_iterate##nv_suffix##_m(oop obj,              \
                                                  OopClosureType* closure, \
                                                  MemRegion mr) {          \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::ik);\
  if_do_metadata_checked(closure, nv_suffix) {                           \
    if (mr.contains(obj)) {                                              \
      closure->do_klass##nv_suffix(obj->klass());                        \
    }                                                                    \
  }                                                                      \
  InstanceKlass_BOUNDED_OOP_MAP_ITERATE(                                 \
    obj, mr.start(), mr.end(),                                           \
    (closure)->do_oop##nv_suffix(p),                                     \
    assert_is_in_closed_subset)                                          \
  return size_helper();                                                  \
}

ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceKlass_OOP_OOP_ITERATE_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceKlass_OOP_OOP_ITERATE_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceKlass_OOP_OOP_ITERATE_DEFN_m)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceKlass_OOP_OOP_ITERATE_DEFN_m)
#if INCLUDE_ALL_GCS
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
#endif // INCLUDE_ALL_GCS

int InstanceKlass::oop_adjust_pointers(oop obj) {
  int size = size_helper();
  InstanceKlass_OOP_MAP_ITERATE( \
    obj, \
    MarkSweep::adjust_pointer(p), \
    assert_is_in)
  return size;
}

#if INCLUDE_ALL_GCS
void InstanceKlass::oop_push_contents(PSPromotionManager* pm, oop obj) {
  InstanceKlass_OOP_MAP_REVERSE_ITERATE( \
    obj, \
    if (PSScavenge::should_scavenge(p)) { \
      pm->claim_or_forward_depth(p); \
    }, \
    assert_nothing )
}

int InstanceKlass::oop_update_pointers(ParCompactionManager* cm, oop obj) {
  int size = size_helper();
  InstanceKlass_OOP_MAP_ITERATE( \
    obj, \
    PSParallelCompact::adjust_pointer(p), \
    assert_is_in)
  return size;
}

#endif // INCLUDE_ALL_GCS

void InstanceKlass::clean_weak_instanceklass_links(BoolObjectClosure* is_alive) {
  clean_implementors_list(is_alive);
  clean_method_data(is_alive);

  clean_dependent_nmethods();
}

void InstanceKlass::clean_implementors_list(BoolObjectClosure* is_alive) {
  assert(class_loader_data()->is_alive(is_alive), "this klass should be live");
  if (is_interface()) {
    if (ClassUnloading) {
      Klass* impl = implementor();
      if (impl != NULL) {
        if (!impl->is_loader_alive(is_alive)) {
          // remove this guy
          Klass** klass = adr_implementor();
          assert(klass != NULL, "null klass");
          if (klass != NULL) {
            *klass = NULL;
          }
        }
      }
    }
  }
}

void InstanceKlass::clean_method_data(BoolObjectClosure* is_alive) {
  for (int m = 0; m < methods()->length(); m++) {
    MethodData* mdo = methods()->at(m)->method_data();
    if (mdo != NULL) {
      mdo->clean_method_data(is_alive);
    }
  }
}


static void remove_unshareable_in_class(Klass* k) {
  // remove klass's unshareable info
  k->remove_unshareable_info();
}

void InstanceKlass::remove_unshareable_info() {
  Klass::remove_unshareable_info();
  // Unlink the class
  if (is_linked()) {
    unlink_class();
  }
  init_implementor();

  constants()->remove_unshareable_info();

  for (int i = 0; i < methods()->length(); i++) {
    Method* m = methods()->at(i);
    m->remove_unshareable_info();
  }

  // do array classes also.
  array_klasses_do(remove_unshareable_in_class);
}

static void restore_unshareable_in_class(Klass* k, TRAPS) {
  // Array classes have null protection domain.
  // --> see ArrayKlass::complete_create_array_klass()
  k->restore_unshareable_info(ClassLoaderData::the_null_class_loader_data(), Handle(), CHECK);
}

void InstanceKlass::restore_unshareable_info(ClassLoaderData* loader_data, Handle protection_domain, TRAPS) {
  Klass::restore_unshareable_info(loader_data, protection_domain, CHECK);
  instanceKlassHandle ik(THREAD, this);

  Array<Method*>* methods = ik->methods();
  int num_methods = methods->length();
  for (int index2 = 0; index2 < num_methods; ++index2) {
    methodHandle m(THREAD, methods->at(index2));
    m->restore_unshareable_info(CHECK);
  }
  if (JvmtiExport::has_redefined_a_class()) {
    // Reinitialize vtable because RedefineClasses may have changed some
    // entries in this vtable for super classes so the CDS vtable might
    // point to old or obsolete entries.  RedefineClasses doesn't fix up
    // vtables in the shared system dictionary, only the main one.
    // It also redefines the itable too so fix that too.
    ResourceMark rm(THREAD);
    ik->vtable()->initialize_vtable(false, CHECK);
    ik->itable()->initialize_itable(false, CHECK);
  }

  // restore constant pool resolved references
  ik->constants()->restore_unshareable_info(CHECK);

  ik->array_klasses_do(restore_unshareable_in_class, CHECK);
}

// returns true IFF is_in_error_state() has been changed as a result of this call.
bool InstanceKlass::check_sharing_error_state() {
  assert(DumpSharedSpaces, "should only be called during dumping");
  bool old_state = is_in_error_state();

  if (!is_in_error_state()) {
    bool bad = false;
    for (InstanceKlass* sup = java_super(); sup; sup = sup->java_super()) {
      if (sup->is_in_error_state()) {
        bad = true;
        break;
      }
    }
    if (!bad) {
      Array<Klass*>* interfaces = transitive_interfaces();
      for (int i = 0; i < interfaces->length(); i++) {
        Klass* iface = interfaces->at(i);
        if (InstanceKlass::cast(iface)->is_in_error_state()) {
          bad = true;
          break;
        }
      }
    }

    if (bad) {
      set_in_error_state();
    }
  }

  return (old_state != is_in_error_state());
}

static void clear_all_breakpoints(Method* m) {
  m->clear_all_breakpoints();
}


void InstanceKlass::notify_unload_class(InstanceKlass* ik) {
  // notify the debugger
  if (JvmtiExport::should_post_class_unload()) {
    JvmtiExport::post_class_unload(ik);
  }

  // notify ClassLoadingService of class unload
  ClassLoadingService::notify_class_unloaded(ik);

#if INCLUDE_JFR
  assert(ik != NULL, "invariant");
  EventClassUnload event;
  event.set_unloadedClass(ik);
  event.set_definingClassLoader(ik->class_loader_data());
  event.commit();
#endif
}

void InstanceKlass::release_C_heap_structures(InstanceKlass* ik) {
  // Clean up C heap
  ik->release_C_heap_structures();
  ik->constants()->release_C_heap_structures();
}

void InstanceKlass::release_C_heap_structures() {

  // Can't release the constant pool here because the constant pool can be
  // deallocated separately from the InstanceKlass for default methods and
  // redefine classes.

  // Deallocate oop map cache
  if (_oop_map_cache != NULL) {
    delete _oop_map_cache;
    _oop_map_cache = NULL;
  }

  // Deallocate JNI identifiers for jfieldIDs
  JNIid::deallocate(jni_ids());
  set_jni_ids(NULL);

  jmethodID* jmeths = methods_jmethod_ids_acquire();
  if (jmeths != (jmethodID*)NULL) {
    release_set_methods_jmethod_ids(NULL);
    FreeHeap(jmeths);
  }

  // Deallocate MemberNameTable
  {
    Mutex* lock_or_null = SafepointSynchronize::is_at_safepoint() ? NULL : MemberNameTable_lock;
    MutexLockerEx ml(lock_or_null, Mutex::_no_safepoint_check_flag);
    MemberNameTable* mnt = member_names();
    if (mnt != NULL) {
      delete mnt;
      set_member_names(NULL);
    }
  }

  // release dependencies
  nmethodBucket* b = _dependencies;
  _dependencies = NULL;
  while (b != NULL) {
    nmethodBucket* next = b->next();
    delete b;
    b = next;
  }

  // Deallocate breakpoint records
  if (breakpoints() != 0x0) {
    methods_do(clear_all_breakpoints);
    assert(breakpoints() == 0x0, "should have cleared breakpoints");
  }

  // deallocate the cached class file
  if (_cached_class_file != NULL) {
    os::free(_cached_class_file, mtClass);
    _cached_class_file = NULL;
  }

  // Decrement symbol reference counts associated with the unloaded class.
  if (_name != NULL) _name->decrement_refcount();
  // unreference array name derived from this class name (arrays of an unloaded
  // class can't be referenced anymore).
  if (_array_name != NULL)  _array_name->decrement_refcount();
  if (_source_debug_extension != NULL) FREE_C_HEAP_ARRAY(char, _source_debug_extension, mtClass);

  assert(_total_instanceKlass_count >= 1, "Sanity check");
  Atomic::dec(&_total_instanceKlass_count);
}

void InstanceKlass::set_source_debug_extension(char* array, int length) {
  if (array == NULL) {
    _source_debug_extension = NULL;
  } else {
    // Adding one to the attribute length in order to store a null terminator
    // character could cause an overflow because the attribute length is
    // already coded with an u4 in the classfile, but in practice, it's
    // unlikely to happen.
    assert((length+1) > length, "Overflow checking");
    char* sde = NEW_C_HEAP_ARRAY(char, (length + 1), mtClass);
    for (int i = 0; i < length; i++) {
      sde[i] = array[i];
    }
    sde[length] = '\0';
    _source_debug_extension = sde;
  }
}

address InstanceKlass::static_field_addr(int offset) {
  return (address)(offset + InstanceMirrorKlass::offset_of_static_fields() + cast_from_oop<intptr_t>(java_mirror()));
}


const char* InstanceKlass::signature_name() const {
  int hash_len = 0;
  char hash_buf[40];

  // If this is an anonymous class, append a hash to make the name unique
  if (is_anonymous()) {
    assert(EnableInvokeDynamic, "EnableInvokeDynamic was not set.");
    intptr_t hash = (java_mirror() != NULL) ? java_mirror()->identity_hash() : 0;
    sprintf(hash_buf, "/" UINTX_FORMAT, (uintx)hash);
    hash_len = (int)strlen(hash_buf);
  }

  // Get the internal name as a c string
  const char* src = (const char*) (name()->as_C_string());
  const int src_length = (int)strlen(src);

  char* dest = NEW_RESOURCE_ARRAY(char, src_length + hash_len + 3);

  // Add L as type indicator
  int dest_index = 0;
  dest[dest_index++] = 'L';

  // Add the actual class name
  for (int src_index = 0; src_index < src_length; ) {
    dest[dest_index++] = src[src_index++];
  }

  // If we have a hash, append it
  for (int hash_index = 0; hash_index < hash_len; ) {
    dest[dest_index++] = hash_buf[hash_index++];
  }

  // Add the semicolon and the NULL
  dest[dest_index++] = ';';
  dest[dest_index] = '\0';
  return dest;
}

// different versions of is_same_class_package
bool InstanceKlass::is_same_class_package(Klass* class2) {
  Klass* class1 = this;
  oop classloader1 = InstanceKlass::cast(class1)->class_loader();
  Symbol* classname1 = class1->name();

  if (class2->oop_is_objArray()) {
    class2 = ObjArrayKlass::cast(class2)->bottom_klass();
  }
  oop classloader2;
  if (class2->oop_is_instance()) {
    classloader2 = InstanceKlass::cast(class2)->class_loader();
  } else {
    assert(class2->oop_is_typeArray(), "should be type array");
    classloader2 = NULL;
  }
  Symbol* classname2 = class2->name();

  return InstanceKlass::is_same_class_package(classloader1, classname1,
                                              classloader2, classname2);
}

bool InstanceKlass::is_same_class_package(oop classloader2, Symbol* classname2) {
  Klass* class1 = this;
  oop classloader1 = InstanceKlass::cast(class1)->class_loader();
  Symbol* classname1 = class1->name();

  return InstanceKlass::is_same_class_package(classloader1, classname1,
                                              classloader2, classname2);
}

// return true if two classes are in the same package, classloader
// and classname information is enough to determine a class's package
bool InstanceKlass::is_same_class_package(oop class_loader1, Symbol* class_name1,
                                          oop class_loader2, Symbol* class_name2) {
  if (class_loader1 != class_loader2) {
    return false;
  } else if (class_name1 == class_name2) {
    return true;                // skip painful bytewise comparison
  } else {
    ResourceMark rm;

    // The Symbol*'s are in UTF8 encoding. Since we only need to check explicitly
    // for ASCII characters ('/', 'L', '['), we can keep them in UTF8 encoding.
    // Otherwise, we just compare jbyte values between the strings.
    const jbyte *name1 = class_name1->base();
    const jbyte *name2 = class_name2->base();

    const jbyte *last_slash1 = UTF8::strrchr(name1, class_name1->utf8_length(), '/');
    const jbyte *last_slash2 = UTF8::strrchr(name2, class_name2->utf8_length(), '/');

    if ((last_slash1 == NULL) || (last_slash2 == NULL)) {
      // One of the two doesn't have a package.  Only return true
      // if the other one also doesn't have a package.
      return last_slash1 == last_slash2;
    } else {
      // Skip over '['s
      if (*name1 == '[') {
        do {
          name1++;
        } while (*name1 == '[');
        if (*name1 != 'L') {
          // Something is terribly wrong.  Shouldn't be here.
          return false;
        }
      }
      if (*name2 == '[') {
        do {
          name2++;
        } while (*name2 == '[');
        if (*name2 != 'L') {
          // Something is terribly wrong.  Shouldn't be here.
          return false;
        }
      }

      // Check that package part is identical
      int length1 = last_slash1 - name1;
      int length2 = last_slash2 - name2;

      return UTF8::equal(name1, length1, name2, length2);
    }
  }
}

// Returns true iff super_method can be overridden by a method in targetclassname
// See JLS 3rd edition 8.4.6.1
// Assumes name-signature match
// "this" is InstanceKlass of super_method which must exist
// note that the InstanceKlass of the method in the targetclassname has not always been created yet
bool InstanceKlass::is_override(methodHandle super_method, Handle targetclassloader, Symbol* targetclassname, TRAPS) {
   // Private methods can not be overridden
   if (super_method->is_private()) {
     return false;
   }
   // If super method is accessible, then override
   if ((super_method->is_protected()) ||
       (super_method->is_public())) {
     return true;
   }
   // Package-private methods are not inherited outside of package
   assert(super_method->is_package_private(), "must be package private");
   return(is_same_class_package(targetclassloader(), targetclassname));
}

/* defined for now in jvm.cpp, for historical reasons *--
Klass* InstanceKlass::compute_enclosing_class_impl(instanceKlassHandle self,
                                                     Symbol*& simple_name_result, TRAPS) {
  ...
}
*/

// tell if two classes have the same enclosing class (at package level)
bool InstanceKlass::is_same_package_member_impl(instanceKlassHandle class1,
                                                Klass* class2_oop, TRAPS) {
  if (class2_oop == class1())                       return true;
  if (!class2_oop->oop_is_instance())  return false;
  instanceKlassHandle class2(THREAD, class2_oop);

  // must be in same package before we try anything else
  if (!class1->is_same_class_package(class2->class_loader(), class2->name()))
    return false;

  // As long as there is an outer1.getEnclosingClass,
  // shift the search outward.
  instanceKlassHandle outer1 = class1;
  for (;;) {
    // As we walk along, look for equalities between outer1 and class2.
    // Eventually, the walks will terminate as outer1 stops
    // at the top-level class around the original class.
    bool ignore_inner_is_member;
    Klass* next = outer1->compute_enclosing_class(&ignore_inner_is_member,
                                                    CHECK_false);
    if (next == NULL)  break;
    if (next == class2())  return true;
    outer1 = instanceKlassHandle(THREAD, next);
  }

  // Now do the same for class2.
  instanceKlassHandle outer2 = class2;
  for (;;) {
    bool ignore_inner_is_member;
    Klass* next = outer2->compute_enclosing_class(&ignore_inner_is_member,
                                                    CHECK_false);
    if (next == NULL)  break;
    // Might as well check the new outer against all available values.
    if (next == class1())  return true;
    if (next == outer1())  return true;
    outer2 = instanceKlassHandle(THREAD, next);
  }

  // If by this point we have not found an equality between the
  // two classes, we know they are in separate package members.
  return false;
}


jint InstanceKlass::compute_modifier_flags(TRAPS) const {
  jint access = access_flags().as_int();

  // But check if it happens to be member class.
  instanceKlassHandle ik(THREAD, this);
  InnerClassesIterator iter(ik);
  for (; !iter.done(); iter.next()) {
    int ioff = iter.inner_class_info_index();
    // Inner class attribute can be zero, skip it.
    // Strange but true:  JVM spec. allows null inner class refs.
    if (ioff == 0) continue;

    // only look at classes that are already loaded
    // since we are looking for the flags for our self.
    Symbol* inner_name = ik->constants()->klass_name_at(ioff);
    if ((ik->name() == inner_name)) {
      // This is really a member class.
      access = iter.inner_access_flags();
      break;
    }
  }
  // Remember to strip ACC_SUPER bit
  return (access & (~JVM_ACC_SUPER)) & JVM_ACC_WRITTEN_FLAGS;
}

jint InstanceKlass::jvmti_class_status() const {
  jint result = 0;

  if (is_linked()) {
    result |= JVMTI_CLASS_STATUS_VERIFIED | JVMTI_CLASS_STATUS_PREPARED;
  }

  if (is_initialized()) {
    assert(is_linked(), "Class status is not consistent");
    result |= JVMTI_CLASS_STATUS_INITIALIZED;
  }
  if (is_in_error_state()) {
    result |= JVMTI_CLASS_STATUS_ERROR;
  }
  return result;
}

Method* InstanceKlass::method_at_itable(Klass* holder, int index, TRAPS) {
  itableOffsetEntry* ioe = (itableOffsetEntry*)start_of_itable();
  int method_table_offset_in_words = ioe->offset()/wordSize;
  int nof_interfaces = (method_table_offset_in_words - itable_offset_in_words())
                       / itableOffsetEntry::size();

  for (int cnt = 0 ; ; cnt ++, ioe ++) {
    // If the interface isn't implemented by the receiver class,
    // the VM should throw IncompatibleClassChangeError.
    if (cnt >= nof_interfaces) {
      THROW_NULL(vmSymbols::java_lang_IncompatibleClassChangeError());
    }

    Klass* ik = ioe->interface_klass();
    if (ik == holder) break;
  }

  itableMethodEntry* ime = ioe->first_method_entry(this);
  Method* m = ime[index].method();
  if (m == NULL) {
    THROW_NULL(vmSymbols::java_lang_AbstractMethodError());
  }
  return m;
}


#if INCLUDE_JVMTI
// update default_methods for redefineclasses for methods that are
// not yet in the vtable due to concurrent subclass define and superinterface
// redefinition
// Note: those in the vtable, should have been updated via adjust_method_entries
void InstanceKlass::adjust_default_methods(InstanceKlass* holder, bool* trace_name_printed) {
  // search the default_methods for uses of either obsolete or EMCP methods
  if (default_methods() != NULL) {
    for (int index = 0; index < default_methods()->length(); index ++) {
      Method* old_method = default_methods()->at(index);
      if (old_method == NULL || old_method->method_holder() != holder || !old_method->is_old()) {
        continue; // skip uninteresting entries
      }
      assert(!old_method->is_deleted(), "default methods may not be deleted");

      Method* new_method = holder->method_with_idnum(old_method->orig_method_idnum());

      assert(new_method != NULL, "method_with_idnum() should not be NULL");
      assert(old_method != new_method, "sanity check");

      default_methods()->at_put(index, new_method);
      if (RC_TRACE_IN_RANGE(0x00100000, 0x00400000)) {
        if (!(*trace_name_printed)) {
          // RC_TRACE_MESG macro has an embedded ResourceMark
          RC_TRACE_MESG(("adjust: klassname=%s default methods from name=%s",
                         external_name(),
                         old_method->method_holder()->external_name()));
          *trace_name_printed = true;
        }
        RC_TRACE(0x00100000, ("default method update: %s(%s) ",
                              new_method->name()->as_C_string(),
                              new_method->signature()->as_C_string()));
      }
    }
  }
}
#endif // INCLUDE_JVMTI

// On-stack replacement stuff
void InstanceKlass::add_osr_nmethod(nmethod* n) {
#ifndef PRODUCT
  if (TieredCompilation) {
      nmethod * prev = lookup_osr_nmethod(n->method(), n->osr_entry_bci(), n->comp_level(), true);
      assert(prev == NULL || !prev->is_in_use(),
      "redundunt OSR recompilation detected. memory leak in CodeCache!");
  }
#endif
  // only one compilation can be active
  NEEDS_CLEANUP
  // This is a short non-blocking critical region, so the no safepoint check is ok.
  OsrList_lock->lock_without_safepoint_check();
  assert(n->is_osr_method(), "wrong kind of nmethod");
  n->set_osr_link(osr_nmethods_head());
  set_osr_nmethods_head(n);
  // Raise the highest osr level if necessary
  if (TieredCompilation) {
    Method* m = n->method();
    m->set_highest_osr_comp_level(MAX2(m->highest_osr_comp_level(), n->comp_level()));
  }
  // Remember to unlock again
  OsrList_lock->unlock();

  // Get rid of the osr methods for the same bci that have lower levels.
  if (TieredCompilation) {
    for (int l = CompLevel_limited_profile; l < n->comp_level(); l++) {
      nmethod *inv = lookup_osr_nmethod(n->method(), n->osr_entry_bci(), l, true);
      if (inv != NULL && inv->is_in_use()) {
        inv->make_not_entrant();
      }
    }
  }
}


void InstanceKlass::remove_osr_nmethod(nmethod* n) {
  // This is a short non-blocking critical region, so the no safepoint check is ok.
  OsrList_lock->lock_without_safepoint_check();
  assert(n->is_osr_method(), "wrong kind of nmethod");
  nmethod* last = NULL;
  nmethod* cur  = osr_nmethods_head();
  int max_level = CompLevel_none;  // Find the max comp level excluding n
  Method* m = n->method();
  // Search for match
  while(cur != NULL && cur != n) {
    if (TieredCompilation && m == cur->method()) {
      // Find max level before n
      max_level = MAX2(max_level, cur->comp_level());
    }
    last = cur;
    cur = cur->osr_link();
  }
  nmethod* next = NULL;
  if (cur == n) {
    next = cur->osr_link();
    if (last == NULL) {
      // Remove first element
      set_osr_nmethods_head(next);
    } else {
      last->set_osr_link(next);
    }
  }
  n->set_osr_link(NULL);
  if (TieredCompilation) {
    cur = next;
    while (cur != NULL) {
      // Find max level after n
      if (m == cur->method()) {
        max_level = MAX2(max_level, cur->comp_level());
      }
      cur = cur->osr_link();
    }
    m->set_highest_osr_comp_level(max_level);
  }
  // Remember to unlock again
  OsrList_lock->unlock();
}

int InstanceKlass::mark_osr_nmethods(const Method* m) {
  // This is a short non-blocking critical region, so the no safepoint check is ok.
  MutexLockerEx ml(OsrList_lock, Mutex::_no_safepoint_check_flag);
  nmethod* osr = osr_nmethods_head();
  int found = 0;
  while (osr != NULL) {
    assert(osr->is_osr_method(), "wrong kind of nmethod found in chain");
    if (osr->method() == m) {
      osr->mark_for_deoptimization();
      found++;
    }
    osr = osr->osr_link();
  }
  return found;
}

nmethod* InstanceKlass::lookup_osr_nmethod(const Method* m, int bci, int comp_level, bool match_level) const {
  // This is a short non-blocking critical region, so the no safepoint check is ok.
  OsrList_lock->lock_without_safepoint_check();
  nmethod* osr = osr_nmethods_head();
  nmethod* best = NULL;
  while (osr != NULL) {
    assert(osr->is_osr_method(), "wrong kind of nmethod found in chain");
    // There can be a time when a c1 osr method exists but we are waiting
    // for a c2 version. When c2 completes its osr nmethod we will trash
    // the c1 version and only be able to find the c2 version. However
    // while we overflow in the c1 code at back branches we don't want to
    // try and switch to the same code as we are already running

    if (osr->method() == m &&
        (bci == InvocationEntryBci || osr->osr_entry_bci() == bci)) {
      if (match_level) {
        if (osr->comp_level() == comp_level) {
          // Found a match - return it.
          OsrList_lock->unlock();
          return osr;
        }
      } else {
        if (best == NULL || (osr->comp_level() > best->comp_level())) {
          if (osr->comp_level() == CompLevel_highest_tier) {
            // Found the best possible - return it.
            OsrList_lock->unlock();
            return osr;
          }
          best = osr;
        }
      }
    }
    osr = osr->osr_link();
  }
  OsrList_lock->unlock();

  assert(match_level == false || best == NULL, "shouldn't pick up anything if match_level is set");
  if (best != NULL && best->comp_level() >= comp_level) {
    return best;
  }
  return NULL;
}

oop InstanceKlass::add_member_name(Handle mem_name, bool intern) {
  jweak mem_name_wref = JNIHandles::make_weak_global(mem_name);
  MutexLocker ml(MemberNameTable_lock);
  DEBUG_ONLY(No_Safepoint_Verifier nsv);

  // Check if method has been redefined while taking out MemberNameTable_lock, if so
  // return false.  We cannot cache obsolete methods. They will crash when the function
  // is called!
  Method* method = (Method*)java_lang_invoke_MemberName::vmtarget(mem_name());
  if (method->is_obsolete()) {
    return NULL;
  } else if (method->is_old()) {
    // Replace method with redefined version
    java_lang_invoke_MemberName::set_vmtarget(mem_name(), method_with_idnum(method->method_idnum()));
  }

  if (_member_names == NULL) {
    _member_names = new (ResourceObj::C_HEAP, mtClass) MemberNameTable(idnum_allocated_count());
  }
  if (intern) {
    return _member_names->find_or_add_member_name(mem_name_wref);
  } else {
    return _member_names->add_member_name(mem_name_wref);
  }
}

// -----------------------------------------------------------------------------------------------------
// Printing

#ifndef PRODUCT

#define BULLET  " - "

static const char* state_names[] = {
  "allocated", "loaded", "linked", "being_initialized", "fully_initialized", "initialization_error"
};
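// Note: these strings mirror the InstanceKlass::ClassState lifecycle
// (allocated -> loaded -> linked -> being_initialized -> fully_initialized,
// or initialization_error on failure). A class whose static initializer is
// blocked waiting for another class's initialization stays in
// "being_initialized"; in a cross-thread circular initialization neither
// class ever reaches "fully_initialized".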

static void print_vtable(intptr_t* start, int len, outputStream* st) {
  for (int i = 0; i < len; i++) {
    intptr_t e = start[i];
    st->print("%d : " INTPTR_FORMAT, i, e);
    if (e != 0 && ((Metadata*)e)->is_metaspace_object()) {
      st->print(" ");
      ((Metadata*)e)->print_value_on(st);
    }
    st->cr();
  }
}

void InstanceKlass::print_on(outputStream* st) const {
  assert(is_klass(), "must be klass");
  Klass::print_on(st);

  st->print(BULLET"instance size:     %d", size_helper());                        st->cr();
  st->print(BULLET"klass size:        %d", size());                               st->cr();
  st->print(BULLET"access:            "); access_flags().print_on(st);            st->cr();
  st->print(BULLET"state:             "); st->print_cr("%s", state_names[_init_state]);
  st->print(BULLET"name:              "); name()->print_value_on(st);             st->cr();
  st->print(BULLET"super:             "); super()->print_value_on_maybe_null(st); st->cr();
  st->print(BULLET"sub:               ");
  Klass* sub = subklass();
  int n;
  for (n = 0; sub != NULL; n++, sub = sub->next_sibling()) {
    if (n < MaxSubklassPrintSize) {
      sub->print_value_on(st);
      st->print("   ");
    }
  }
  if (n >= MaxSubklassPrintSize) st->print("(%d more klasses...)", n - MaxSubklassPrintSize);
  st->cr();

  if (is_interface()) {
    st->print_cr(BULLET"nof implementors:  %d", nof_implementors());
    if (nof_implementors() == 1) {
      st->print_cr(BULLET"implementor:    ");
      st->print("   ");
      implementor()->print_value_on(st);
      st->cr();
    }
  }

  st->print(BULLET"arrays:            "); array_klasses()->print_value_on_maybe_null(st); st->cr();
  st->print(BULLET"methods:           "); methods()->print_value_on(st);                  st->cr();
  if (Verbose || WizardMode) {
    Array<Method*>* method_array = methods();
    for (int i = 0; i < method_array->length(); i++) {
      st->print("%d : ", i); method_array->at(i)->print_value(); st->cr();
    }
  }
  st->print(BULLET"method ordering:   "); method_ordering()->print_value_on(st);      st->cr();
  st->print(BULLET"default_methods:   "); default_methods()->print_value_on(st);      st->cr();
  if (Verbose && default_methods() != NULL) {
    Array<Method*>* method_array = default_methods();
    for (int i = 0; i < method_array->length(); i++) {
      st->print("%d : ", i); method_array->at(i)->print_value(); st->cr();
    }
  }
  if (default_vtable_indices() != NULL) {
    st->print(BULLET"default vtable indices:   "); default_vtable_indices()->print_value_on(st);       st->cr();
  }
  st->print(BULLET"local interfaces:  "); local_interfaces()->print_value_on(st);      st->cr();
  st->print(BULLET"trans. interfaces: "); transitive_interfaces()->print_value_on(st); st->cr();
  st->print(BULLET"constants:         "); constants()->print_value_on(st);         st->cr();
  if (class_loader_data() != NULL) {
    st->print(BULLET"class loader data:  ");
    class_loader_data()->print_value_on(st);
    st->cr();
  }
  st->print(BULLET"host class:        "); host_klass()->print_value_on_maybe_null(st); st->cr();
  if (source_file_name() != NULL) {
    st->print(BULLET"source file:       ");
    source_file_name()->print_value_on(st);
    st->cr();
  }
  if (source_debug_extension() != NULL) {
    st->print(BULLET"source debug extension:       ");
    st->print("%s", source_debug_extension());
    st->cr();
  }
  st->print(BULLET"class annotations:       "); class_annotations()->print_value_on(st); st->cr();
  st->print(BULLET"class type annotations:  "); class_type_annotations()->print_value_on(st); st->cr();
  st->print(BULLET"field annotations:       "); fields_annotations()->print_value_on(st); st->cr();
  st->print(BULLET"field type annotations:  "); fields_type_annotations()->print_value_on(st); st->cr();
  {
    bool have_pv = false;
    // previous versions are linked together through the InstanceKlass
    for (InstanceKlass* pv_node = _previous_versions;
         pv_node != NULL;
         pv_node = pv_node->previous_versions()) {
      if (!have_pv)
        st->print(BULLET"previous version:  ");
      have_pv = true;
      pv_node->constants()->print_value_on(st);
    }
    if (have_pv) st->cr();
  }

  if (generic_signature() != NULL) {
    st->print(BULLET"generic signature: ");
    generic_signature()->print_value_on(st);
    st->cr();
  }
  st->print(BULLET"inner classes:     "); inner_classes()->print_value_on(st);     st->cr();
  st->print(BULLET"java mirror:       "); java_mirror()->print_value_on(st);       st->cr();
  st->print(BULLET"vtable length      %d  (start addr: " INTPTR_FORMAT ")", vtable_length(), start_of_vtable());  st->cr();
  if (vtable_length() > 0 && (Verbose || WizardMode))  print_vtable(start_of_vtable(), vtable_length(), st);
  st->print(BULLET"itable length      %d (start addr: " INTPTR_FORMAT ")", itable_length(), start_of_itable()); st->cr();
  if (itable_length() > 0 && (Verbose || WizardMode))  print_vtable(start_of_itable(), itable_length(), st);
  st->print_cr(BULLET"---- static fields (%d words):", static_field_size());
  FieldPrinter print_static_field(st);
  ((InstanceKlass*)this)->do_local_static_fields(&print_static_field);
  st->print_cr(BULLET"---- non-static fields (%d words):", nonstatic_field_size());
  FieldPrinter print_nonstatic_field(st);
  ((InstanceKlass*)this)->do_nonstatic_fields(&print_nonstatic_field);

  st->print(BULLET"non-static oop maps: ");
  OopMapBlock* map     = start_of_nonstatic_oop_maps();
  OopMapBlock* end_map = map + nonstatic_oop_map_count();
  while (map < end_map) {
    st->print("%d-%d ", map->offset(), map->offset() + heapOopSize*(map->count() - 1));
    map++;
  }
  st->cr();
}

#endif //PRODUCT

void InstanceKlass::print_value_on(outputStream* st) const {
  assert(is_klass(), "must be klass");
  if (Verbose || WizardMode)  access_flags().print_on(st);
  name()->print_value_on(st);
}

#ifndef PRODUCT

void FieldPrinter::do_field(fieldDescriptor* fd) {
  _st->print(BULLET);
   if (_obj == NULL) {
     fd->print_on(_st);
     _st->cr();
   } else {
     fd->print_on_for(_st, _obj);
     _st->cr();
   }
}


void InstanceKlass::oop_print_on(oop obj, outputStream* st) {
  Klass::oop_print_on(obj, st);

  if (this == SystemDictionary::String_klass()) {
    typeArrayOop value  = java_lang_String::value(obj);
    juint        offset = java_lang_String::offset(obj);
    juint        length = java_lang_String::length(obj);
    if (value != NULL &&
        value->is_typeArray() &&
        offset          <= (juint) value->length() &&
        offset + length <= (juint) value->length()) {
      st->print(BULLET"string: ");
      java_lang_String::print(obj, st);
      st->cr();
      if (!WizardMode)  return;  // that is enough
    }
  }

  st->print_cr(BULLET"---- fields (total size %d words):", oop_size(obj));
  FieldPrinter print_field(st, obj);
  do_nonstatic_fields(&print_field);

  if (this == SystemDictionary::Class_klass()) {
    st->print(BULLET"signature: ");
    java_lang_Class::print_signature(obj, st);
    st->cr();
    Klass* mirrored_klass = java_lang_Class::as_Klass(obj);
    st->print(BULLET"fake entry for mirror: ");
    mirrored_klass->print_value_on_maybe_null(st);
    st->cr();
    Klass* array_klass = java_lang_Class::array_klass(obj);
    st->print(BULLET"fake entry for array: ");
    array_klass->print_value_on_maybe_null(st);
    st->cr();
    st->print_cr(BULLET"fake entry for oop_size: %d", java_lang_Class::oop_size(obj));
    st->print_cr(BULLET"fake entry for static_oop_field_count: %d", java_lang_Class::static_oop_field_count(obj));
    Klass* real_klass = java_lang_Class::as_Klass(obj);
    if (real_klass != NULL && real_klass->oop_is_instance()) {
      InstanceKlass::cast(real_klass)->do_local_static_fields(&print_field);
    }
  } else if (this == SystemDictionary::MethodType_klass()) {
    st->print(BULLET"signature: ");
    java_lang_invoke_MethodType::print_signature(obj, st);
    st->cr();
  }
}

#endif //PRODUCT

void InstanceKlass::oop_print_value_on(oop obj, outputStream* st) {
  st->print("a ");
  name()->print_value_on(st);
  obj->print_address_on(st);
  if (this == SystemDictionary::String_klass()
      && java_lang_String::value(obj) != NULL) {
    ResourceMark rm;
    int len = java_lang_String::length(obj);
    int plen = (len < 24 ? len : 12);
    char* str = java_lang_String::as_utf8_string(obj, 0, plen);
    st->print(" = \"%s\"", str);
    if (len > plen)
      st->print("...[%d]", len);
  } else if (this == SystemDictionary::Class_klass()) {
    Klass* k = java_lang_Class::as_Klass(obj);
    st->print(" = ");
    if (k != NULL) {
      k->print_value_on(st);
    } else {
      const char* tname = type2name(java_lang_Class::primitive_type(obj));
      st->print("%s", tname ? tname : "type?");
    }
  } else if (this == SystemDictionary::MethodType_klass()) {
    st->print(" = ");
    java_lang_invoke_MethodType::print_signature(obj, st);
  } else if (java_lang_boxing_object::is_instance(obj)) {
    st->print(" = ");
    java_lang_boxing_object::print(obj, st);
  } else if (this == SystemDictionary::LambdaForm_klass()) {
    oop vmentry = java_lang_invoke_LambdaForm::vmentry(obj);
    if (vmentry != NULL) {
      st->print(" => ");
      vmentry->print_value_on(st);
    }
  } else if (this == SystemDictionary::MemberName_klass()) {
    Metadata* vmtarget = java_lang_invoke_MemberName::vmtarget(obj);
    if (vmtarget != NULL) {
      st->print(" = ");
      vmtarget->print_value_on(st);
    } else {
      java_lang_invoke_MemberName::clazz(obj)->print_value_on(st);
      st->print(".");
      java_lang_invoke_MemberName::name(obj)->print_value_on(st);
    }
  }
}

const char* InstanceKlass::internal_name() const {
  return external_name();
}

#if INCLUDE_SERVICES
// Size Statistics
void InstanceKlass::collect_statistics(KlassSizeStats *sz) const {
  Klass::collect_statistics(sz);

  sz->_inst_size  = HeapWordSize * size_helper();
  sz->_vtab_bytes = HeapWordSize * align_object_offset(vtable_length());
  sz->_itab_bytes = HeapWordSize * align_object_offset(itable_length());
  sz->_nonstatic_oopmap_bytes = HeapWordSize *
        ((is_interface() || is_anonymous()) ?
         align_object_offset(nonstatic_oop_map_size()) :
         nonstatic_oop_map_size());

  int n = 0;
  n += (sz->_methods_array_bytes         = sz->count_array(methods()));
  n += (sz->_method_ordering_bytes       = sz->count_array(method_ordering()));
  n += (sz->_local_interfaces_bytes      = sz->count_array(local_interfaces()));
  n += (sz->_transitive_interfaces_bytes = sz->count_array(transitive_interfaces()));
  n += (sz->_fields_bytes                = sz->count_array(fields()));
  n += (sz->_inner_classes_bytes         = sz->count_array(inner_classes()));
  sz->_ro_bytes += n;

  const ConstantPool* cp = constants();
  if (cp) {
    cp->collect_statistics(sz);
  }

  const Annotations* anno = annotations();
  if (anno) {
    anno->collect_statistics(sz);
  }

  const Array<Method*>* methods_array = methods();
  if (methods()) {
    for (int i = 0; i < methods_array->length(); i++) {
      Method* method = methods_array->at(i);
      if (method) {
        sz->_method_count ++;
        method->collect_statistics(sz);
      }
    }
  }
}
#endif // INCLUDE_SERVICES

// Verification

class VerifyFieldClosure: public OopClosure {
 protected:
  template <class T> void do_oop_work(T* p) {
    oop obj = oopDesc::load_decode_heap_oop(p);
    if (!obj->is_oop_or_null()) {
      tty->print_cr("Failed: " PTR_FORMAT " -> " PTR_FORMAT, p, (address)obj);
      Universe::print();
      guarantee(false, "boom");
    }
  }
 public:
  virtual void do_oop(oop* p)       { VerifyFieldClosure::do_oop_work(p); }
  virtual void do_oop(narrowOop* p) { VerifyFieldClosure::do_oop_work(p); }
};

void InstanceKlass::verify_on(outputStream* st) {
#ifndef PRODUCT
  // Avoid redundant verifies, this really should be in product.
  if (_verify_count == Universe::verify_count()) return;
  _verify_count = Universe::verify_count();
#endif

  // Verify Klass
  Klass::verify_on(st);

  // Verify that klass is present in ClassLoaderData
  guarantee(class_loader_data()->contains_klass(this),
            "this class isn't found in class loader data");

  // Verify vtables
  if (is_linked()) {
    ResourceMark rm;
    // $$$ This used to be done only for m/s collections.  Doing it
    // always seemed a valid generalization.  (DLD -- 6/00)
    vtable()->verify(st);
  }

  // Verify first subklass
  if (subklass_oop() != NULL) {
    guarantee(subklass_oop()->is_klass(), "should be klass");
  }

  // Verify siblings
  Klass* super = this->super();
  Klass* sib = next_sibling();
  if (sib != NULL) {
    if (sib == this) {
      fatal(err_msg("subclass points to itself " PTR_FORMAT, sib));
    }

    guarantee(sib->is_klass(), "should be klass");
    guarantee(sib->super() == super, "siblings should have same superklass");
  }

  // Verify implementor fields
  Klass* im = implementor();
  if (im != NULL) {
    guarantee(is_interface(), "only interfaces should have implementor set");
    guarantee(im->is_klass(), "should be klass");
    guarantee(!im->is_interface() || im == this,
      "implementors cannot be interfaces");
  }

  // Verify local interfaces
  if (local_interfaces()) {
    Array<Klass*>* local_interfaces = this->local_interfaces();
    for (int j = 0; j < local_interfaces->length(); j++) {
      Klass* e = local_interfaces->at(j);
      guarantee(e->is_klass() && e->is_interface(), "invalid local interface");
    }
  }

  // Verify transitive interfaces
  if (transitive_interfaces() != NULL) {
    Array<Klass*>* transitive_interfaces = this->transitive_interfaces();
    for (int j = 0; j < transitive_interfaces->length(); j++) {
      Klass* e = transitive_interfaces->at(j);
      guarantee(e->is_klass() && e->is_interface(), "invalid transitive interface");
    }
  }

  // Verify methods
  if (methods() != NULL) {
    Array<Method*>* methods = this->methods();
    for (int j = 0; j < methods->length(); j++) {
      guarantee(methods->at(j)->is_method(), "non-method in methods array");
    }
    for (int j = 0; j < methods->length() - 1; j++) {
      Method* m1 = methods->at(j);
      Method* m2 = methods->at(j + 1);
      guarantee(m1->name()->fast_compare(m2->name()) <= 0, "methods not sorted correctly");
    }
  }

  // Verify method ordering
  if (method_ordering() != NULL) {
    Array<int>* method_ordering = this->method_ordering();
    int length = method_ordering->length();
    if (JvmtiExport::can_maintain_original_method_order() ||
        ((UseSharedSpaces || DumpSharedSpaces) && length != 0)) {
      guarantee(length == methods()->length(), "invalid method ordering length");
      jlong sum = 0;
      for (int j = 0; j < length; j++) {
        int original_index = method_ordering->at(j);
        guarantee(original_index >= 0, "invalid method ordering index");
        guarantee(original_index < length, "invalid method ordering index");
        sum += original_index;
      }
      // Verify sum of indices 0,1,...,length-1
      guarantee(sum == ((jlong)length*(length-1))/2, "invalid method ordering sum");
    } else {
      guarantee(length == 0, "invalid method ordering length");
    }
  }

  // Verify default methods
  if (default_methods() != NULL) {
    Array<Method*>* methods = this->default_methods();
    for (int j = 0; j < methods->length(); j++) {
      guarantee(methods->at(j)->is_method(), "non-method in methods array");
    }
    for (int j = 0; j < methods->length() - 1; j++) {
      Method* m1 = methods->at(j);
      Method* m2 = methods->at(j + 1);
      guarantee(m1->name()->fast_compare(m2->name()) <= 0, "methods not sorted correctly");
    }
  }

  // Verify JNI static field identifiers
  if (jni_ids() != NULL) {
    jni_ids()->verify(this);
  }

  // Verify other fields
  if (array_klasses() != NULL) {
    guarantee(array_klasses()->is_klass(), "should be klass");
  }
  if (constants() != NULL) {
    guarantee(constants()->is_constantPool(), "should be constant pool");
  }
  const Klass* host = host_klass();
  if (host != NULL) {
    guarantee(host->is_klass(), "should be klass");
  }
}

void InstanceKlass::oop_verify_on(oop obj, outputStream* st) {
  Klass::oop_verify_on(obj, st);
  VerifyFieldClosure blk;
  obj->oop_iterate_no_header(&blk);
}


// JNIid class for jfieldIDs only
// Note to reviewers:
// These JNI functions are just moved over to column 1 and not changed
// in the compressed oops workspace.
JNIid::JNIid(Klass* holder, int offset, JNIid* next) {
  _holder = holder;
  _offset = offset;
  _next = next;
  debug_only(_is_static_field_id = false;)
}


JNIid* JNIid::find(int offset) {
  JNIid* current = this;
  while (current != NULL) {
    if (current->offset() == offset) return current;
    current = current->next();
  }
  return NULL;
}

void JNIid::deallocate(JNIid* current) {
  while (current != NULL) {
    JNIid* next = current->next();
    delete current;
    current = next;
  }
}


void JNIid::verify(Klass* holder) {
  int first_field_offset  = InstanceMirrorKlass::offset_of_static_fields();
  int end_field_offset;
  end_field_offset = first_field_offset + (InstanceKlass::cast(holder)->static_field_size() * wordSize);

  JNIid* current = this;
  while (current != NULL) {
    guarantee(current->holder() == holder, "Invalid klass in JNIid");
#ifdef ASSERT
    int o = current->offset();
    if (current->is_static_field_id()) {
      guarantee(o >= first_field_offset  && o < end_field_offset,  "Invalid static field offset in JNIid");
    }
#endif
    current = current->next();
  }
}


#ifdef ASSERT
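// Note: in a debug build this setter asserts that a class only ever moves
// forward through the lifecycle states ("illegal state transition").
// A two-thread circular initialization hang does not trip this assert:
// both classes legitimately remain in being_initialized, each thread
// parked on the other class's initialization lock, so the transition to
// fully_initialized never happens for either of them.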
void InstanceKlass::set_init_state(ClassState state) {
  bool good_state = is_shared() ? (_init_state <= state)
                                               : (_init_state < state);
  assert(good_state || state == allocated, "illegal state transition");
  assert(_init_thread == NULL, "should be cleared before state change");
  _init_state = (u1)state;
}
#endif


// RedefineClasses() support for previous versions:

// Purge previous versions
void InstanceKlass::purge_previous_versions(InstanceKlass* ik) {
  if (ik->previous_versions() != NULL) {
    // This klass has previous versions so see what we can cleanup
    // while it is safe to do so.

    int deleted_count = 0;    // leave debugging breadcrumbs
    int live_count = 0;
    ClassLoaderData* loader_data = ik->class_loader_data();
    assert(loader_data != NULL, "should never be null");

    // RC_TRACE macro has an embedded ResourceMark
    RC_TRACE(0x00000200, ("purge: %s: previous versions", ik->external_name()));

    // previous versions are linked together through the InstanceKlass
    InstanceKlass* pv_node = ik->previous_versions();
    InstanceKlass* last = ik;
    int version = 0;

    // check the previous versions list
    for (; pv_node != NULL; ) {

      ConstantPool* pvcp = pv_node->constants();
      assert(pvcp != NULL, "cp ref was unexpectedly cleared");


      if (!pvcp->on_stack()) {
        // If the constant pool isn't on stack, none of the methods
        // are executing.  Unlink this previous_version.
        // The previous version InstanceKlass is on the ClassLoaderData deallocate list
        // so will be deallocated during the next phase of class unloading.
        pv_node = pv_node->previous_versions();
        last->link_previous_versions(pv_node);
        deleted_count++;
        version++;
        continue;
      } else {
        RC_TRACE(0x00000200, ("purge: previous version " INTPTR_FORMAT " is alive",
                              pv_node));
        assert(pvcp->pool_holder() != NULL, "Constant pool with no holder");
        guarantee (!loader_data->is_unloading(), "unloaded classes can't be on the stack");
        live_count++;
      }

      // At least one method is live in this previous version so clean its MethodData.
      // Reset dead EMCP methods not to get breakpoints.
      // All methods are deallocated when all of the methods for this class are no
      // longer running.
      Array<Method*>* method_refs = pv_node->methods();
      if (method_refs != NULL) {
        RC_TRACE(0x00000200, ("purge: previous methods length=%d",
          method_refs->length()));
        for (int j = 0; j < method_refs->length(); j++) {
          Method* method = method_refs->at(j);

          if (!method->on_stack()) {
            // no breakpoints for non-running methods
            if (method->is_running_emcp()) {
              method->set_running_emcp(false);
            }
          } else {
            assert (method->is_obsolete() || method->is_running_emcp(),
                    "emcp method cannot run after emcp bit is cleared");
            // RC_TRACE macro has an embedded ResourceMark
            RC_TRACE(0x00000200,
              ("purge: %s(%s): prev method @%d in version @%d is alive",
              method->name()->as_C_string(),
              method->signature()->as_C_string(), j, version));
            if (method->method_data() != NULL) {
              // Clean out any weak method links for running methods
              // (also should include not EMCP methods)
              method->method_data()->clean_weak_method_links();
            }
          }
        }
      }
      // next previous version
      last = pv_node;
      pv_node = pv_node->previous_versions();
      version++;
    }
    RC_TRACE(0x00000200,
      ("purge: previous version stats: live=%d, deleted=%d", live_count,
      deleted_count));
  }

  // Clean MethodData of this class's methods so they don't refer to
  // old methods that are no longer running.
  Array<Method*>* methods = ik->methods();
  int num_methods = methods->length();
  for (int index2 = 0; index2 < num_methods; ++index2) {
    if (methods->at(index2)->method_data() != NULL) {
      methods->at(index2)->method_data()->clean_weak_method_links();
    }
  }
}

void InstanceKlass::mark_newly_obsolete_methods(Array<Method*>* old_methods,
                                                int emcp_method_count) {
  int obsolete_method_count = old_methods->length() - emcp_method_count;

  if (emcp_method_count != 0 && obsolete_method_count != 0 &&
      _previous_versions != NULL) {
    // We have a mix of obsolete and EMCP methods so we have to
    // clear out any matching EMCP method entries the hard way.
    int local_count = 0;
    for (int i = 0; i < old_methods->length(); i++) {
      Method* old_method = old_methods->at(i);
      if (old_method->is_obsolete()) {
        // only obsolete methods are interesting
        Symbol* m_name = old_method->name();
        Symbol* m_signature = old_method->signature();

        // previous versions are linked together through the InstanceKlass
        int j = 0;
        for (InstanceKlass* prev_version = _previous_versions;
             prev_version != NULL;
             prev_version = prev_version->previous_versions(), j++) {

          Array<Method*>* method_refs = prev_version->methods();
          for (int k = 0; k < method_refs->length(); k++) {
            Method* method = method_refs->at(k);

            if (!method->is_obsolete() &&
                method->name() == m_name &&
                method->signature() == m_signature) {
              // The current RedefineClasses() call has made all EMCP
              // versions of this method obsolete so mark it as obsolete
              RC_TRACE(0x00000400,
                ("add: %s(%s): flush obsolete method @%d in version @%d",
                m_name->as_C_string(), m_signature->as_C_string(), k, j));

              method->set_is_obsolete();
              break;
            }
          }

          // The previous loop may not find a matching EMCP method, but
          // that doesn't mean that we can optimize and not go any
          // further back in the PreviousVersion generations. The EMCP
          // method for this generation could have already been made obsolete,
          // but there still may be an older EMCP method that has not
          // been made obsolete.
        }

        if (++local_count >= obsolete_method_count) {
          // no more obsolete methods so bail out now
          break;
        }
      }
    }
  }
}

// Save the scratch_class as the previous version if any of the methods are running.
// The previous_versions are used to set breakpoints in EMCP methods and they are
// also used to clean MethodData links to redefined methods that are no longer running.
void InstanceKlass::add_previous_version(instanceKlassHandle scratch_class,
                                         int emcp_method_count) {
  assert(Thread::current()->is_VM_thread(),
         "only VMThread can add previous versions");

  // RC_TRACE macro has an embedded ResourceMark
  RC_TRACE(0x00000400, ("adding previous version ref for %s, EMCP_cnt=%d",
    scratch_class->external_name(), emcp_method_count));

  // Clean out old previous versions
  purge_previous_versions(this);

  // Mark newly obsolete methods in remaining previous versions.  An EMCP method from
  // a previous redefinition may be made obsolete by this redefinition.
  Array<Method*>* old_methods = scratch_class->methods();
  mark_newly_obsolete_methods(old_methods, emcp_method_count);

  // If the constant pool for this previous version of the class
  // is not marked as being on the stack, then none of the methods
  // in this previous version of the class are on the stack so
  // we don't need to add this as a previous version.
  ConstantPool* cp_ref = scratch_class->constants();
  if (!cp_ref->on_stack()) {
    RC_TRACE(0x00000400, ("add: scratch class not added; no methods are running"));
    return;
  }

  if (emcp_method_count != 0) {
    // At least one method is still running, check for EMCP methods
    for (int i = 0; i < old_methods->length(); i++) {
      Method* old_method = old_methods->at(i);
      if (!old_method->is_obsolete() && old_method->on_stack()) {
        // if EMCP method (not obsolete) is on the stack, mark as EMCP so that
        // we can add breakpoints for it.

        // We set the method->on_stack bit during safepoints for class redefinition and
        // class unloading and use this bit to set the is_running_emcp bit.
        // After the safepoint, the on_stack bit is cleared and the running emcp
        // method may exit.   If so, we would set a breakpoint in a method that
        // is never reached, but this won't be noticeable to the programmer.
        old_method->set_running_emcp(true);
        RC_TRACE(0x00000400, ("add: EMCP method %s is on_stack " INTPTR_FORMAT,
                              old_method->name_and_sig_as_C_string(), old_method));
      } else if (!old_method->is_obsolete()) {
        RC_TRACE(0x00000400, ("add: EMCP method %s is NOT on_stack " INTPTR_FORMAT,
                              old_method->name_and_sig_as_C_string(), old_method));
      }
    }
  }

  // Add previous version if any methods are still running.
  RC_TRACE(0x00000400, ("add: scratch class added; one of its methods is on_stack"));
  assert(scratch_class->previous_versions() == NULL, "shouldn't have a previous version");
  scratch_class->link_previous_versions(previous_versions());
  link_previous_versions(scratch_class());
} // end add_previous_version()

Method* InstanceKlass::method_with_idnum(int idnum) {
  Method* m = NULL;
  if (idnum < methods()->length()) {
    m = methods()->at(idnum);
  }
  if (m == NULL || m->method_idnum() != idnum) {
    for (int index = 0; index < methods()->length(); ++index) {
      m = methods()->at(index);
      if (m->method_idnum() == idnum) {
        return m;
      }
    }
    // None found, return null for the caller to handle.
    return NULL;
  }
  return m;
}


Method* InstanceKlass::method_with_orig_idnum(int idnum) {
  if (idnum >= methods()->length()) {
    return NULL;
  }
  Method* m = methods()->at(idnum);
  if (m != NULL && m->orig_method_idnum() == idnum) {
    return m;
  }
  // Obsolete method idnum does not match the original idnum
  for (int index = 0; index < methods()->length(); ++index) {
    m = methods()->at(index);
    if (m->orig_method_idnum() == idnum) {
      return m;
    }
  }
  // None found, return null for the caller to handle.
  return NULL;
}


Method* InstanceKlass::method_with_orig_idnum(int idnum, int version) {
  InstanceKlass* holder = get_klass_version(version);
  if (holder == NULL) {
    return NULL; // The version of klass is gone, no method is found
  }
  Method* method = holder->method_with_orig_idnum(idnum);
  return method;
}


jint InstanceKlass::get_cached_class_file_len() {
  return VM_RedefineClasses::get_cached_class_file_len(_cached_class_file);
}

unsigned char * InstanceKlass::get_cached_class_file_bytes() {
  return VM_RedefineClasses::get_cached_class_file_bytes(_cached_class_file);
}
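
That is the end of the HotSpot `InstanceKlass` excerpt. As a contrast to the two-thread deadlock analysed above, here is a minimal single-threaded sketch (the class names `C1` and `C2` are made up for illustration). Per JLS §12.4.2, when a thread requests initialization of a class that it is itself already initializing, the request is treated as recursive and returns immediately, so a circular `<clinit>` chain driven by one thread completes instead of hanging:

```java
public class SingleThreadCircularInit {
    public static void main(String[] args) {
        // One thread drives both initializations, so the circular
        // reference below completes instead of deadlocking.
        new C1();
        System.out.println("done");
    }
}

class C1 {
    static {
        System.out.println("C1 <clinit>");
        new C2();   // triggers C2's initialization on the same thread
    }
}

class C2 {
    static {
        System.out.println("C2 <clinit>");
        new C1();   // C1 is already being initialized by this very thread,
                    // so the JVM treats this as a recursive request and
                    // returns without waiting (JLS 12.4.2, step 3)
    }
}
```

Running it prints `C1 <clinit>`, `C2 <clinit>` and `done` once each. The hang only appears when the two initializations are started on different threads, because each thread then blocks waiting for the other class to leave the being_initialized state, which never happens.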

  • 1497
  • 1498
  • 1499
  • 1500
  • 1501
  • 1502
  • 1503
  • 1504
  • 1505
  • 1506
  • 1507
  • 1508
  • 1509
  • 1510
  • 1511
  • 1512
  • 1513
  • 1514
  • 1515
  • 1516
  • 1517
  • 1518
  • 1519
  • 1520
  • 1521
  • 1522
  • 1523
  • 1524
  • 1525
  • 1526
  • 1527
  • 1528
  • 1529
  • 1530
  • 1531
  • 1532
  • 1533
  • 1534
  • 1535
  • 1536
  • 1537
  • 1538
  • 1539
  • 1540
  • 1541
  • 1542
  • 1543
  • 1544
  • 1545
  • 1546
  • 1547
  • 1548
  • 1549
  • 1550
  • 1551
  • 1552
  • 1553
  • 1554
  • 1555
  • 1556
  • 1557
  • 1558
  • 1559
  • 1560
  • 1561
  • 1562
  • 1563
  • 1564
  • 1565
  • 1566
  • 1567
  • 1568
  • 1569
  • 1570
  • 1571
  • 1572
  • 1573
  • 1574
  • 1575
  • 1576
  • 1577
  • 1578
  • 1579
  • 1580
  • 1581
  • 1582
  • 1583
  • 1584
  • 1585
  • 1586
  • 1587
  • 1588
  • 1589
  • 1590
  • 1591
  • 1592
  • 1593
  • 1594
  • 1595
  • 1596
  • 1597
  • 1598
  • 1599
  • 1600
  • 1601
  • 1602
  • 1603
  • 1604
  • 1605
  • 1606
  • 1607
  • 1608
  • 1609
  • 1610
  • 1611
  • 1612
  • 1613
  • 1614
  • 1615
  • 1616
  • 1617
  • 1618
  • 1619
  • 1620
  • 1621
  • 1622
  • 1623
  • 1624
  • 1625
  • 1626
  • 1627
  • 1628
  • 1629
  • 1630
  • 1631
  • 1632
  • 1633
  • 1634
  • 1635
  • 1636
  • 1637
  • 1638
  • 1639
  • 1640
  • 1641
  • 1642
  • 1643
  • 1644
  • 1645
  • 1646
  • 1647
  • 1648
  • 1649
  • 1650
  • 1651
  • 1652
  • 1653
  • 1654
  • 1655
  • 1656
  • 1657
  • 1658
  • 1659
  • 1660
  • 1661
  • 1662
  • 1663
  • 1664
  • 1665
  • 1666
  • 1667
  • 1668
  • 1669
  • 1670
  • 1671
  • 1672
  • 1673
  • 1674
  • 1675
  • 1676
  • 1677
  • 1678
  • 1679
  • 1680
  • 1681
  • 1682
  • 1683
  • 1684
  • 1685
  • 1686
  • 1687
  • 1688
  • 1689
  • 1690
  • 1691
  • 1692
  • 1693
  • 1694
  • 1695
  • 1696
  • 1697
  • 1698
  • 1699
  • 1700
  • 1701
  • 1702
  • 1703
  • 1704
  • 1705
  • 1706
  • 1707
  • 1708
  • 1709
  • 1710
  • 1711
  • 1712
  • 1713
  • 1714
  • 1715
  • 1716
  • 1717
  • 1718
  • 1719
  • 1720
  • 1721
  • 1722
  • 1723
  • 1724
  • 1725
  • 1726
  • 1727
  • 1728
  • 1729
  • 1730
  • 1731
  • 1732
  • 1733
  • 1734
  • 1735
  • 1736
  • 1737
  • 1738
  • 1739
  • 1740
  • 1741
  • 1742
  • 1743
  • 1744
  • 1745
  • 1746
  • 1747
  • 1748
  • 1749
  • 1750
  • 1751
  • 1752
  • 1753
  • 1754
  • 1755
  • 1756
  • 1757
  • 1758
  • 1759
  • 1760
  • 1761
  • 1762
  • 1763
  • 1764
  • 1765
  • 1766
  • 1767
  • 1768
  • 1769
  • 1770
  • 1771
  • 1772
  • 1773
  • 1774
  • 1775
  • 1776
  • 1777
  • 1778
  • 1779
  • 1780
  • 1781
  • 1782
  • 1783
  • 1784
  • 1785
  • 1786
  • 1787
  • 1788
  • 1789
  • 1790
  • 1791
  • 1792
  • 1793
  • 1794
  • 1795
  • 1796
  • 1797
  • 1798
  • 1799
  • 1800
  • 1801
  • 1802
  • 1803
  • 1804
  • 1805
  • 1806
  • 1807
  • 1808
  • 1809
  • 1810
  • 1811
  • 1812
  • 1813
  • 1814
  • 1815
  • 1816
  • 1817
  • 1818
  • 1819
  • 1820
  • 1821
  • 1822
  • 1823
  • 1824
  • 1825
  • 1826
  • 1827
  • 1828
  • 1829
  • 1830
  • 1831
  • 1832
  • 1833
  • 1834
  • 1835
  • 1836
  • 1837
  • 1838
  • 1839
  • 1840
  • 1841
  • 1842
  • 1843
  • 1844
  • 1845
  • 1846
  • 1847
  • 1848
  • 1849
  • 1850
  • 1851
  • 1852
  • 1853
  • 1854
  • 1855
  • 1856
  • 1857
  • 1858
  • 1859
  • 1860
  • 1861
  • 1862
  • 1863
  • 1864
  • 1865
  • 1866
  • 1867
  • 1868
  • 1869
  • 1870
  • 1871
  • 1872
  • 1873
  • 1874
  • 1875
  • 1876
  • 1877
  • 1878
  • 1879
  • 1880
  • 1881
  • 1882
  • 1883
  • 1884
  • 1885
  • 1886
  • 1887
  • 1888
  • 1889
  • 1890
  • 1891
  • 1892
  • 1893
  • 1894
  • 1895
  • 1896
  • 1897
  • 1898
  • 1899
  • 1900
  • 1901
  • 1902
  • 1903
  • 1904
  • 1905
  • 1906
  • 1907
  • 1908
  • 1909
  • 1910
  • 1911
  • 1912
  • 1913
  • 1914
  • 1915
  • 1916
  • 1917
  • 1918
  • 1919
  • 1920
  • 1921
  • 1922
  • 1923
  • 1924
  • 1925
  • 1926
  • 1927
  • 1928
  • 1929
  • 1930
  • 1931
  • 1932
  • 1933
  • 1934
  • 1935
  • 1936
  • 1937
  • 1938
  • 1939
  • 1940
  • 1941
  • 1942
  • 1943
  • 1944
  • 1945
  • 1946
  • 1947
  • 1948
  • 1949
  • 1950
  • 1951
  • 1952
  • 1953
  • 1954
  • 1955
  • 1956
  • 1957
  • 1958
  • 1959
  • 1960
  • 1961
  • 1962
  • 1963
  • 1964
  • 1965
  • 1966
  • 1967
  • 1968
  • 1969
  • 1970
  • 1971
  • 1972
  • 1973
  • 1974
  • 1975
  • 1976
  • 1977
  • 1978
  • 1979
  • 1980
  • 1981
  • 1982
  • 1983
  • 1984
  • 1985
  • 1986
  • 1987
  • 1988
  • 1989
  • 1990
  • 1991
  • 1992
  • 1993
  • 1994
  • 1995
  • 1996
  • 1997
  • 1998
  • 1999
  • 2000
  • 2001
  • 2002
  • 2003
  • 2004
  • 2005
  • 2006
  • 2007
  • 2008
  • 2009
  • 2010
  • 2011
  • 2012
  • 2013
  • 2014
  • 2015
  • 2016
  • 2017
  • 2018
  • 2019
  • 2020
  • 2021
  • 2022
  • 2023
  • 2024
  • 2025
  • 2026
  • 2027
  • 2028
  • 2029
  • 2030
  • 2031
  • 2032
  • 2033
  • 2034
  • 2035
  • 2036
  • 2037
  • 2038
  • 2039
  • 2040
  • 2041
  • 2042
  • 2043
  • 2044
  • 2045
  • 2046
  • 2047
  • 2048
  • 2049
  • 2050
  • 2051
  • 2052
  • 2053
  • 2054
  • 2055
  • 2056
  • 2057
  • 2058
  • 2059
  • 2060
  • 2061
  • 2062
  • 2063
  • 2064
  • 2065
  • 2066
  • 2067
  • 2068
  • 2069
  • 2070
  • 2071
  • 2072
  • 2073
  • 2074
  • 2075
  • 2076
  • 2077
  • 2078
  • 2079
  • 2080
  • 2081
  • 2082
  • 2083
  • 2084
  • 2085
  • 2086
  • 2087
  • 2088
  • 2089
  • 2090
  • 2091
  • 2092
  • 2093
  • 2094
  • 2095
  • 2096
  • 2097
  • 2098
  • 2099
  • 2100
  • 2101
  • 2102
  • 2103
  • 2104
  • 2105
  • 2106
  • 2107
  • 2108
  • 2109
  • 2110
  • 2111
  • 2112
  • 2113
  • 2114
  • 2115
  • 2116
  • 2117
  • 2118
  • 2119
  • 2120
  • 2121
  • 2122
  • 2123
  • 2124
  • 2125
  • 2126
  • 2127
  • 2128
  • 2129
  • 2130
  • 2131
  • 2132
  • 2133
  • 2134
  • 2135
  • 2136
  • 2137
  • 2138
  • 2139
  • 2140
  • 2141
  • 2142
  • 2143
  • 2144
  • 2145
  • 2146
  • 2147
  • 2148
  • 2149
  • 2150
  • 2151
  • 2152
  • 2153
  • 2154
  • 2155
  • 2156
  • 2157
  • 2158
  • 2159
  • 2160
  • 2161
  • 2162
  • 2163
  • 2164
  • 2165
  • 2166
  • 2167
  • 2168
  • 2169
  • 2170
  • 2171
  • 2172
  • 2173
  • 2174
  • 2175
  • 2176
  • 2177
  • 2178
  • 2179
  • 2180
  • 2181
  • 2182
  • 2183
  • 2184
  • 2185
  • 2186
  • 2187
  • 2188
  • 2189
  • 2190
  • 2191
  • 2192
  • 2193
  • 2194
  • 2195
  • 2196
  • 2197
  • 2198
  • 2199
  • 2200
  • 2201
  • 2202
  • 2203
  • 2204
  • 2205
  • 2206
  • 2207
  • 2208
  • 2209
  • 2210
  • 2211
  • 2212
  • 2213
  • 2214
  • 2215
  • 2216
  • 2217
  • 2218
  • 2219
  • 2220
  • 2221
  • 2222
  • 2223
  • 2224
  • 2225
  • 2226
  • 2227
  • 2228
  • 2229
  • 2230
  • 2231
  • 2232
  • 2233
  • 2234
  • 2235
  • 2236
  • 2237
  • 2238
  • 2239
  • 2240
  • 2241
  • 2242
  • 2243
  • 2244
  • 2245
  • 2246
  • 2247
  • 2248
  • 2249
  • 2250
  • 2251
  • 2252
  • 2253
  • 2254
  • 2255
  • 2256
  • 2257
  • 2258
  • 2259
  • 2260
  • 2261
  • 2262
  • 2263
  • 2264
  • 2265
  • 2266
  • 2267
  • 2268
  • 2269
  • 2270
  • 2271
  • 2272
  • 2273
  • 2274
  • 2275
  • 2276
  • 2277
  • 2278
  • 2279
  • 2280
  • 2281
  • 2282
  • 2283
  • 2284
  • 2285
  • 2286
  • 2287
  • 2288
  • 2289
  • 2290
  • 2291
  • 2292
  • 2293
  • 2294
  • 2295
  • 2296
  • 2297
  • 2298
  • 2299
  • 2300
  • 2301
  • 2302
  • 2303
  • 2304
  • 2305
  • 2306
  • 2307
  • 2308
  • 2309
  • 2310
  • 2311
  • 2312
  • 2313
  • 2314
  • 2315
  • 2316
  • 2317
  • 2318
  • 2319
  • 2320
  • 2321
  • 2322
  • 2323
  • 2324
  • 2325
  • 2326
  • 2327
  • 2328
  • 2329
  • 2330
  • 2331
  • 2332
  • 2333
  • 2334
  • 2335
  • 2336
  • 2337
  • 2338
  • 2339
  • 2340
  • 2341
  • 2342
  • 2343
  • 2344
  • 2345
  • 2346
  • 2347
  • 2348
  • 2349
  • 2350
  • 2351
  • 2352
  • 2353
  • 2354
  • 2355
  • 2356
  • 2357
  • 2358
  • 2359
  • 2360
  • 2361
  • 2362
  • 2363
  • 2364
  • 2365
  • 2366
  • 2367
  • 2368
  • 2369
  • 2370
  • 2371
  • 2372
  • 2373
  • 2374
  • 2375
  • 2376
  • 2377
  • 2378
  • 2379
  • 2380
  • 2381
  • 2382
  • 2383
  • 2384
  • 2385
  • 2386
  • 2387
  • 2388
  • 2389
  • 2390
  • 2391
  • 2392
  • 2393
  • 2394
  • 2395
  • 2396
  • 2397
  • 2398
  • 2399
  • 2400
  • 2401
  • 2402
  • 2403
  • 2404
  • 2405
  • 2406
  • 2407
  • 2408
  • 2409
  • 2410
  • 2411
  • 2412
  • 2413
  • 2414
  • 2415
  • 2416
  • 2417
  • 2418
  • 2419
  • 2420
  • 2421
  • 2422
  • 2423
  • 2424
  • 2425
  • 2426
  • 2427
  • 2428
  • 2429
  • 2430
  • 2431
  • 2432
  • 2433
  • 2434
  • 2435
  • 2436
  • 2437
  • 2438
  • 2439
  • 2440
  • 2441
  • 2442
  • 2443
  • 2444
  • 2445
  • 2446
  • 2447
  • 2448
  • 2449
  • 2450
  • 2451
  • 2452
  • 2453
  • 2454
  • 2455
  • 2456
  • 2457
  • 2458
  • 2459
  • 2460
  • 2461
  • 2462
  • 2463
  • 2464
  • 2465
  • 2466
  • 2467
  • 2468
  • 2469
  • 2470
  • 2471
  • 2472
  • 2473
  • 2474
  • 2475
  • 2476
  • 2477
  • 2478
  • 2479
  • 2480
  • 2481
  • 2482
  • 2483
  • 2484
  • 2485
  • 2486
  • 2487
  • 2488
  • 2489
  • 2490
  • 2491
  • 2492
  • 2493
  • 2494
  • 2495
  • 2496
  • 2497
  • 2498
  • 2499
  • 2500
  • 2501
  • 2502
  • 2503
  • 2504
  • 2505
  • 2506
  • 2507
  • 2508
  • 2509
  • 2510
  • 2511
  • 2512
  • 2513
  • 2514
  • 2515
  • 2516
  • 2517
  • 2518
  • 2519
  • 2520
  • 2521
  • 2522
  • 2523
  • 2524
  • 2525
  • 2526
  • 2527
  • 2528
  • 2529
  • 2530
  • 2531
  • 2532
  • 2533
  • 2534
  • 2535
  • 2536
  • 2537
  • 2538
  • 2539
  • 2540
  • 2541
  • 2542
  • 2543
  • 2544
  • 2545
  • 2546
  • 2547
  • 2548
  • 2549
  • 2550
  • 2551
  • 2552
  • 2553
  • 2554
  • 2555
  • 2556
  • 2557
  • 2558
  • 2559
  • 2560
  • 2561
  • 2562
  • 2563
  • 2564
  • 2565
  • 2566
  • 2567
  • 2568
  • 2569
  • 2570
  • 2571
  • 2572
  • 2573
  • 2574
  • 2575
  • 2576
  • 2577
  • 2578
  • 2579
  • 2580
  • 2581
  • 2582
  • 2583
  • 2584
  • 2585
  • 2586
  • 2587
  • 2588
  • 2589
  • 2590
  • 2591
  • 2592
  • 2593
  • 2594
  • 2595
  • 2596
  • 2597
  • 2598
  • 2599
  • 2600
  • 2601
  • 2602
  • 2603
  • 2604
  • 2605
  • 2606
  • 2607
  • 2608
  • 2609
  • 2610
  • 2611
  • 2612
  • 2613
  • 2614
  • 2615
  • 2616
  • 2617
  • 2618
  • 2619
  • 2620
  • 2621
  • 2622
  • 2623
  • 2624
  • 2625
  • 2626
  • 2627
  • 2628
  • 2629
  • 2630
  • 2631
  • 2632
  • 2633
  • 2634
  • 2635
  • 2636
  • 2637
  • 2638
  • 2639
  • 2640
  • 2641
  • 2642
  • 2643
  • 2644
  • 2645
  • 2646
  • 2647
  • 2648
  • 2649
  • 2650
  • 2651
  • 2652
  • 2653
  • 2654
  • 2655
  • 2656
  • 2657
  • 2658
  • 2659
  • 2660
  • 2661
  • 2662
  • 2663
  • 2664
  • 2665
  • 2666
  • 2667
  • 2668
  • 2669
  • 2670
  • 2671
  • 2672
  • 2673
  • 2674
  • 2675
  • 2676
  • 2677
  • 2678
  • 2679
  • 2680
  • 2681
  • 2682
  • 2683
  • 2684
  • 2685
  • 2686
  • 2687
  • 2688
  • 2689
  • 2690
  • 2691
  • 2692
  • 2693
  • 2694
  • 2695
  • 2696
  • 2697
  • 2698
  • 2699
  • 2700
  • 2701
  • 2702
  • 2703
  • 2704
  • 2705
  • 2706
  • 2707
  • 2708
  • 2709
  • 2710
  • 2711
  • 2712
  • 2713
  • 2714
  • 2715
  • 2716
  • 2717
  • 2718
  • 2719
  • 2720
  • 2721
  • 2722
  • 2723
  • 2724
  • 2725
  • 2726
  • 2727
  • 2728
  • 2729
  • 2730
  • 2731
  • 2732
  • 2733
  • 2734
  • 2735
  • 2736
  • 2737
  • 2738
  • 2739
  • 2740
  • 2741
  • 2742
  • 2743
  • 2744
  • 2745
  • 2746
  • 2747
  • 2748
  • 2749
  • 2750
  • 2751
  • 2752
  • 2753
  • 2754
  • 2755
  • 2756
  • 2757
  • 2758
  • 2759
  • 2760
  • 2761
  • 2762
  • 2763
  • 2764
  • 2765
  • 2766
  • 2767
  • 2768
  • 2769
  • 2770
  • 2771
  • 2772
  • 2773
  • 2774
  • 2775
  • 2776
  • 2777
  • 2778
  • 2779
  • 2780
  • 2781
  • 2782
  • 2783
  • 2784
  • 2785
  • 2786
  • 2787
  • 2788
  • 2789
  • 2790
  • 2791
  • 2792
  • 2793
  • 2794
  • 2795
  • 2796
  • 2797
  • 2798
  • 2799
  • 2800
  • 2801
  • 2802
  • 2803
  • 2804
  • 2805
  • 2806
  • 2807
  • 2808
  • 2809
  • 2810
  • 2811
  • 2812
  • 2813
  • 2814
  • 2815
  • 2816
  • 2817
  • 2818
  • 2819
  • 2820
  • 2821
  • 2822
  • 2823
  • 2824
  • 2825
  • 2826
  • 2827
  • 2828
  • 2829
  • 2830
  • 2831
  • 2832
  • 2833
  • 2834
  • 2835
  • 2836
  • 2837
  • 2838
  • 2839
  • 2840
  • 2841
  • 2842
  • 2843
  • 2844
  • 2845
  • 2846
  • 2847
  • 2848
  • 2849
  • 2850
  • 2851
  • 2852
  • 2853
  • 2854
  • 2855
  • 2856
  • 2857
  • 2858
  • 2859
  • 2860
  • 2861
  • 2862
  • 2863
  • 2864
  • 2865
  • 2866
  • 2867
  • 2868
  • 2869
  • 2870
  • 2871
  • 2872
  • 2873
  • 2874
  • 2875
  • 2876
  • 2877
  • 2878
  • 2879
  • 2880
  • 2881
  • 2882
  • 2883
  • 2884
  • 2885
  • 2886
  • 2887
  • 2888
  • 2889
  • 2890
  • 2891
  • 2892
  • 2893
  • 2894
  • 2895
  • 2896
  • 2897
  • 2898
  • 2899
  • 2900
  • 2901
  • 2902
  • 2903
  • 2904
  • 2905
  • 2906
  • 2907
  • 2908
  • 2909
  • 2910
  • 2911
  • 2912
  • 2913
  • 2914
  • 2915
  • 2916
  • 2917
  • 2918
  • 2919
  • 2920
  • 2921
  • 2922
  • 2923
  • 2924
  • 2925
  • 2926
  • 2927
  • 2928
  • 2929
  • 2930
  • 2931
  • 2932
  • 2933
  • 2934
  • 2935
  • 2936
  • 2937
  • 2938
  • 2939
  • 2940
  • 2941
  • 2942
  • 2943
  • 2944
  • 2945
  • 2946
  • 2947
  • 2948
  • 2949
  • 2950
  • 2951
  • 2952
  • 2953
  • 2954
  • 2955
  • 2956
  • 2957
  • 2958
  • 2959
  • 2960
  • 2961
  • 2962
  • 2963
  • 2964
  • 2965
  • 2966
  • 2967
  • 2968
  • 2969
  • 2970
  • 2971
  • 2972
  • 2973
  • 2974
  • 2975
  • 2976
  • 2977
  • 2978
  • 2979
  • 2980
  • 2981
  • 2982
  • 2983
  • 2984
  • 2985
  • 2986
  • 2987
  • 2988
  • 2989
  • 2990
  • 2991
  • 2992
  • 2993
  • 2994
  • 2995
  • 2996
  • 2997
  • 2998
  • 2999
  • 3000
  • 3001
  • 3002
  • 3003
  • 3004
  • 3005
  • 3006
  • 3007
  • 3008
  • 3009
  • 3010
  • 3011
  • 3012
  • 3013
  • 3014
  • 3015
  • 3016
  • 3017
  • 3018
  • 3019
  • 3020
  • 3021
  • 3022
  • 3023
  • 3024
  • 3025
  • 3026
  • 3027
  • 3028
  • 3029
  • 3030
  • 3031
  • 3032
  • 3033
  • 3034
  • 3035
  • 3036
  • 3037
  • 3038
  • 3039
  • 3040
  • 3041
  • 3042
  • 3043
  • 3044
  • 3045
  • 3046
  • 3047
  • 3048
  • 3049
  • 3050
  • 3051
  • 3052
  • 3053
  • 3054
  • 3055
  • 3056
  • 3057
  • 3058
  • 3059
  • 3060
  • 3061
  • 3062
  • 3063
  • 3064
  • 3065
  • 3066
  • 3067
  • 3068
  • 3069
  • 3070
  • 3071
  • 3072
  • 3073
  • 3074
  • 3075
  • 3076
  • 3077
  • 3078
  • 3079
  • 3080
  • 3081
  • 3082
  • 3083
  • 3084
  • 3085
  • 3086
  • 3087
  • 3088
  • 3089
  • 3090
  • 3091
  • 3092
  • 3093
  • 3094
  • 3095
  • 3096
  • 3097
  • 3098
  • 3099
  • 3100
  • 3101
  • 3102
  • 3103
  • 3104
  • 3105
  • 3106
  • 3107
  • 3108
  • 3109
  • 3110
  • 3111
  • 3112
  • 3113
  • 3114
  • 3115
  • 3116
  • 3117
  • 3118
  • 3119
  • 3120
  • 3121
  • 3122
  • 3123
  • 3124
  • 3125
  • 3126
  • 3127
  • 3128
  • 3129
  • 3130
  • 3131
  • 3132
  • 3133
  • 3134
  • 3135
  • 3136
  • 3137
  • 3138
  • 3139
  • 3140
  • 3141
  • 3142
  • 3143
  • 3144
  • 3145
  • 3146
  • 3147
  • 3148
  • 3149
  • 3150
  • 3151
  • 3152
  • 3153
  • 3154
  • 3155
  • 3156
  • 3157
  • 3158
  • 3159
  • 3160
  • 3161
  • 3162
  • 3163
  • 3164
  • 3165
  • 3166
  • 3167
  • 3168
  • 3169
  • 3170
  • 3171
  • 3172
  • 3173
  • 3174
  • 3175
  • 3176
  • 3177
  • 3178
  • 3179
  • 3180
  • 3181
  • 3182
  • 3183
  • 3184
  • 3185
  • 3186
  • 3187
  • 3188
  • 3189
  • 3190
  • 3191
  • 3192
  • 3193
  • 3194
  • 3195
  • 3196
  • 3197
  • 3198
  • 3199
  • 3200
  • 3201
  • 3202
  • 3203
  • 3204
  • 3205
  • 3206
  • 3207
  • 3208
  • 3209
  • 3210
  • 3211
  • 3212
  • 3213
  • 3214
  • 3215
  • 3216
  • 3217
  • 3218
  • 3219
  • 3220
  • 3221
  • 3222
  • 3223
  • 3224
  • 3225
  • 3226
  • 3227
  • 3228
  • 3229
  • 3230
  • 3231
  • 3232
  • 3233
  • 3234
  • 3235
  • 3236
  • 3237
  • 3238
  • 3239
  • 3240
  • 3241
  • 3242
  • 3243
  • 3244
  • 3245
  • 3246
  • 3247
  • 3248
  • 3249
  • 3250
  • 3251
  • 3252
  • 3253
  • 3254
  • 3255
  • 3256
  • 3257
  • 3258
  • 3259
  • 3260
  • 3261
  • 3262
  • 3263
  • 3264
  • 3265
  • 3266
  • 3267
  • 3268
  • 3269
  • 3270
  • 3271
  • 3272
  • 3273
  • 3274
  • 3275
  • 3276
  • 3277
  • 3278
  • 3279
  • 3280
  • 3281
  • 3282
  • 3283
  • 3284
  • 3285
  • 3286
  • 3287
  • 3288
  • 3289
  • 3290
  • 3291
  • 3292
  • 3293
  • 3294
  • 3295
  • 3296
  • 3297
  • 3298
  • 3299
  • 3300
  • 3301
  • 3302
  • 3303
  • 3304
  • 3305
  • 3306
  • 3307
  • 3308
  • 3309
  • 3310
  • 3311
  • 3312
  • 3313
  • 3314
  • 3315
  • 3316
  • 3317
  • 3318
  • 3319
  • 3320
  • 3321
  • 3322
  • 3323
  • 3324
  • 3325
  • 3326
  • 3327
  • 3328
  • 3329
  • 3330
  • 3331
  • 3332
  • 3333
  • 3334
  • 3335
  • 3336
  • 3337
  • 3338
  • 3339
  • 3340
  • 3341
  • 3342
  • 3343
  • 3344
  • 3345
  • 3346
  • 3347
  • 3348
  • 3349
  • 3350
  • 3351
  • 3352
  • 3353
  • 3354
  • 3355
  • 3356
  • 3357
  • 3358
  • 3359
  • 3360
  • 3361
  • 3362
  • 3363
  • 3364
  • 3365
  • 3366
  • 3367
  • 3368
  • 3369
  • 3370
  • 3371
  • 3372
  • 3373
  • 3374
  • 3375
  • 3376
  • 3377
  • 3378
  • 3379
  • 3380
  • 3381
  • 3382
  • 3383
  • 3384
  • 3385
  • 3386
  • 3387
  • 3388
  • 3389
  • 3390
  • 3391
  • 3392
  • 3393
  • 3394
  • 3395
  • 3396
  • 3397
  • 3398
  • 3399
  • 3400
  • 3401
  • 3402
  • 3403
  • 3404
  • 3405
  • 3406
  • 3407
  • 3408
  • 3409
  • 3410
  • 3411
  • 3412
  • 3413
  • 3414
  • 3415
  • 3416
  • 3417
  • 3418
  • 3419
  • 3420
  • 3421
  • 3422
  • 3423
  • 3424
  • 3425
  • 3426
  • 3427
  • 3428
  • 3429
  • 3430
  • 3431
  • 3432
  • 3433
  • 3434
  • 3435
  • 3436
  • 3437
  • 3438
  • 3439
  • 3440
  • 3441
  • 3442
  • 3443
  • 3444
  • 3445
  • 3446
  • 3447
  • 3448
  • 3449
  • 3450
  • 3451
  • 3452
  • 3453
  • 3454
  • 3455
  • 3456
  • 3457
  • 3458
  • 3459
  • 3460
  • 3461
  • 3462
  • 3463
  • 3464
  • 3465
  • 3466
  • 3467
  • 3468
  • 3469
  • 3470
  • 3471
  • 3472
  • 3473
  • 3474
  • 3475
  • 3476
  • 3477
  • 3478
  • 3479
  • 3480
  • 3481
  • 3482
  • 3483
  • 3484
  • 3485
  • 3486
  • 3487
  • 3488
  • 3489
  • 3490
  • 3491
  • 3492
  • 3493
  • 3494
  • 3495
  • 3496
  • 3497
  • 3498
  • 3499
  • 3500
  • 3501
  • 3502
  • 3503
  • 3504
  • 3505
  • 3506
  • 3507
  • 3508
  • 3509
  • 3510
  • 3511
  • 3512
  • 3513
  • 3514
  • 3515
  • 3516
  • 3517
  • 3518
  • 3519
  • 3520
  • 3521
  • 3522
  • 3523
  • 3524
  • 3525
  • 3526
  • 3527
  • 3528
  • 3529
  • 3530
  • 3531
  • 3532
  • 3533
  • 3534
  • 3535
  • 3536
  • 3537
  • 3538
  • 3539
  • 3540
  • 3541
  • 3542
  • 3543
  • 3544
  • 3545
  • 3546
  • 3547
  • 3548
  • 3549
  • 3550
  • 3551
  • 3552
  • 3553
  • 3554
  • 3555
  • 3556
  • 3557
  • 3558
  • 3559
  • 3560
  • 3561
  • 3562
  • 3563
  • 3564
  • 3565
  • 3566
  • 3567
  • 3568
  • 3569
  • 3570
  • 3571
  • 3572
  • 3573
  • 3574
  • 3575
  • 3576
  • 3577
  • 3578
  • 3579
  • 3580
  • 3581
  • 3582
  • 3583
  • 3584
  • 3585
  • 3586
  • 3587
  • 3588
  • 3589
  • 3590
  • 3591
  • 3592
  • 3593
  • 3594
  • 3595
  • 3596
  • 3597
  • 3598
  • 3599
  • 3600
  • 3601
  • 3602
  • 3603
  • 3604
  • 3605
  • 3606
  • 3607
  • 3608
  • 3609
  • 3610
  • 3611
  • 3612
  • 3613
  • 3614
  • 3615
  • 3616
  • 3617
  • 3618
  • 3619
  • 3620
  • 3621
  • 3622
  • 3623
  • 3624
  • 3625
  • 3626
  • 3627
  • 3628
  • 3629
  • 3630
  • 3631
  • 3632
  • 3633
  • 3634
  • 3635
  • 3636
  • 3637
  • 3638
  • 3639
  • 3640
  • 3641
  • 3642
  • 3643
  • 3644
  • 3645
  • 3646
  • 3647
  • 3648
  • 3649
  • 3650
  • 3651
  • 3652
  • 3653
  • 3654
  • 3655
  • 3656
  • 3657
  • 3658
  • 3659
  • 3660
  • 3661
  • 3662
  • 3663
  • 3664
  • 3665
  • 3666
  • 3667
  • 3668
  • 3669
  • 3670
  • 3671
  • 3672
  • 3673
  • 3674
  • 3675
  • 3676
  • 3677
  • 3678
  • 3679
  • 3680
  • 3681
  • 3682
  • 3683
  • 3684
  • 3685
  • 3686
  • 3687
  • 3688
  • 3689
  • 3690
  • 3691
  • 3692
  • 3693
  • 3694
  • 3695
  • 3696
  • 3697
  • 3698
  • 3699
  • 3700
  • 3701
  • 3702
  • 3703
  • 3704
  • 3705
  • 3706
  • 3707
  • 3708
  • 3709
  • 3710
  • 3711
  • 3712
  • 3713
  • 3714
  • 3715
  • 3716
  • 3717
  • 3718
  • 3719
  • 3720
  • 3721
  • 3722
  • 3723
  • 3724
  • 3725
  • 3726
  • 3727
  • 3728
  • 3729
  • 3730
  • 3731
  • 3732
  • 3733
  • 3734
  • 3735
  • 3736
  • 3737
  • 3738
  • 3739
  • 3740
  • 3741
  • 3742
  • 3743
  • 3744
  • 3745
  • 3746
  • 3747
  • 3748
  • 3749
  • 3750
  • 3751
  • 3752
  • 3753
  • 3754
  • 3755
  • 3756
  • 3757
  • 3758
  • 3759
  • 3760
  • 3761
  • 3762
  • 3763
  • 3764
  • 3765
  • 3766
  • 3767
  • 3768
  • 3769
  • 3770
  • 3771
  • 3772
  • 3773
  • 3774
  • 3775
  • 3776
  • 3777
  • 3778
  • 3779
  • 3780
  • 3781
  • 3782
  • 3783
  • 3784
  • 3785
  • 3786
  • 3787
  • 3788
  • 3789
  • 3790
  • 3791
  • 3792
  • 3793
  • 3794
  • 3795
  • 3796
  • 3797
  • 3798
  • 3799
  • 3800
  • 3801
  • 3802
  • 3803
  • 3804
  • 3805
  • 3806
  • 3807
  • 3808
  • 3809
  • 3810
  • 3811
  • 3812
  • 3813
  • 3814
  • 3815
  • 3816
  • 3817
  • 3818
  • 3819
  • 3820
  • 3821
  • 3822
  • 3823
  • 3824
  • 3825
  • 3826
  • 3827
  • 3828
  • 3829
  • 3830
  • 3831
  • 3832
  • 3833
  • 3834
  • 3835
  • 3836
  • 3837
  • 3838
  • 3839
  • 3840
  • 3841
  • 3842
  • 3843
  • 3844
  • 3845
  • 3846
  • 3847
  • 3848
  • 3849
  • 3850
  • 3851
  • 3852
  • 3853
  • 3854
  • 3855
  • 3856
  • 3857
  • 3858
  • 3859
  • 3860
  • 3861
  • 3862
  • 3863
  • 3864
  • 3865
  • 3866
  • 3867
  • 3868
  • 3869
  • 3870
  • 3871
  • 3872
  • 3873
  • 3874
  • 3875
  • 3876
  • 3877
  • 3878
  • 3879
  • 3880
  • 3881
  • 3882
  • 3883
  • 3884
  • 3885
  • 3886
  • 3887
  • 3888
  • 3889
  • 3890
  • 3891
  • 3892
  • 3893
  • 3894
  • 3895
  • 3896
  • 3897
  • 3898

The key part starts at line 833: InstanceKlass::initialize_impl

void InstanceKlass::initialize_impl(instanceKlassHandle this_oop, TRAPS) {
  // Make sure klass is linked (verified) before initialization
  // A class could already be verified, since it has been reflected upon.
  this_oop->link_class(CHECK);

  DTRACE_CLASSINIT_PROBE(required, InstanceKlass::cast(this_oop()), -1);

  bool wait = false;

  // refer to the JVM book page 47 for description of steps
  // Step 1
  {
    oop init_lock = this_oop->init_lock();
    ObjectLocker ol(init_lock, THREAD, init_lock != NULL);

    Thread *self = THREAD; // it's passed the current thread

    // Step 2
    // If we were to use wait() instead of waitInterruptibly() then
    // we might end up throwing IE from link/symbol resolution sites
    // that aren't expected to throw.  This would wreak havoc.  See 6320309.
    while(this_oop->is_being_initialized() && !this_oop->is_reentrant_initialization(self)) {
        wait = true;
      ol.waitUninterruptibly(CHECK);
    }

    // Step 3
    if (this_oop->is_being_initialized() && this_oop->is_reentrant_initialization(self)) {
      DTRACE_CLASSINIT_PROBE_WAIT(recursive, InstanceKlass::cast(this_oop()), -1,wait);
      return;
    }

    // Step 4
    if (this_oop->is_initialized()) {
      DTRACE_CLASSINIT_PROBE_WAIT(concurrent, InstanceKlass::cast(this_oop()), -1,wait);
      return;
    }

    // Step 5
    if (this_oop->is_in_error_state()) {
      DTRACE_CLASSINIT_PROBE_WAIT(erroneous, InstanceKlass::cast(this_oop()), -1,wait);
      ResourceMark rm(THREAD);
      const char* desc = "Could not initialize class ";
      const char* className = this_oop->external_name();
      size_t msglen = strlen(desc) + strlen(className) + 1;
      char* message = NEW_RESOURCE_ARRAY(char, msglen);
      if (NULL == message) {
        // Out of memory: can't create detailed error message
        THROW_MSG(vmSymbols::java_lang_NoClassDefFoundError(), className);
      } else {
        jio_snprintf(message, msglen, "%s%s", desc, className);
        THROW_MSG(vmSymbols::java_lang_NoClassDefFoundError(), message);
      }
    }

    // Step 6
    this_oop->set_init_state(being_initialized);
    this_oop->set_init_thread(self);
  }

  // Step 7
  // Next, if C is a class rather than an interface, initialize its super class and super
  // interfaces.
  if (!this_oop->is_interface()) {
    Klass* super_klass = this_oop->super();
    if (super_klass != NULL && super_klass->should_be_initialized()) {
      super_klass->initialize(THREAD);
    }
    // If C implements any interfaces that declares a non-abstract, non-static method,
    // the initialization of C triggers initialization of its super interfaces.
    // Only need to recurse if has_default_methods which includes declaring and
    // inheriting default methods
    if (!HAS_PENDING_EXCEPTION && this_oop->has_default_methods()) {
      this_oop->initialize_super_interfaces(this_oop, THREAD);
    }

    // If any exceptions, complete abruptly, throwing the same exception as above.
    if (HAS_PENDING_EXCEPTION) {
      Handle e(THREAD, PENDING_EXCEPTION);
      CLEAR_PENDING_EXCEPTION;
      {
        EXCEPTION_MARK;
        // Locks object, set state, and notify all waiting threads
        this_oop->set_initialization_state_and_notify(initialization_error, THREAD);
        CLEAR_PENDING_EXCEPTION;
      }
      DTRACE_CLASSINIT_PROBE_WAIT(super__failed, InstanceKlass::cast(this_oop()), -1,wait);
      THROW_OOP(e());
    }
  }

  // Step 8
  {
    assert(THREAD->is_Java_thread(), "non-JavaThread in initialize_impl");
    JavaThread* jt = (JavaThread*)THREAD;
    DTRACE_CLASSINIT_PROBE_WAIT(clinit, InstanceKlass::cast(this_oop()), -1,wait);
    // Timer includes any side effects of class initialization (resolution,
    // etc), but not recursive entry into call_class_initializer().
    PerfClassTraceTime timer(ClassLoader::perf_class_init_time(),
                             ClassLoader::perf_class_init_selftime(),
                             ClassLoader::perf_classes_inited(),
                             jt->get_thread_stat()->perf_recursion_counts_addr(),
                             jt->get_thread_stat()->perf_timers_addr(),
                             PerfClassTraceTime::CLASS_CLINIT);
    this_oop->call_class_initializer(THREAD);
  }

  // Step 9
  if (!HAS_PENDING_EXCEPTION) {
    this_oop->set_initialization_state_and_notify(fully_initialized, CHECK);
    { ResourceMark rm(THREAD);
      debug_only(this_oop->vtable()->verify(tty, true);)
    }
  }
  else {
    // Step 10 and 11
    Handle e(THREAD, PENDING_EXCEPTION);
    CLEAR_PENDING_EXCEPTION;
    // JVMTI has already reported the pending exception
    // JVMTI internal flag reset is needed in order to report ExceptionInInitializerError
    JvmtiExport::clear_detected_exception((JavaThread*)THREAD);
    {
      EXCEPTION_MARK;
      this_oop->set_initialization_state_and_notify(initialization_error, THREAD);
      CLEAR_PENDING_EXCEPTION;   // ignore any exception thrown, class initialization error is thrown below
      // JVMTI has already reported the pending exception
      // JVMTI internal flag reset is needed in order to report ExceptionInInitializerError
      JvmtiExport::clear_detected_exception((JavaThread*)THREAD);
    }
    DTRACE_CLASSINIT_PROBE_WAIT(error, InstanceKlass::cast(this_oop()), -1,wait);
    if (e->is_a(SystemDictionary::Error_klass())) {
      THROW_OOP(e());
    } else {
      JavaCallArguments args(e);
      THROW_ARG(vmSymbols::java_lang_ExceptionInInitializerError(),
                vmSymbols::throwable_void_signature(),
                &args);
    }
  }
  DTRACE_CLASSINIT_PROBE_WAIT(end, InstanceKlass::cast(this_oop()), -1,wait);
}

OK — the above is the raw C++ code; an annotated walkthrough follows.
Official documentation:

The class is initialized by calling the InstanceKlass::initialize_impl() method, whose source lives in oops/instanceKlass.cpp. Before a class can be initialized it must have completed linking; only after linking finishes does initialization proceed. The implementation of initialize_impl() is as follows:
instanceKlass.cpp

void InstanceKlass::initialize_impl(instanceKlassHandle this_oop, TRAPS) {
  // Make sure klass is linked (verified) before initialization
  // A class could already be verified, since it has been reflected upon.
  this_oop->link_class(CHECK);
 
  bool wait = false;
 
  // refer to the JVM book page 47 for description of steps
  // step 1: take the init lock via ObjectLocker before initializing, to prevent concurrent initialization by multiple threads.
  {
    oop init_lock = this_oop->init_lock();
    ObjectLocker ol(init_lock, THREAD, init_lock != NULL);
 
    Thread *self = THREAD; // it's passed the current thread
 
    // step 2: if this instanceKlassHandle is currently being initialized and the initializing
    // thread is not the current thread, call ol.waitUninterruptibly(CHECK) and wait until the
    // other thread finishes initialization and notifies us.
    // If we were to use wait() instead of waitInterruptibly() then
    // we might end up throwing IE from link/symbol resolution sites
    // that aren't expected to throw.  This would wreak havoc.  See 6320309.
    while(
    this_oop->is_being_initialized() &&          // the class is being initialized (being_initialized state)
    !this_oop->is_reentrant_initialization(self) // and the initializing thread is NOT the current thread
    ){
        wait = true;
        ol.waitUninterruptibly(CHECK);
    }
 
    // step 3: the class is being initialized by the current thread (a reentrant request).
    // For example, if class X has a static field initialized to new Y() and class Y has a static
    // field initialized to new X(), then touching X starts initializing X, which triggers the
    // initialization of Y, whose initialization in turn triggers the initialization of X again.
    if (
        this_oop->is_being_initialized() &&         // the class is being initialized (being_initialized state)
        this_oop->is_reentrant_initialization(self) // and the initializing thread IS the current thread
    ){
       return;
    }
 
    // step 4: the class has already been initialized (fully_initialized state)
    if (this_oop->is_initialized()) {
       return;
    }
 
    // step 5: class initialization failed earlier (initialization_error state); throw NoClassDefFoundError
    if (this_oop->is_in_error_state()) {
      ResourceMark rm(THREAD);
      const char*  desc = "Could not initialize class ";
      const char*  className = this_oop->external_name();
      size_t       msglen = strlen(desc) + strlen(className) + 1;
      char*        message = NEW_RESOURCE_ARRAY(char, msglen);
      if (NULL == message) {
          // Out of memory: can't create detailed error message
          THROW_MSG(vmSymbols::java_lang_NoClassDefFoundError(), className);
      } else {
          jio_snprintf(message, msglen, "%s%s", desc, className);
          THROW_MSG(vmSymbols::java_lang_NoClassDefFoundError(), message);
      }
    }
 
    // step 6: set the class's init state to being_initialized and record the current thread as the initializing thread
    this_oop->set_init_state(being_initialized);
    this_oop->set_init_thread(self);
  }
 
  // step 7: if what we are initializing is not an interface, and the superclass is non-null and not yet initialized, initialize the superclass
  Klass* super_klass = this_oop->super();
  if (
      super_klass != NULL &&
      !this_oop->is_interface() &&
      // i.e. check whether super_klass is NOT yet fully_initialized;
      // should_be_initialized() returns true only if it still needs to be initialized
      super_klass->should_be_initialized()
  ){
    super_klass->initialize(THREAD);
    // ...
  }
 
  if (this_oop->has_default_methods()) {
    // Step 7.5: initialize super interfaces that declare default methods
    for (int i = 0; i < this_oop->local_interfaces()->length(); ++i) {
      Klass*          iface = this_oop->local_interfaces()->at(i);
      InstanceKlass*  ik = InstanceKlass::cast(iface);
      if (ik->has_default_methods() && ik->should_be_initialized()) {
          ik->initialize(THREAD);
          // ...
      }
    }
  }
 
  // Step 8
  // run the class or interface initialization method <clinit>
  {
    this_oop->call_class_initializer(THREAD); // invoke the <clinit> method of the class or interface
  }
 
  // Step 9
  // if no exception is pending, initialization has completed: set the class state to fully_initialized and notify the other threads that initialization is done.
  if (!HAS_PENDING_EXCEPTION) {
    this_oop->set_initialization_state_and_notify(fully_initialized, CHECK);
  }
  else {
    // Step 10 and 11
    // if an exception occurred during initialization, use set_initialization_state_and_notify()
    // to set the class state to initialization_error and notify the other threads, then throw the error or exception
    Handle e(THREAD, PENDING_EXCEPTION);
    CLEAR_PENDING_EXCEPTION;
    {
      EXCEPTION_MARK;
      this_oop->set_initialization_state_and_notify(initialization_error, THREAD);
      CLEAR_PENDING_EXCEPTION;   // ignore any exception thrown, class initialization error is thrown below
    }
    // ...
  }
}
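To make Step 3 concrete, here is a minimal single-threaded Java sketch; the class names X, Y and ReentrantInitDemo simply mirror the comment above and are not part of the original article. Because the re-entry happens on the same thread, initialize_impl returns at Step 3 instead of waiting, so this version completes without deadlocking; the trade-off is that code running during the re-entry can observe static fields that still hold their default values.

```java
class X {
    static Y peer = new Y(); // X's <clinit> triggers the initialization of Y
}

class Y {
    static X peer = new X(); // Y's <clinit> re-enters X's initialization on the same thread
}

public class ReentrantInitDemo {
    public static void main(String[] args) {
        new X();                    // one thread only: completes, no deadlock
        System.out.println(X.peer); // a Y instance
        System.out.println(Y.peer); // an X instance created while X was still being initialized
    }
}
```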

Key fields of the InstanceKlass class

_init_state holds the class's state; it is of the enum type ClassState, which defines the following values:
- allocated (memory has been allocated)
- loaded (the class file has been read and loaded into memory)
- linked (linking and verification succeeded)
- being_initialized (initialization in progress)
- fully_initialized (initialization completed)
- initialization_error (initialization failed)
_init_thread is a pointer to the Thread that is performing this class's initialization.
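For readability, here is a rough Java rendering of those ClassState values; this is purely illustrative and not the actual JDK source (the real enum is a C++ enum in HotSpot's InstanceKlass):

```java
// Illustrative only: a Java sketch of the HotSpot ClassState values listed above.
enum ClassState {
    ALLOCATED,            // memory for the klass has been allocated
    LOADED,               // the class file has been read into memory
    LINKED,               // linking and verification succeeded
    BEING_INITIALIZED,    // <clinit> is running
    FULLY_INITIALIZED,    // initialization finished normally
    INITIALIZATION_ERROR  // initialization ended with an error
}
```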

These klass initialization steps are described in detail in the JVM specification. Assume the current class (or interface) is C and that it owns a unique initialization lock LC:

1. Synchronize on LC, so that concurrent attempts cannot initialize C more than once.
2. If another thread is currently initializing C, release LC and block the current thread until that thread finishes the initialization.
3. If the thread performing the initialization is the current thread, this is a recursive (reentrant) request: release LC and complete normally.
4. If C has already been initialized, release LC and complete normally.
5. If C is in an erroneous state, release LC and throw NoClassDefFoundError.
6. Record that C is being initialized by the current thread, release LC, and initialize the class's final static (constant) fields.
7. If C is a class, initialize its superclass and its interfaces.
8. Determine whether assertions are enabled for C.
9. Execute the initialization method <clinit> of the class (or interface).
10. Mark C as fully initialized and wake up all waiting threads.
11. If initialization fails, throw the exception, mark C as erroneous, and wake up all waiting threads.

(A single-threaded pre-initialization sketch that avoids the two-thread deadlock follows below.)
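Mapping these steps back to the AA/BB example: each thread marks its own class as being_initialized (Step 6), and then, while running that class's <clinit>, it triggers initialization of the other class and blocks at Step 2 because the other class is already being_initialized by the other thread, so neither <clinit> ever reaches Step 10. One common way to sidestep this is to force initialization in a fixed order on a single thread before any worker threads touch the classes. The sketch below is not from the original article; it uses the standard Class.forName(name, initialize, loader) overload and assumes the test.AA/test.BB classes from the example:

```java
import java.util.Arrays;
import java.util.List;

public class EagerInit {
    public static void main(String[] args) throws Exception {
        // Initialize both classes here, on the main thread, in a deterministic order.
        List<String> initOrder = Arrays.asList("test.AA", "test.BB");
        for (String name : initOrder) {
            // initialize=true runs <clinit> now; cross-class references become
            // reentrant requests on this one thread (Step 3), so no deadlock.
            Class.forName(name, true, EagerInit.class.getClassLoader());
        }
        // Threads started after this point see fully initialized classes
        // and never contend on the class-initialization locks.
    }
}
```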



### The overall flow in the JVM

From the class file, to the class loaded into memory, to the point where the class is unloaded:

![在这里插入图片描述](https://img-blog.csdnimg.cn/7c8003d82f70470481c4a9912fc843fc.png?x-oss-process=image/watermark,type_d3F5LXplbmhlaQ,shadow_50,text_Q1NETiBA5b6Q5bCP5Yag,size_20,color_FFFFFF,t_70,g_se,x_16)

The IDEA debug walkthrough


Note the two screenshots below:
![image.png](https://img-blog.csdnimg.cn/img_convert/bcb43489c86e330f8bff6336d8a73218.png#clientId=ua9d483c5-b701-4&crop=0&crop=0&crop=1&crop=1&from=paste&height=262&id=uef5fc92c&margin=[object Object]&name=image.png&originHeight=523&originWidth=1399&originalType=binary&ratio=1&rotation=0&showTitle=false&size=81285&status=done&style=none&taskId=u196cc7e0-ea76-4c3a-9af9-fe2f4e3439d&title=&width=699.5)
AA's and BB's <clinit> finish, and <init> begins

An AA instance is created with new

![image.png](https://img-blog.csdnimg.cn/img_convert/c583a6fe61527bc94ad5ebc766f09d5f.png#clientId=ua9d483c5-b701-4&crop=0&crop=0&crop=1&crop=1&from=paste&height=292&id=u0de877f8&margin=[object Object]&name=image.png&originHeight=583&originWidth=1388&originalType=binary&ratio=1&rotation=0&showTitle=false&size=85072&status=done&style=none&taskId=ua889d760-d949-4989-9ad0-89dafbe4627&title=&width=694)
Thread one has finished its work; thread two starts working




The thread dump output

Full thread dump

"Thread-1@656" prio=5 tid=0xf nid=NA runnable
  java.lang.Thread.State: RUNNABLE
	  at test.InitDeadLock.lambda$main$1(InitDeadLock.java:11)
	  at test.InitDeadLock$$Lambda$2.295530567.run(Unknown Source:-1)
	  at java.lang.Thread.run(Thread.java:748)

"Finalizer@665" daemon prio=8 tid=0x3 nid=NA waiting
  java.lang.Thread.State: WAITING
	  at java.lang.Object.wait(Object.java:-1)
	  at java.lang.ref.ReferenceQueue.remove(ReferenceQueue.java:144)
	  at java.lang.ref.ReferenceQueue.remove(ReferenceQueue.java:165)
	  at java.lang.ref.Finalizer$FinalizerThread.run(Finalizer.java:216)

"Reference Handler@666" daemon prio=10 tid=0x2 nid=NA waiting
  java.lang.Thread.State: WAITING
	  at java.lang.Object.wait(Object.java:-1)
	  at java.lang.Object.wait(Object.java:502)
	  at java.lang.ref.Reference.tryHandlePending(Reference.java:191)
	  at java.lang.ref.Reference$ReferenceHandler.run(Reference.java:153)

"DestroyJavaVM@657" prio=5 tid=0x10 nid=NA runnable
  java.lang.Thread.State: RUNNABLE

"Attach Listener@663" daemon prio=5 tid=0x5 nid=NA runnable
  java.lang.Thread.State: RUNNABLE

"Signal Dispatcher@664" daemon prio=9 tid=0x4 nid=NA runnable
  java.lang.Thread.State: RUNNABLE
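
The same picture can be captured without the IDE: run `jstack <pid>` against the hung process, or dump the threads programmatically from inside the same JVM. Below is a minimal sketch (not part of the original article) using the standard ThreadMXBean API; note that, as in the dump above, the stuck thread shows up as RUNNABLE rather than BLOCKED, because the class-initialization wait happens inside the VM and not on an ordinary Java monitor.

```java
import java.lang.management.ManagementFactory;
import java.lang.management.ThreadInfo;
import java.lang.management.ThreadMXBean;

public class DumpThreads {
    // Start this as a daemon thread in the same JVM before triggering the deadlock,
    // e.g. new Thread(DumpThreads::dumpAfterDelay).start();
    public static void dumpAfterDelay() {
        try {
            Thread.sleep(3000); // give the two initializing threads time to block each other
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            return;
        }
        ThreadMXBean mx = ManagementFactory.getThreadMXBean();
        for (ThreadInfo info : mx.dumpAllThreads(false, false)) {
            System.out.print(info); // name, state and a (truncated) stack trace
        }
    }
}
```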




**Explanation**: see page 279 of 《深入理解Java虚拟机》 (Understanding the Java Virtual Machine in Depth).
