HotSpot source reading notes (part 17): the tail of vmSymbols.cpp — VM
intrinsic lookup, packing, and verification — followed by the head of
src/share/vm/classfile/vmSymbols.hpp.
  1. vmIntrinsics::ID vmIntrinsics::for_boxing(BasicType type) {
  2. return wrapper_intrinsic(type, false);
  3. }
  4. vmIntrinsics::ID vmIntrinsics::for_unboxing(BasicType type) {
  5. return wrapper_intrinsic(type, true);
  6. }
  7. vmIntrinsics::ID vmIntrinsics::for_raw_conversion(BasicType src, BasicType dest) {
  8. #define SRC_DEST(s,d) (((int)(s) << 4) + (int)(d))
  9. switch (SRC_DEST(src, dest)) {
  10. case SRC_DEST(T_INT, T_FLOAT): return vmIntrinsics::_intBitsToFloat;
  11. case SRC_DEST(T_FLOAT, T_INT): return vmIntrinsics::_floatToRawIntBits;
  12. case SRC_DEST(T_LONG, T_DOUBLE): return vmIntrinsics::_longBitsToDouble;
  13. case SRC_DEST(T_DOUBLE, T_LONG): return vmIntrinsics::_doubleToRawLongBits;
  14. }
  15. #undef SRC_DEST
  16. return vmIntrinsics::_none;
  17. }
  18. bool vmIntrinsics::should_be_pinned(vmIntrinsics::ID id) {
  19. assert(id != vmIntrinsics::_none, "must be a VM intrinsic");
  20. switch(id) {
  21. #ifdef JFR_HAVE_INTRINSICS
  22. case vmIntrinsics::_counterTime:
  23. #endif
  24. case vmIntrinsics::_currentTimeMillis:
  25. case vmIntrinsics::_nanoTime:
  26. return true;
  27. default:
  28. return false;
  29. }
  30. }
  31. #define VM_INTRINSIC_INITIALIZE(id, klass, name, sig, flags) #id "\0"
  32. static const char* vm_intrinsic_name_bodies =
  33. VM_INTRINSICS_DO(VM_INTRINSIC_INITIALIZE,
  34. VM_SYMBOL_IGNORE, VM_SYMBOL_IGNORE, VM_SYMBOL_IGNORE, VM_ALIAS_IGNORE);
  35. static const char* vm_intrinsic_name_table[vmIntrinsics::ID_LIMIT];
  36. const char* vmIntrinsics::name_at(vmIntrinsics::ID id) {
  37. const char** nt = &vm_intrinsic_name_table[0];
  38. if (nt[_none] == NULL) {
  39. char* string = (char*) &vm_intrinsic_name_bodies[0];
  40. for (int index = FIRST_ID; index < ID_LIMIT; index++) {
  41. nt[index] = string;
  42. string += strlen(string); // skip string body
  43. string += 1; // skip trailing null
  44. }
  45. assert(!strcmp(nt[_hashCode], "_hashCode"), "lined up");
  46. nt[_none] = "_none";
  47. }
  48. if ((uint)id < (uint)ID_LIMIT)
  49. return vm_intrinsic_name_table[(uint)id];
  50. else
  51. return "(unknown intrinsic)";
  52. }
  53. inline bool match_F_R(jshort flags) {
  54. const int req = 0;
  55. const int neg = JVM_ACC_STATIC | JVM_ACC_SYNCHRONIZED;
  56. return (flags & (req | neg)) == req;
  57. }
  58. inline bool match_F_Y(jshort flags) {
  59. const int req = JVM_ACC_SYNCHRONIZED;
  60. const int neg = JVM_ACC_STATIC;
  61. return (flags & (req | neg)) == req;
  62. }
  63. inline bool match_F_RN(jshort flags) {
  64. const int req = JVM_ACC_NATIVE;
  65. const int neg = JVM_ACC_STATIC | JVM_ACC_SYNCHRONIZED;
  66. return (flags & (req | neg)) == req;
  67. }
  68. inline bool match_F_S(jshort flags) {
  69. const int req = JVM_ACC_STATIC;
  70. const int neg = JVM_ACC_SYNCHRONIZED;
  71. return (flags & (req | neg)) == req;
  72. }
  73. inline bool match_F_SN(jshort flags) {
  74. const int req = JVM_ACC_STATIC | JVM_ACC_NATIVE;
  75. const int neg = JVM_ACC_SYNCHRONIZED;
  76. return (flags & (req | neg)) == req;
  77. }
  78. inline bool match_F_RNY(jshort flags) {
  79. const int req = JVM_ACC_NATIVE | JVM_ACC_SYNCHRONIZED;
  80. const int neg = JVM_ACC_STATIC;
  81. return (flags & (req | neg)) == req;
  82. }
  83. #define ID3(x, y, z) (( jlong)(z) + \
  84. ((jlong)(y) << vmSymbols::log2_SID_LIMIT) + \
  85. ((jlong)(x) << (2*vmSymbols::log2_SID_LIMIT)) )
  86. #define SID_ENUM(n) vmSymbols::VM_SYMBOL_ENUM_NAME(n)
  87. vmIntrinsics::ID vmIntrinsics::find_id_impl(vmSymbols::SID holder,
  88. vmSymbols::SID name,
  89. vmSymbols::SID sig,
  90. jshort flags) {
  91. assert((int)vmSymbols::SID_LIMIT <= (1<<vmSymbols::log2_SID_LIMIT), "must fit");
  92. #define VM_INTRINSIC_CASE(id, klass, name, sig, fcode) \
  93. case ID3(SID_ENUM(klass), SID_ENUM(name), SID_ENUM(sig)): \
  94. if (!match_##fcode(flags)) break; \
  95. return id;
  96. switch (ID3(holder, name, sig)) {
  97. VM_INTRINSICS_DO(VM_INTRINSIC_CASE,
  98. VM_SYMBOL_IGNORE, VM_SYMBOL_IGNORE, VM_SYMBOL_IGNORE, VM_ALIAS_IGNORE);
  99. }
  100. return vmIntrinsics::_none;
  101. #undef VM_INTRINSIC_CASE
  102. }
  103. const char* vmIntrinsics::short_name_as_C_string(vmIntrinsics::ID id, char* buf, int buflen) {
  104. const char* str = name_at(id);
  105. #ifndef PRODUCT
  106. const char* kname = vmSymbols::name_for(class_for(id));
  107. const char* mname = vmSymbols::name_for(name_for(id));
  108. const char* sname = vmSymbols::name_for(signature_for(id));
  109. const char* fname = "";
  110. switch (flags_for(id)) {
  111. case F_Y: fname = "synchronized "; break;
  112. case F_RN: fname = "native "; break;
  113. case F_SN: fname = "native static "; break;
  114. case F_S: fname = "static "; break;
  115. case F_RNY:fname = "native synchronized "; break;
  116. }
  117. const char* kptr = strrchr(kname, '/');
  118. if (kptr != NULL) kname = kptr + 1;
  119. int len = jio_snprintf(buf, buflen, "%s: %s%s.%s%s",
  120. str, fname, kname, mname, sname);
  121. if (len < buflen)
  122. str = buf;
  123. #endif //PRODUCT
  124. return str;
  125. }
  126. #define ID4(x, y, z, f) ((ID3(x, y, z) << vmIntrinsics::log2_FLAG_LIMIT) | (jlong) (f))
  127. static const jlong intrinsic_info_array[vmIntrinsics::ID_LIMIT+1] = {
  128. #define VM_INTRINSIC_INFO(ignore_id, klass, name, sig, fcode) \
  129. ID4(SID_ENUM(klass), SID_ENUM(name), SID_ENUM(sig), vmIntrinsics::fcode),
  130. 0, VM_INTRINSICS_DO(VM_INTRINSIC_INFO,
  131. VM_SYMBOL_IGNORE, VM_SYMBOL_IGNORE, VM_SYMBOL_IGNORE, VM_ALIAS_IGNORE)
  132. 0
  133. #undef VM_INTRINSIC_INFO
  134. };
  135. inline jlong intrinsic_info(vmIntrinsics::ID id) {
  136. return intrinsic_info_array[vmIntrinsics::ID_from((int)id)];
  137. }
  138. vmSymbols::SID vmIntrinsics::class_for(vmIntrinsics::ID id) {
  139. jlong info = intrinsic_info(id);
  140. int shift = 2*vmSymbols::log2_SID_LIMIT + log2_FLAG_LIMIT, mask = right_n_bits(vmSymbols::log2_SID_LIMIT);
  141. assert(((ID4(1021,1022,1023,15) >> shift) & mask) == 1021, "");
  142. return vmSymbols::SID( (info >> shift) & mask );
  143. }
  144. vmSymbols::SID vmIntrinsics::name_for(vmIntrinsics::ID id) {
  145. jlong info = intrinsic_info(id);
  146. int shift = vmSymbols::log2_SID_LIMIT + log2_FLAG_LIMIT, mask = right_n_bits(vmSymbols::log2_SID_LIMIT);
  147. assert(((ID4(1021,1022,1023,15) >> shift) & mask) == 1022, "");
  148. return vmSymbols::SID( (info >> shift) & mask );
  149. }
  150. vmSymbols::SID vmIntrinsics::signature_for(vmIntrinsics::ID id) {
  151. jlong info = intrinsic_info(id);
  152. int shift = log2_FLAG_LIMIT, mask = right_n_bits(vmSymbols::log2_SID_LIMIT);
  153. assert(((ID4(1021,1022,1023,15) >> shift) & mask) == 1023, "");
  154. return vmSymbols::SID( (info >> shift) & mask );
  155. }
  156. vmIntrinsics::Flags vmIntrinsics::flags_for(vmIntrinsics::ID id) {
  157. jlong info = intrinsic_info(id);
  158. int shift = 0, mask = right_n_bits(log2_FLAG_LIMIT);
  159. assert(((ID4(1021,1022,1023,15) >> shift) & mask) == 15, "");
  160. return Flags( (info >> shift) & mask );
  161. }
  162. #ifndef PRODUCT
  163. static bool match_method(Method* m, Symbol* n, Symbol* s) {
  164. return (m->name() == n &&
  165. m->signature() == s);
  166. }
  167. static vmIntrinsics::ID match_method_with_klass(Method* m, Symbol* mk) {
  168. #define VM_INTRINSIC_MATCH(id, klassname, namepart, sigpart, flags) \
  169. { Symbol* k = vmSymbols::klassname(); \
  170. if (mk == k) { \
  171. Symbol* n = vmSymbols::namepart(); \
  172. Symbol* s = vmSymbols::sigpart(); \
  173. if (match_method(m, n, s)) \
  174. return vmIntrinsics::id; \
  175. } }
  176. VM_INTRINSICS_DO(VM_INTRINSIC_MATCH,
  177. VM_SYMBOL_IGNORE, VM_SYMBOL_IGNORE, VM_SYMBOL_IGNORE, VM_ALIAS_IGNORE);
  178. return vmIntrinsics::_none;
  179. #undef VM_INTRINSIC_MATCH
  180. }
  181. void vmIntrinsics::verify_method(ID actual_id, Method* m) {
  182. Symbol* mk = m->method_holder()->name();
  183. ID declared_id = match_method_with_klass(m, mk);
  184. if (declared_id == actual_id) return; // success
  185. if (declared_id == _none && actual_id != _none && mk == vmSymbols::java_lang_StrictMath()) {
  186. switch (actual_id) {
  187. case _min:
  188. case _max:
  189. case _dsqrt:
  190. declared_id = match_method_with_klass(m, vmSymbols::java_lang_Math());
  191. if (declared_id == actual_id) return; // acceptable alias
  192. break;
  193. }
  194. }
  195. const char* declared_name = name_at(declared_id);
  196. const char* actual_name = name_at(actual_id);
  197. methodHandle mh = m;
  198. m = NULL;
  199. ttyLocker ttyl;
  200. if (xtty != NULL) {
  201. xtty->begin_elem("intrinsic_misdeclared actual='%s' declared='%s'",
  202. actual_name, declared_name);
  203. xtty->method(mh);
  204. xtty->end_elem("%s", "");
  205. }
  206. if (PrintMiscellaneous && (WizardMode || Verbose)) {
  207. tty->print_cr("*** misidentified method; %s(%d) should be %s(%d):",
  208. declared_name, declared_id, actual_name, actual_id);
  209. mh()->print_short_name(tty);
  210. tty->cr();
  211. }
  212. }
  213. #endif //PRODUCT
File: src/share/vm/classfile/vmSymbols.hpp
  215. #ifndef SHARE_VM_CLASSFILE_VMSYMBOLS_HPP
  216. #define SHARE_VM_CLASSFILE_VMSYMBOLS_HPP
  217. #include "jfr/support/jfrIntrinsics.hpp"
  218. #include "memory/iterator.hpp"
  219. #include "oops/symbol.hpp"
  220. #include "utilities/macros.hpp"
  221. #define VM_SYMBOL_ENUM_NAME(name) name##_enum
  222. #define VM_INTRINSIC_IGNORE(id, class, name, sig, flags) /*ignored*/
  223. #define VM_SYMBOL_IGNORE(id, name) /*ignored*/
  224. #define VM_ALIAS_IGNORE(id, id2) /*ignored*/
  225. #define VM_SYMBOLS_DO(template, do_alias) \
  226. template(java_lang_System, "java/lang/System") \
  227. template(java_lang_Object, "java/lang/Object") \
  228. template(java_lang_Class, "java/lang/Class") \
  229. template(java_lang_String, "java/lang/String") \
  230. template(java_lang_Thread, "java/lang/Thread") \
  231. template(java_lang_ThreadGroup, "java/lang/ThreadGroup") \
  232. template(java_lang_Cloneable, "java/lang/Cloneable") \
  233. template(java_lang_Throwable, "java/lang/Throwable") \
  234. template(java_lang_ClassLoader, "java/lang/ClassLoader") \
  235. template(java_lang_ClassLoader_NativeLibrary, "java/lang/ClassLoader\x024NativeLibrary") \
  236. template(java_lang_ThreadDeath, "java/lang/ThreadDeath") \
  237. template(java_lang_Boolean, "java/lang/Boolean") \
  238. template(java_lang_Character, "java/lang/Character") \
  239. template(java_lang_Character_CharacterCache, "java/lang/Character$CharacterCache") \
  240. template(java_lang_Float, "java/lang/Float") \
  241. template(java_lang_Double, "java/lang/Double") \
  242. template(java_lang_Byte, "java/lang/Byte") \
  243. template(java_lang_Byte_ByteCache, "java/lang/Byte$ByteCache") \
  244. template(java_lang_Short, "java/lang/Short") \
  245. template(java_lang_Short_ShortCache, "java/lang/Short$ShortCache") \
  246. template(java_lang_Integer, "java/lang/Integer") \
  247. template(java_lang_Integer_IntegerCache, "java/lang/Integer$IntegerCache") \
  248. template(java_lang_Long, "java/lang/Long") \
  249. template(java_lang_Long_LongCache, "java/lang/Long$LongCache") \
  250. template(java_lang_Shutdown, "java/lang/Shutdown") \
  251. template(java_lang_ref_Reference, "java/lang/ref/Reference") \
  252. template(java_lang_ref_SoftReference, "java/lang/ref/SoftReference") \
  253. template(java_lang_ref_WeakReference, "java/lang/ref/WeakReference") \
  254. template(java_lang_ref_FinalReference, "java/lang/ref/FinalReference") \
  255. template(java_lang_ref_PhantomReference, "java/lang/ref/PhantomReference") \
  256. template(sun_misc_Cleaner, "sun/misc/Cleaner") \
  257. template(java_lang_ref_Finalizer, "java/lang/ref/Finalizer") \
  258. template(java_lang_ref_ReferenceQueue, "java/lang/ref/ReferenceQueue") \
  259. template(java_lang_reflect_AccessibleObject, "java/lang/reflect/AccessibleObject") \
  260. template(java_lang_reflect_Method, "java/lang/reflect/Method") \
  261. template(java_lang_reflect_Constructor, "java/lang/reflect/Constructor") \
  262. template(java_lang_reflect_Field, "java/lang/reflect/Field") \
  263. template(java_lang_reflect_Parameter, "java/lang/reflect/Parameter") \
  264. template(java_lang_reflect_Array, "java/lang/reflect/Array") \
  265. template(java_lang_StringBuffer, "java/lang/StringBuffer") \
  266. template(java_lang_StringBuilder, "java/lang/StringBuilder") \
  267. template(java_lang_CharSequence, "java/lang/CharSequence") \
  268. template(java_lang_SecurityManager, "java/lang/SecurityManager") \
  269. template(java_security_AccessControlContext, "java/security/AccessControlContext") \
  270. template(java_security_CodeSource, "java/security/CodeSource") \
  271. template(java_security_ProtectionDomain, "java/security/ProtectionDomain") \
  272. template(java_security_SecureClassLoader, "java/security/SecureClassLoader") \
  273. template(java_net_URLClassLoader, "java/net/URLClassLoader") \
  274. template(java_net_URL, "java/net/URL") \
  275. template(java_util_jar_Manifest, "java/util/jar/Manifest") \
  276. template(impliesCreateAccessControlContext_name, "impliesCreateAccessControlContext") \
  277. template(java_io_OutputStream, "java/io/OutputStream") \
  278. template(java_io_Reader, "java/io/Reader") \
  279. template(java_io_BufferedReader, "java/io/BufferedReader") \
  280. template(java_io_File, "java/io/File") \
  281. template(java_io_FileInputStream, "java/io/FileInputStream") \
  282. template(java_io_ByteArrayInputStream, "java/io/ByteArrayInputStream") \
  283. template(java_io_Serializable, "java/io/Serializable") \
  284. template(java_util_Arrays, "java/util/Arrays") \
  285. template(java_util_Properties, "java/util/Properties") \
  286. template(java_util_Vector, "java/util/Vector") \
  287. template(java_util_AbstractList, "java/util/AbstractList") \
  288. template(java_util_Hashtable, "java/util/Hashtable") \
  289. template(java_lang_Compiler, "java/lang/Compiler") \
  290. template(sun_misc_Signal, "sun/misc/Signal") \
  291. template(sun_misc_Launcher, "sun/misc/Launcher") \
  292. template(java_lang_AssertionStatusDirectives, "java/lang/AssertionStatusDirectives") \
  293. template(getBootClassPathEntryForClass_name, "getBootClassPathEntryForClass") \
  294. template(sun_misc_PostVMInitHook, "sun/misc/PostVMInitHook") \
  295. template(sun_misc_Launcher_AppClassLoader, "sun/misc/Launcher$AppClassLoader") \
  296. template(sun_misc_Launcher_ExtClassLoader, "sun/misc/Launcher$ExtClassLoader") \
  297. \
  298. template(sun_misc_Version, "sun/misc/Version") \
  299. template(java_runtime_name_name, "java_runtime_name") \
  300. template(java_runtime_version_name, "java_runtime_version") \
  301. \
  302. template(tag_source_file, "SourceFile") \
  303. template(tag_inner_classes, "InnerClasses") \
  304. template(tag_constant_value, "ConstantValue") \
  305. template(tag_code, "Code") \
  306. template(tag_exceptions, "Exceptions") \
  307. template(tag_line_number_table, "LineNumberTable") \
  308. template(tag_local_variable_table, "LocalVariableTable") \
  309. template(tag_local_variable_type_table, "LocalVariableTypeTable") \
  310. template(tag_method_parameters, "MethodParameters") \
  311. template(tag_stack_map_table, "StackMapTable") \
  312. template(tag_synthetic, "Synthetic") \
  313. template(tag_deprecated, "Deprecated") \
  314. template(tag_source_debug_extension, "SourceDebugExtension") \
  315. template(tag_signature, "Signature") \
  316. template(tag_runtime_visible_annotations, "RuntimeVisibleAnnotations") \
  317. template(tag_runtime_invisible_annotations, "RuntimeInvisibleAnnotations") \
  318. template(tag_runtime_visible_parameter_annotations, "RuntimeVisibleParameterAnnotations") \
  319. template(tag_runtime_invisible_parameter_annotations,"RuntimeInvisibleParameterAnnotations") \
  320. template(tag_annotation_default, "AnnotationDefault") \
  321. template(tag_runtime_visible_type_annotations, "RuntimeVisibleTypeAnnotations") \
  322. template(tag_runtime_invisible_type_annotations, "RuntimeInvisibleTypeAnnotations") \
  323. template(tag_enclosing_method, "EnclosingMethod") \
  324. template(tag_bootstrap_methods, "BootstrapMethods") \
  325. \
  326. template(java_lang_ArithmeticException, "java/lang/ArithmeticException") \
  327. template(java_lang_ArrayIndexOutOfBoundsException, "java/lang/ArrayIndexOutOfBoundsException") \
  328. template(java_lang_ArrayStoreException, "java/lang/ArrayStoreException") \
  329. template(java_lang_ClassCastException, "java/lang/ClassCastException") \
  330. template(java_lang_ClassNotFoundException, "java/lang/ClassNotFoundException") \
  331. template(java_lang_CloneNotSupportedException, "java/lang/CloneNotSupportedException") \
  332. template(java_lang_IllegalAccessException, "java/lang/IllegalAccessException") \
  333. template(java_lang_IllegalArgumentException, "java/lang/IllegalArgumentException") \
  334. template(java_lang_IllegalStateException, "java/lang/IllegalStateException") \
  335. template(java_lang_IllegalMonitorStateException, "java/lang/IllegalMonitorStateException") \
  336. template(java_lang_IllegalThreadStateException, "java/lang/IllegalThreadStateException") \
  337. template(java_lang_IndexOutOfBoundsException, "java/lang/IndexOutOfBoundsException") \
  338. template(java_lang_InstantiationException, "java/lang/InstantiationException") \
  339. template(java_lang_InstantiationError, "java/lang/InstantiationError") \
  340. template(java_lang_InterruptedException, "java/lang/InterruptedException") \
  341. template(java_lang_BootstrapMethodError, "java/lang/BootstrapMethodError") \
  342. template(java_lang_LinkageError, "java/lang/LinkageError") \
  343. template(java_lang_NegativeArraySizeException, "java/lang/NegativeArraySizeException") \
  344. template(java_lang_NoSuchFieldException, "java/lang/NoSuchFieldException") \
  345. template(java_lang_NoSuchMethodException, "java/lang/NoSuchMethodException") \
  346. template(java_lang_NullPointerException, "java/lang/NullPointerException") \
  347. template(java_lang_StringIndexOutOfBoundsException, "java/lang/StringIndexOutOfBoundsException")\
  348. template(java_lang_UnsupportedOperationException, "java/lang/UnsupportedOperationException") \
  349. template(java_lang_InvalidClassException, "java/lang/InvalidClassException") \
  350. template(java_lang_reflect_InvocationTargetException, "java/lang/reflect/InvocationTargetException") \
  351. template(java_lang_Exception, "java/lang/Exception") \
  352. template(java_lang_RuntimeException, "java/lang/RuntimeException") \
  353. template(java_io_IOException, "java/io/IOException") \
  354. template(java_security_PrivilegedActionException, "java/security/PrivilegedActionException") \
  355. \
  356. template(java_lang_AbstractMethodError, "java/lang/AbstractMethodError") \
  357. template(java_lang_ClassCircularityError, "java/lang/ClassCircularityError") \
  358. template(java_lang_ClassFormatError, "java/lang/ClassFormatError") \
  359. template(java_lang_UnsupportedClassVersionError, "java/lang/UnsupportedClassVersionError") \
  360. template(java_lang_Error, "java/lang/Error") \
  361. template(java_lang_ExceptionInInitializerError, "java/lang/ExceptionInInitializerError") \
  362. template(java_lang_IllegalAccessError, "java/lang/IllegalAccessError") \
  363. template(java_lang_IncompatibleClassChangeError, "java/lang/IncompatibleClassChangeError") \
  364. template(java_lang_InternalError, "java/lang/InternalError") \
  365. template(java_lang_NoClassDefFoundError, "java/lang/NoClassDefFoundError") \
  366. template(java_lang_NoSuchFieldError, "java/lang/NoSuchFieldError") \
  367. template(java_lang_NoSuchMethodError, "java/lang/NoSuchMethodError") \
  368. template(java_lang_OutOfMemoryError, "java/lang/OutOfMemoryError") \
  369. template(java_lang_UnsatisfiedLinkError, "java/lang/UnsatisfiedLinkError") \
  370. template(java_lang_VerifyError, "java/lang/VerifyError") \
  371. template(java_lang_SecurityException, "java/lang/SecurityException") \
  372. template(java_lang_VirtualMachineError, "java/lang/VirtualMachineError") \
  373. template(java_lang_StackOverflowError, "java/lang/StackOverflowError") \
  374. template(java_lang_StackTraceElement, "java/lang/StackTraceElement") \
  375. \
  376. template(java_util_concurrent_locks_AbstractOwnableSynchronizer, "java/util/concurrent/locks/AbstractOwnableSynchronizer") \
  377. template(java_util_concurrent_atomic_AtomicIntegerFieldUpdater_Impl, "java/util/concurrent/atomic/AtomicIntegerFieldUpdater$AtomicIntegerFieldUpdaterImpl") \
  378. template(java_util_concurrent_atomic_AtomicLongFieldUpdater_CASUpdater, "java/util/concurrent/atomic/AtomicLongFieldUpdater$CASUpdater") \
  379. template(java_util_concurrent_atomic_AtomicLongFieldUpdater_LockedUpdater, "java/util/concurrent/atomic/AtomicLongFieldUpdater$LockedUpdater") \
  380. template(java_util_concurrent_atomic_AtomicReferenceFieldUpdater_Impl, "java/util/concurrent/atomic/AtomicReferenceFieldUpdater$AtomicReferenceFieldUpdaterImpl") \
  381. template(sun_misc_Contended_signature, "Lsun/misc/Contended;") \
  382. \
  383. VM_INTRINSICS_DO(VM_INTRINSIC_IGNORE, template, VM_SYMBOL_IGNORE, VM_SYMBOL_IGNORE, VM_ALIAS_IGNORE) \
  384. \
  385. \
  386. template(sun_reflect_FieldInfo, "sun/reflect/FieldInfo") \
  387. template(sun_reflect_MethodInfo, "sun/reflect/MethodInfo") \
  388. template(sun_reflect_MagicAccessorImpl, "sun/reflect/MagicAccessorImpl") \
  389. template(sun_reflect_MethodAccessorImpl, "sun/reflect/MethodAccessorImpl") \
  390. template(sun_reflect_ConstructorAccessorImpl, "sun/reflect/ConstructorAccessorImpl") \
  391. template(sun_reflect_SerializationConstructorAccessorImpl, "sun/reflect/SerializationConstructorAccessorImpl") \
  392. template(sun_reflect_DelegatingClassLoader, "sun/reflect/DelegatingClassLoader") \
  393. template(sun_reflect_Reflection, "sun/reflect/Reflection") \
  394. template(sun_reflect_CallerSensitive, "sun/reflect/CallerSensitive") \
  395. template(sun_reflect_CallerSensitive_signature, "Lsun/reflect/CallerSensitive;") \
  396. template(checkedExceptions_name, "checkedExceptions") \
  397. template(clazz_name, "clazz") \
  398. template(exceptionTypes_name, "exceptionTypes") \
  399. template(modifiers_name, "modifiers") \
  400. template(newConstructor_name, "newConstructor") \
  401. template(newConstructor_signature, "(Lsun/reflect/MethodInfo;)Ljava/lang/reflect/Constructor;") \
  402. template(newField_name, "newField") \
  403. template(newField_signature, "(Lsun/reflect/FieldInfo;)Ljava/lang/reflect/Field;") \
  404. template(newMethod_name, "newMethod") \
  405. template(newMethod_signature, "(Lsun/reflect/MethodInfo;)Ljava/lang/reflect/Method;") \
  406. template(invokeBasic_name, "invokeBasic") \
  407. template(linkToVirtual_name, "linkToVirtual") \
  408. template(linkToStatic_name, "linkToStatic") \
  409. template(linkToSpecial_name, "linkToSpecial") \
  410. template(linkToInterface_name, "linkToInterface") \
  411. template(compiledLambdaForm_name, "<compiledLambdaForm>") /*fake name*/ \
  412. template(star_name, "*") /*not really a name*/ \
  413. template(invoke_name, "invoke") \
  414. template(override_name, "override") \
  415. template(parameterTypes_name, "parameterTypes") \
  416. template(returnType_name, "returnType") \
  417. template(signature_name, "signature") \
  418. template(slot_name, "slot") \
  419. \
  420. \
  421. template(annotations_name, "annotations") \
  422. template(index_name, "index") \
  423. template(executable_name, "executable") \
  424. template(parameter_annotations_name, "parameterAnnotations") \
  425. template(annotation_default_name, "annotationDefault") \
  426. template(sun_reflect_ConstantPool, "sun/reflect/ConstantPool") \
  427. template(ConstantPool_name, "constantPoolOop") \
  428. template(sun_reflect_UnsafeStaticFieldAccessorImpl, "sun/reflect/UnsafeStaticFieldAccessorImpl")\
  429. template(base_name, "base") \
  430. template(type_annotations_name, "typeAnnotations") \
  431. \
  432. \
  433. template(java_lang_invoke_CallSite, "java/lang/invoke/CallSite") \
  434. template(java_lang_invoke_ConstantCallSite, "java/lang/invoke/ConstantCallSite") \
  435. template(java_lang_invoke_DirectMethodHandle, "java/lang/invoke/DirectMethodHandle") \
  436. template(java_lang_invoke_MutableCallSite, "java/lang/invoke/MutableCallSite") \
  437. template(java_lang_invoke_VolatileCallSite, "java/lang/invoke/VolatileCallSite") \
  438. template(java_lang_invoke_MethodHandle, "java/lang/invoke/MethodHandle") \
  439. template(java_lang_invoke_MethodType, "java/lang/invoke/MethodType") \
  440. template(java_lang_invoke_MethodType_signature, "Ljava/lang/invoke/MethodType;") \
  441. template(java_lang_invoke_MemberName_signature, "Ljava/lang/invoke/MemberName;") \
  442. template(java_lang_invoke_LambdaForm_signature, "Ljava/lang/invoke/LambdaForm;") \
  443. template(java_lang_invoke_MethodHandle_signature, "Ljava/lang/invoke/MethodHandle;") \
  444. template(java_lang_invoke_MemberName, "java/lang/invoke/MemberName") \
  445. template(java_lang_invoke_MethodHandleNatives, "java/lang/invoke/MethodHandleNatives") \
  446. template(java_lang_invoke_LambdaForm, "java/lang/invoke/LambdaForm") \
  447. template(java_lang_invoke_ForceInline_signature, "Ljava/lang/invoke/ForceInline;") \
  448. template(java_lang_invoke_DontInline_signature, "Ljava/lang/invoke/DontInline;") \
  449. template(java_lang_invoke_InjectedProfile_signature, "Ljava/lang/invoke/InjectedProfile;") \
  450. template(java_lang_invoke_Stable_signature, "Ljava/lang/invoke/Stable;") \
  451. template(java_lang_invoke_LambdaForm_Compiled_signature, "Ljava/lang/invoke/LambdaForm$Compiled;") \
  452. template(java_lang_invoke_LambdaForm_Hidden_signature, "Ljava/lang/invoke/LambdaForm$Hidden;") \
  453. template(findMethodHandleType_name, "findMethodHandleType") \
  454. template(findMethodHandleType_signature, "(Ljava/lang/Class;[Ljava/lang/Class;)Ljava/lang/invoke/MethodType;") \
  455. template(linkMethodHandleConstant_name, "linkMethodHandleConstant") \
  456. template(linkMethodHandleConstant_signature, "(Ljava/lang/Class;ILjava/lang/Class;Ljava/lang/String;Ljava/lang/Object;)Ljava/lang/invoke/MethodHandle;") \
  457. template(linkMethod_name, "linkMethod") \
  458. template(linkMethod_signature, "(Ljava/lang/Class;ILjava/lang/Class;Ljava/lang/String;Ljava/lang/Object;[Ljava/lang/Object;)Ljava/lang/invoke/MemberName;") \
  459. template(linkCallSite_name, "linkCallSite") \
  460. template(linkCallSite_signature, "(Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;[Ljava/lang/Object;)Ljava/lang/invoke/MemberName;") \
  461. template(setTargetNormal_name, "setTargetNormal") \
  462. template(setTargetVolatile_name, "setTargetVolatile") \
  463. template(setTarget_signature, "(Ljava/lang/invoke/MethodHandle;)V") \
  464. NOT_LP64( do_alias(intptr_signature, int_signature) ) \
  465. LP64_ONLY( do_alias(intptr_signature, long_signature) ) \
  466. \
  467. template(object_initializer_name, "<init>") \
  468. template(class_initializer_name, "<clinit>") \
  469. template(println_name, "println") \
  470. template(printStackTrace_name, "printStackTrace") \
  471. template(main_name, "main") \
  472. template(name_name, "name") \
  473. template(priority_name, "priority") \
  474. template(stillborn_name, "stillborn") \
  475. template(group_name, "group") \
  476. template(daemon_name, "daemon") \
  477. template(eetop_name, "eetop") \
  478. template(thread_status_name, "threadStatus") \
  479. template(run_method_name, "run") \
  480. template(exit_method_name, "exit") \
  481. template(add_method_name, "add") \
  482. template(remove_method_name, "remove") \
  483. template(parent_name, "parent") \
  484. template(threads_name, "threads") \
  485. template(groups_name, "groups") \
  486. template(maxPriority_name, "maxPriority") \
  487. template(destroyed_name, "destroyed") \
  488. template(vmAllowSuspension_name, "vmAllowSuspension") \
  489. template(nthreads_name, "nthreads") \
  490. template(ngroups_name, "ngroups") \
  491. template(shutdown_method_name, "shutdown") \
  492. template(finalize_method_name, "finalize") \
  493. template(reference_lock_name, "lock") \
  494. template(reference_discovered_name, "discovered") \
  495. template(run_finalization_name, "runFinalization") \
  496. template(run_finalizers_on_exit_name, "runFinalizersOnExit") \
  497. template(uncaughtException_name, "uncaughtException") \
  498. template(dispatchUncaughtException_name, "dispatchUncaughtException") \
  499. template(initializeSystemClass_name, "initializeSystemClass") \
  500. template(loadClass_name, "loadClass") \
  501. template(loadClassInternal_name, "loadClassInternal") \
  502. template(get_name, "get") \
  503. template(put_name, "put") \
  504. template(type_name, "type") \
  505. template(findNative_name, "findNative") \
  506. template(deadChild_name, "deadChild") \
  507. template(addClass_name, "addClass") \
  508. template(throwIllegalAccessError_name, "throwIllegalAccessError") \
  509. template(getFromClass_name, "getFromClass") \
  510. template(dispatch_name, "dispatch") \
  511. template(getSystemClassLoader_name, "getSystemClassLoader") \
  512. template(fillInStackTrace_name, "fillInStackTrace") \
  513. template(fillInStackTrace0_name, "fillInStackTrace0") \
  514. template(getCause_name, "getCause") \
  515. template(initCause_name, "initCause") \
  516. template(setProperty_name, "setProperty") \
  517. template(getProperty_name, "getProperty") \
  518. template(context_name, "context") \
  519. template(privilegedContext_name, "privilegedContext") \
  520. template(contextClassLoader_name, "contextClassLoader") \
  521. template(inheritedAccessControlContext_name, "inheritedAccessControlContext") \
  522. template(isPrivileged_name, "isPrivileged") \
  523. template(isAuthorized_name, "isAuthorized") \
  524. template(getClassContext_name, "getClassContext") \
  525. template(wait_name, "wait") \
  526. template(checkPackageAccess_name, "checkPackageAccess") \
  527. template(stackSize_name, "stackSize") \
  528. template(thread_id_name, "tid") \
  529. template(newInstance0_name, "newInstance0") \
  530. template(limit_name, "limit") \
  531. template(member_name, "member") \
  532. template(forName_name, "forName") \
  533. template(forName0_name, "forName0") \
  534. template(isJavaIdentifierStart_name, "isJavaIdentifierStart") \
  535. template(isJavaIdentifierPart_name, "isJavaIdentifierPart") \
  536. template(exclusive_owner_thread_name, "exclusiveOwnerThread") \
  537. template(park_blocker_name, "parkBlocker") \
  538. template(park_event_name, "nativeParkEventPointer") \
  539. template(cache_field_name, "cache") \
  540. template(value_name, "value") \
  541. template(offset_name, "offset") \
  542. template(count_name, "count") \
  543. template(hash_name, "hash") \
  544. template(numberOfLeadingZeros_name, "numberOfLeadingZeros") \
  545. template(numberOfTrailingZeros_name, "numberOfTrailingZeros") \
  546. template(bitCount_name, "bitCount") \
  547. template(profile_name, "profile") \
  548. template(equals_name, "equals") \
  549. template(target_name, "target") \
  550. template(toString_name, "toString") \
  551. template(values_name, "values") \
  552. template(receiver_name, "receiver") \
  553. template(vmtarget_name, "vmtarget") \
  554. template(next_target_name, "next_target") \
  555. template(vmloader_name, "vmloader") \
  556. template(vmindex_name, "vmindex") \
  557. template(vmcount_name, "vmcount") \
  558. template(vmentry_name, "vmentry") \
  559. template(flags_name, "flags") \
  560. template(rtype_name, "rtype") \
  561. template(ptypes_name, "ptypes") \
  562. template(form_name, "form") \
  563. template(basicType_name, "basicType") \
  564. template(append_name, "append") \
  565. template(klass_name, "klass") \
  566. template(array_klass_name, "array_klass") \
  567. template(oop_size_name, "oop_size") \
  568. template(static_oop_field_count_name, "static_oop_field_count") \
  569. template(protection_domain_name, "protection_domain") \
  570. template(init_lock_name, "init_lock") \
  571. template(signers_name, "signers_name") \
  572. template(loader_data_name, "loader_data") \
  573. template(dependencies_name, "dependencies") \
  574. template(input_stream_void_signature, "(Ljava/io/InputStream;)V") \
  575. template(getFileURL_name, "getFileURL") \
  576. template(getFileURL_signature, "(Ljava/io/File;)Ljava/net/URL;") \
  577. template(definePackageInternal_name, "definePackageInternal") \
  578. template(definePackageInternal_signature, "(Ljava/lang/String;Ljava/util/jar/Manifest;Ljava/net/URL;)V") \
  579. template(getProtectionDomain_name, "getProtectionDomain") \
  580. template(getProtectionDomain_signature, "(Ljava/security/CodeSource;)Ljava/security/ProtectionDomain;") \
  581. template(url_code_signer_array_void_signature, "(Ljava/net/URL;[Ljava/security/CodeSigner;)V") \
  582. template(resolved_references_name, "<resolved_references>") \
  583. template(referencequeue_null_name, "NULL") \
  584. template(referencequeue_enqueued_name, "ENQUEUED") \
  585. \
  586. template(register_method_name, "register") \
  587. do_alias(register_method_signature, object_void_signature) \
  588. \
  589. VM_INTRINSICS_DO(VM_INTRINSIC_IGNORE, VM_SYMBOL_IGNORE, template, VM_SYMBOL_IGNORE, VM_ALIAS_IGNORE) \
  590. \
  591. template(void_method_signature, "()V") \
  592. template(void_boolean_signature, "()Z") \
  593. template(void_byte_signature, "()B") \
  594. template(void_char_signature, "()C") \
  595. template(void_short_signature, "()S") \
  596. template(void_int_signature, "()I") \
  597. template(void_long_signature, "()J") \
  598. template(void_float_signature, "()F") \
  599. template(void_double_signature, "()D") \
  600. template(int_void_signature, "(I)V") \
  601. template(int_int_signature, "(I)I") \
  602. template(char_char_signature, "(C)C") \
  603. template(short_short_signature, "(S)S") \
  604. template(int_bool_signature, "(I)Z") \
  605. template(float_int_signature, "(F)I") \
  606. template(double_long_signature, "(D)J") \
  607. template(double_double_signature, "(D)D") \
  608. template(int_float_signature, "(I)F") \
  609. template(long_int_signature, "(J)I") \
  610. template(long_long_signature, "(J)J") \
  611. template(long_double_signature, "(J)D") \
  612. template(byte_signature, "B") \
  613. template(char_signature, "C") \
  614. template(double_signature, "D") \
  615. template(float_signature, "F") \
  616. template(int_signature, "I") \
  617. template(long_signature, "J") \
  618. template(short_signature, "S") \
  619. template(bool_signature, "Z") \
  620. template(void_signature, "V") \
  621. template(byte_array_signature, "[B") \
  622. template(char_array_signature, "[C") \
  623. template(int_array_signature, "[I") \
  624. template(object_void_signature, "(Ljava/lang/Object;)V") \
  625. template(object_int_signature, "(Ljava/lang/Object;)I") \
  626. template(object_boolean_signature, "(Ljava/lang/Object;)Z") \
  627. template(string_void_signature, "(Ljava/lang/String;)V") \
  628. template(string_int_signature, "(Ljava/lang/String;)I") \
  629. template(throwable_void_signature, "(Ljava/lang/Throwable;)V") \
  630. template(void_throwable_signature, "()Ljava/lang/Throwable;") \
  631. template(throwable_throwable_signature, "(Ljava/lang/Throwable;)Ljava/lang/Throwable;") \
  632. template(class_void_signature, "(Ljava/lang/Class;)V") \
  633. template(class_int_signature, "(Ljava/lang/Class;)I") \
  634. template(class_long_signature, "(Ljava/lang/Class;)J") \
  635. template(class_boolean_signature, "(Ljava/lang/Class;)Z") \
  636. template(throwable_string_void_signature, "(Ljava/lang/Throwable;Ljava/lang/String;)V") \
  637. template(string_array_void_signature, "([Ljava/lang/String;)V") \
  638. template(string_array_string_array_void_signature, "([Ljava/lang/String;[Ljava/lang/String;)V") \
  639. template(thread_throwable_void_signature, "(Ljava/lang/Thread;Ljava/lang/Throwable;)V") \
  640. template(thread_void_signature, "(Ljava/lang/Thread;)V") \
  641. template(threadgroup_runnable_void_signature, "(Ljava/lang/ThreadGroup;Ljava/lang/Runnable;)V") \
  642. template(threadgroup_string_void_signature, "(Ljava/lang/ThreadGroup;Ljava/lang/String;)V") \
  643. template(string_class_signature, "(Ljava/lang/String;)Ljava/lang/Class;") \
  644. template(object_object_object_signature, "(Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;") \
  645. template(string_string_string_signature, "(Ljava/lang/String;Ljava/lang/String;)Ljava/lang/String;") \
  646. template(string_string_signature, "(Ljava/lang/String;)Ljava/lang/String;") \
  647. template(classloader_string_long_signature, "(Ljava/lang/ClassLoader;Ljava/lang/String;)J") \
  648. template(byte_array_void_signature, "([B)V") \
  649. template(char_array_void_signature, "([C)V") \
  650. template(int_int_void_signature, "(II)V") \
  651. template(long_long_void_signature, "(JJ)V") \
  652. template(void_classloader_signature, "()Ljava/lang/ClassLoader;") \
  653. template(void_object_signature, "()Ljava/lang/Object;") \
  654. template(void_class_signature, "()Ljava/lang/Class;") \
  655. template(void_class_array_signature, "()[Ljava/lang/Class;") \
  656. template(void_string_signature, "()Ljava/lang/String;") \
  657. template(object_array_object_signature, "([Ljava/lang/Object;)Ljava/lang/Object;") \
  658. template(object_object_array_object_signature, "(Ljava/lang/Object;[Ljava/lang/Object;)Ljava/lang/Object;")\
  659. template(exception_void_signature, "(Ljava/lang/Exception;)V") \
  660. template(protectiondomain_signature, "[Ljava/security/ProtectionDomain;") \
  661. template(accesscontrolcontext_signature, "Ljava/security/AccessControlContext;") \
  662. template(class_protectiondomain_signature, "(Ljava/lang/Class;Ljava/security/ProtectionDomain;)V") \
  663. template(thread_signature, "Ljava/lang/Thread;") \
  664. template(thread_array_signature, "[Ljava/lang/Thread;") \
  665. template(threadgroup_signature, "Ljava/lang/ThreadGroup;") \
  666. template(threadgroup_array_signature, "[Ljava/lang/ThreadGroup;") \
  667. template(class_array_signature, "[Ljava/lang/Class;") \
  668. template(classloader_signature, "Ljava/lang/ClassLoader;") \
  669. template(object_signature, "Ljava/lang/Object;") \
  670. template(object_array_signature, "[Ljava/lang/Object;") \
  671. template(class_signature, "Ljava/lang/Class;") \
  672. template(string_signature, "Ljava/lang/String;") \
  673. template(reference_signature, "Ljava/lang/ref/Reference;") \
  674. template(referencequeue_signature, "Ljava/lang/ref/ReferenceQueue;") \
  675. template(executable_signature, "Ljava/lang/reflect/Executable;") \
  676. template(concurrenthashmap_signature, "Ljava/util/concurrent/ConcurrentHashMap;") \
  677. template(String_StringBuilder_signature, "(Ljava/lang/String;)Ljava/lang/StringBuilder;") \
  678. template(int_StringBuilder_signature, "(I)Ljava/lang/StringBuilder;") \
  679. template(char_StringBuilder_signature, "(C)Ljava/lang/StringBuilder;") \
  680. template(String_StringBuffer_signature, "(Ljava/lang/String;)Ljava/lang/StringBuffer;") \
  681. template(int_StringBuffer_signature, "(I)Ljava/lang/StringBuffer;") \
  682. template(char_StringBuffer_signature, "(C)Ljava/lang/StringBuffer;") \
  683. template(int_String_signature, "(I)Ljava/lang/String;") \
  684. template(codesource_permissioncollection_signature, "(Ljava/security/CodeSource;Ljava/security/PermissionCollection;)V") \
  685. VM_INTRINSICS_DO(VM_INTRINSIC_IGNORE, VM_SYMBOL_IGNORE, VM_SYMBOL_IGNORE, template, VM_ALIAS_IGNORE) \
  686. \
  687. VM_INTRINSICS_DO(VM_INTRINSIC_IGNORE, VM_SYMBOL_IGNORE, VM_SYMBOL_IGNORE, VM_SYMBOL_IGNORE, do_alias) \
  688. \
  689. template(dummy_symbol, "illegal symbol") \
  690. \
  691. template(unknown_class_name, "<Unknown>") \
  692. \
  693. template(parallelCapable_name, "parallelLockMap") \
  694. \
  695. template(java_lang_StackTraceElement_array, "[Ljava/lang/StackTraceElement;") \
  696. template(java_lang_management_ThreadState, "java/lang/management/ThreadState") \
  697. template(java_lang_management_MemoryUsage, "java/lang/management/MemoryUsage") \
  698. template(java_lang_management_ThreadInfo, "java/lang/management/ThreadInfo") \
  699. template(sun_management_ManagementFactory, "sun/management/ManagementFactory") \
  700. template(sun_management_Sensor, "sun/management/Sensor") \
  701. template(sun_management_Agent, "sun/management/Agent") \
  702. template(sun_management_DiagnosticCommandImpl, "sun/management/DiagnosticCommandImpl") \
  703. template(sun_management_GarbageCollectorImpl, "sun/management/GarbageCollectorImpl") \
  704. template(sun_management_ManagementFactoryHelper, "sun/management/ManagementFactoryHelper") \
  705. template(getDiagnosticCommandMBean_name, "getDiagnosticCommandMBean") \
  706. template(getDiagnosticCommandMBean_signature, "()Lcom/sun/management/DiagnosticCommandMBean;") \
  707. template(getGcInfoBuilder_name, "getGcInfoBuilder") \
  708. template(getGcInfoBuilder_signature, "()Lsun/management/GcInfoBuilder;") \
  709. template(com_sun_management_GcInfo, "com/sun/management/GcInfo") \
  710. template(com_sun_management_GcInfo_constructor_signature, "(Lsun/management/GcInfoBuilder;JJJ[Ljava/lang/management/MemoryUsage;[Ljava/lang/management/MemoryUsage;[Ljava/lang/Object;)V") \
  711. template(createGCNotification_name, "createGCNotification") \
  712. template(createGCNotification_signature, "(JLjava/lang/String;Ljava/lang/String;Ljava/lang/String;Lcom/sun/management/GcInfo;)V") \
  713. template(createDiagnosticFrameworkNotification_name, "createDiagnosticFrameworkNotification") \
  714. template(createMemoryPoolMBean_name, "createMemoryPoolMBean") \
  715. template(createMemoryManagerMBean_name, "createMemoryManagerMBean") \
  716. template(createGarbageCollectorMBean_name, "createGarbageCollectorMBean") \
  717. template(createMemoryPoolMBean_signature, "(Ljava/lang/String;ZJJ)Ljava/lang/management/MemoryPoolMBean;") \
  718. template(createMemoryManagerMBean_signature, "(Ljava/lang/String;)Ljava/lang/management/MemoryManagerMBean;") \
  719. template(createGarbageCollectorMBean_signature, "(Ljava/lang/String;Ljava/lang/String;)Ljava/lang/management/GarbageCollectorMBean;") \
  720. template(trigger_name, "trigger") \
  721. template(clear_name, "clear") \
  722. template(trigger_method_signature, "(ILjava/lang/management/MemoryUsage;)V") \
  723. template(startAgent_name, "startAgent") \
  724. template(startRemoteAgent_name, "startRemoteManagementAgent") \
  725. template(startLocalAgent_name, "startLocalManagementAgent") \
  726. template(stopRemoteAgent_name, "stopRemoteManagementAgent") \
  727. template(java_lang_management_ThreadInfo_constructor_signature, "(Ljava/lang/Thread;ILjava/lang/Object;Ljava/lang/Thread;JJJJ[Ljava/lang/StackTraceElement;)V") \
  728. template(java_lang_management_ThreadInfo_with_locks_constructor_signature, "(Ljava/lang/Thread;ILjava/lang/Object;Ljava/lang/Thread;JJJJ[Ljava/lang/StackTraceElement;[Ljava/lang/Object;[I[Ljava/lang/Object;)V") \
  729. template(long_long_long_long_void_signature, "(JJJJ)V") \
  730. template(finalizer_histogram_klass, "java/lang/ref/FinalizerHistogram") \
  731. template(void_finalizer_histogram_entry_array_signature, "()[Ljava/lang/ref/FinalizerHistogram$Entry;") \
  732. template(get_finalizer_histogram_name, "getFinalizerHistogram") \
  733. template(finalizer_histogram_entry_name_field, "className") \
  734. template(finalizer_histogram_entry_count_field, "instanceCount") \
  735. \
  736. template(java_lang_management_MemoryPoolMXBean, "java/lang/management/MemoryPoolMXBean") \
  737. template(java_lang_management_MemoryManagerMXBean, "java/lang/management/MemoryManagerMXBean") \
  738. template(java_lang_management_GarbageCollectorMXBean,"java/lang/management/GarbageCollectorMXBean") \
  739. template(gcInfoBuilder_name, "gcInfoBuilder") \
  740. template(createMemoryPool_name, "createMemoryPool") \
  741. template(createMemoryManager_name, "createMemoryManager") \
  742. template(createGarbageCollector_name, "createGarbageCollector") \
  743. template(createMemoryPool_signature, "(Ljava/lang/String;ZJJ)Ljava/lang/management/MemoryPoolMXBean;") \
  744. template(createMemoryManager_signature, "(Ljava/lang/String;)Ljava/lang/management/MemoryManagerMXBean;") \
  745. template(createGarbageCollector_signature, "(Ljava/lang/String;Ljava/lang/String;)Ljava/lang/management/GarbageCollectorMXBean;") \
  746. template(addThreadDumpForMonitors_name, "addThreadDumpForMonitors") \
  747. template(addThreadDumpForSynchronizers_name, "addThreadDumpForSynchronizers") \
  748. template(addThreadDumpForMonitors_signature, "(Ljava/lang/management/ThreadInfo;[Ljava/lang/Object;[I)V") \
  749. template(addThreadDumpForSynchronizers_signature, "(Ljava/lang/management/ThreadInfo;[Ljava/lang/Object;)V") \
  750. \
  751. template(sun_misc_VMSupport, "sun/misc/VMSupport") \
  752. template(appendToClassPathForInstrumentation_name, "appendToClassPathForInstrumentation") \
  753. do_alias(appendToClassPathForInstrumentation_signature, string_void_signature) \
  754. template(serializePropertiesToByteArray_name, "serializePropertiesToByteArray") \
  755. template(serializePropertiesToByteArray_signature, "()[B") \
  756. template(serializeAgentPropertiesToByteArray_name, "serializeAgentPropertiesToByteArray") \
  757. template(classRedefinedCount_name, "classRedefinedCount") \
  758. template(classLoader_name, "classLoader") \
  759. \
  760. JFR_TEMPLATES(template) \
  761. \
  762. #define VM_INTRINSICS_DO(do_intrinsic, do_class, do_name, do_signature, do_alias) \
  763. do_intrinsic(_hashCode, java_lang_Object, hashCode_name, void_int_signature, F_R) \
  764. do_name( hashCode_name, "hashCode") \
  765. do_intrinsic(_getClass, java_lang_Object, getClass_name, void_class_signature, F_R) \
  766. do_name( getClass_name, "getClass") \
  767. do_intrinsic(_clone, java_lang_Object, clone_name, void_object_signature, F_R) \
  768. do_name( clone_name, "clone") \
  769. \
  770. do_class(java_lang_Math, "java/lang/Math") \
  771. do_class(java_lang_StrictMath, "java/lang/StrictMath") \
  772. do_signature(double2_double_signature, "(DD)D") \
  773. do_signature(int2_int_signature, "(II)I") \
  774. do_signature(long2_long_signature, "(JJ)J") \
  775. \
  776. do_name(abs_name,"abs") do_name(sin_name,"sin") do_name(cos_name,"cos") \
  777. do_name(tan_name,"tan") do_name(atan2_name,"atan2") do_name(sqrt_name,"sqrt") \
  778. do_name(log_name,"log") do_name(log10_name,"log10") do_name(pow_name,"pow") \
  779. do_name(exp_name,"exp") do_name(min_name,"min") do_name(max_name,"max") \
  780. \
  781. do_name(addExact_name,"addExact") \
  782. do_name(decrementExact_name,"decrementExact") \
  783. do_name(incrementExact_name,"incrementExact") \
  784. do_name(multiplyExact_name,"multiplyExact") \
  785. do_name(negateExact_name,"negateExact") \
  786. do_name(subtractExact_name,"subtractExact") \
  787. \
  788. do_intrinsic(_dabs, java_lang_Math, abs_name, double_double_signature, F_S) \
  789. do_intrinsic(_dsin, java_lang_Math, sin_name, double_double_signature, F_S) \
  790. do_intrinsic(_dcos, java_lang_Math, cos_name, double_double_signature, F_S) \
  791. do_intrinsic(_dtan, java_lang_Math, tan_name, double_double_signature, F_S) \
  792. do_intrinsic(_datan2, java_lang_Math, atan2_name, double2_double_signature, F_S) \
  793. do_intrinsic(_dsqrt, java_lang_Math, sqrt_name, double_double_signature, F_S) \
  794. do_intrinsic(_dlog, java_lang_Math, log_name, double_double_signature, F_S) \
  795. do_intrinsic(_dlog10, java_lang_Math, log10_name, double_double_signature, F_S) \
  796. do_intrinsic(_dpow, java_lang_Math, pow_name, double2_double_signature, F_S) \
  797. do_intrinsic(_dexp, java_lang_Math, exp_name, double_double_signature, F_S) \
  798. do_intrinsic(_min, java_lang_Math, min_name, int2_int_signature, F_S) \
  799. do_intrinsic(_max, java_lang_Math, max_name, int2_int_signature, F_S) \
  800. do_intrinsic(_addExactI, java_lang_Math, addExact_name, int2_int_signature, F_S) \
  801. do_intrinsic(_addExactL, java_lang_Math, addExact_name, long2_long_signature, F_S) \
  802. do_intrinsic(_decrementExactI, java_lang_Math, decrementExact_name, int_int_signature, F_S) \
  803. do_intrinsic(_decrementExactL, java_lang_Math, decrementExact_name, long_long_signature, F_S) \
  804. do_intrinsic(_incrementExactI, java_lang_Math, incrementExact_name, int_int_signature, F_S) \
  805. do_intrinsic(_incrementExactL, java_lang_Math, incrementExact_name, long_long_signature, F_S) \
  806. do_intrinsic(_multiplyExactI, java_lang_Math, multiplyExact_name, int2_int_signature, F_S) \
  807. do_intrinsic(_multiplyExactL, java_lang_Math, multiplyExact_name, long2_long_signature, F_S) \
  808. do_intrinsic(_negateExactI, java_lang_Math, negateExact_name, int_int_signature, F_S) \
  809. do_intrinsic(_negateExactL, java_lang_Math, negateExact_name, long_long_signature, F_S) \
  810. do_intrinsic(_subtractExactI, java_lang_Math, subtractExact_name, int2_int_signature, F_S) \
  811. do_intrinsic(_subtractExactL, java_lang_Math, subtractExact_name, long2_long_signature, F_S) \
  812. \
  813. do_intrinsic(_floatToRawIntBits, java_lang_Float, floatToRawIntBits_name, float_int_signature, F_S) \
  814. do_name( floatToRawIntBits_name, "floatToRawIntBits") \
  815. do_intrinsic(_floatToIntBits, java_lang_Float, floatToIntBits_name, float_int_signature, F_S) \
  816. do_name( floatToIntBits_name, "floatToIntBits") \
  817. do_intrinsic(_intBitsToFloat, java_lang_Float, intBitsToFloat_name, int_float_signature, F_S) \
  818. do_name( intBitsToFloat_name, "intBitsToFloat") \
  819. do_intrinsic(_doubleToRawLongBits, java_lang_Double, doubleToRawLongBits_name, double_long_signature, F_S) \
  820. do_name( doubleToRawLongBits_name, "doubleToRawLongBits") \
  821. do_intrinsic(_doubleToLongBits, java_lang_Double, doubleToLongBits_name, double_long_signature, F_S) \
  822. do_name( doubleToLongBits_name, "doubleToLongBits") \
  823. do_intrinsic(_longBitsToDouble, java_lang_Double, longBitsToDouble_name, long_double_signature, F_S) \
  824. do_name( longBitsToDouble_name, "longBitsToDouble") \
  825. \
  826. do_intrinsic(_numberOfLeadingZeros_i, java_lang_Integer, numberOfLeadingZeros_name,int_int_signature, F_S) \
  827. do_intrinsic(_numberOfLeadingZeros_l, java_lang_Long, numberOfLeadingZeros_name,long_int_signature, F_S) \
  828. \
  829. do_intrinsic(_numberOfTrailingZeros_i, java_lang_Integer, numberOfTrailingZeros_name,int_int_signature, F_S) \
  830. do_intrinsic(_numberOfTrailingZeros_l, java_lang_Long, numberOfTrailingZeros_name,long_int_signature, F_S) \
  831. \
  832. do_intrinsic(_bitCount_i, java_lang_Integer, bitCount_name, int_int_signature, F_S) \
  833. do_intrinsic(_bitCount_l, java_lang_Long, bitCount_name, long_int_signature, F_S) \
  834. \
  835. do_intrinsic(_reverseBytes_i, java_lang_Integer, reverseBytes_name, int_int_signature, F_S) \
  836. do_name( reverseBytes_name, "reverseBytes") \
  837. do_intrinsic(_reverseBytes_l, java_lang_Long, reverseBytes_name, long_long_signature, F_S) \
  838. do_intrinsic(_reverseBytes_c, java_lang_Character, reverseBytes_name, char_char_signature, F_S) \
  839. do_intrinsic(_reverseBytes_s, java_lang_Short, reverseBytes_name, short_short_signature, F_S) \
  840. \
  841. do_intrinsic(_identityHashCode, java_lang_System, identityHashCode_name, object_int_signature, F_S) \
  842. do_name( identityHashCode_name, "identityHashCode") \
  843. do_intrinsic(_currentTimeMillis, java_lang_System, currentTimeMillis_name, void_long_signature, F_S) \
  844. \
  845. do_name( currentTimeMillis_name, "currentTimeMillis") \
  846. do_intrinsic(_nanoTime, java_lang_System, nanoTime_name, void_long_signature, F_S) \
  847. do_name( nanoTime_name, "nanoTime") \
  848. \
  849. JFR_INTRINSICS(do_intrinsic, do_class, do_name, do_signature, do_alias) \
  850. \
  851. do_intrinsic(_arraycopy, java_lang_System, arraycopy_name, arraycopy_signature, F_S) \
  852. do_name( arraycopy_name, "arraycopy") \
  853. do_signature(arraycopy_signature, "(Ljava/lang/Object;ILjava/lang/Object;II)V") \
  854. do_intrinsic(_isInterrupted, java_lang_Thread, isInterrupted_name, isInterrupted_signature, F_R) \
  855. do_name( isInterrupted_name, "isInterrupted") \
  856. do_signature(isInterrupted_signature, "(Z)Z") \
  857. do_intrinsic(_currentThread, java_lang_Thread, currentThread_name, currentThread_signature, F_S) \
  858. do_name( currentThread_name, "currentThread") \
  859. do_signature(currentThread_signature, "()Ljava/lang/Thread;") \
  860. \
  861. do_intrinsic(_isAssignableFrom, java_lang_Class, isAssignableFrom_name, class_boolean_signature, F_RN) \
  862. do_name( isAssignableFrom_name, "isAssignableFrom") \
  863. do_intrinsic(_isInstance, java_lang_Class, isInstance_name, object_boolean_signature, F_RN) \
  864. do_name( isInstance_name, "isInstance") \
  865. do_intrinsic(_getModifiers, java_lang_Class, getModifiers_name, void_int_signature, F_RN) \
  866. do_name( getModifiers_name, "getModifiers") \
  867. do_intrinsic(_isInterface, java_lang_Class, isInterface_name, void_boolean_signature, F_RN) \
  868. do_name( isInterface_name, "isInterface") \
  869. do_intrinsic(_isArray, java_lang_Class, isArray_name, void_boolean_signature, F_RN) \
  870. do_name( isArray_name, "isArray") \
  871. do_intrinsic(_isPrimitive, java_lang_Class, isPrimitive_name, void_boolean_signature, F_RN) \
  872. do_name( isPrimitive_name, "isPrimitive") \
  873. do_intrinsic(_getSuperclass, java_lang_Class, getSuperclass_name, void_class_signature, F_RN) \
  874. do_name( getSuperclass_name, "getSuperclass") \
  875. do_intrinsic(_getComponentType, java_lang_Class, getComponentType_name, void_class_signature, F_RN) \
  876. do_name( getComponentType_name, "getComponentType") \
  877. \
  878. do_intrinsic(_getClassAccessFlags, sun_reflect_Reflection, getClassAccessFlags_name, class_int_signature, F_SN) \
  879. do_name( getClassAccessFlags_name, "getClassAccessFlags") \
  880. do_intrinsic(_getLength, java_lang_reflect_Array, getLength_name, object_int_signature, F_SN) \
  881. do_name( getLength_name, "getLength") \
  882. \
  883. do_intrinsic(_getCallerClass, sun_reflect_Reflection, getCallerClass_name, void_class_signature, F_SN) \
  884. do_name( getCallerClass_name, "getCallerClass") \
  885. \
  886. do_intrinsic(_newArray, java_lang_reflect_Array, newArray_name, newArray_signature, F_SN) \
  887. do_name( newArray_name, "newArray") \
  888. do_signature(newArray_signature, "(Ljava/lang/Class;I)Ljava/lang/Object;") \
  889. \
  890. do_intrinsic(_copyOf, java_util_Arrays, copyOf_name, copyOf_signature, F_S) \
  891. do_name( copyOf_name, "copyOf") \
  892. do_signature(copyOf_signature, "([Ljava/lang/Object;ILjava/lang/Class;)[Ljava/lang/Object;") \
  893. \
  894. do_intrinsic(_copyOfRange, java_util_Arrays, copyOfRange_name, copyOfRange_signature, F_S) \
  895. do_name( copyOfRange_name, "copyOfRange") \
  896. do_signature(copyOfRange_signature, "([Ljava/lang/Object;IILjava/lang/Class;)[Ljava/lang/Object;") \
  897. \
  898. do_intrinsic(_equalsC, java_util_Arrays, equals_name, equalsC_signature, F_S) \
  899. do_signature(equalsC_signature, "([C[C)Z") \
  900. \
  901. do_intrinsic(_compareTo, java_lang_String, compareTo_name, string_int_signature, F_R) \
  902. do_name( compareTo_name, "compareTo") \
  903. do_intrinsic(_indexOf, java_lang_String, indexOf_name, string_int_signature, F_R) \
  904. do_name( indexOf_name, "indexOf") \
  905. do_intrinsic(_equals, java_lang_String, equals_name, object_boolean_signature, F_R) \
  906. \
  907. do_class(java_nio_Buffer, "java/nio/Buffer") \
  908. do_intrinsic(_checkIndex, java_nio_Buffer, checkIndex_name, int_int_signature, F_R) \
  909. do_name( checkIndex_name, "checkIndex") \
  910. \
  911. do_class(sun_nio_cs_iso8859_1_Encoder, "sun/nio/cs/ISO_8859_1$Encoder") \
  912. do_intrinsic(_encodeISOArray, sun_nio_cs_iso8859_1_Encoder, encodeISOArray_name, encodeISOArray_signature, F_S) \
  913. do_name( encodeISOArray_name, "encodeISOArray") \
  914. do_signature(encodeISOArray_signature, "([CI[BII)I") \
  915. \
  916. do_class(java_math_BigInteger, "java/math/BigInteger") \
  917. do_intrinsic(_multiplyToLen, java_math_BigInteger, multiplyToLen_name, multiplyToLen_signature, F_S) \
  918. do_name( multiplyToLen_name, "multiplyToLen") \
  919. do_signature(multiplyToLen_signature, "([II[II[I)[I") \
  920. \
  921. do_intrinsic(_squareToLen, java_math_BigInteger, squareToLen_name, squareToLen_signature, F_S) \
  922. do_name( squareToLen_name, "implSquareToLen") \
  923. do_signature(squareToLen_signature, "([II[II)[I") \
  924. \
  925. do_intrinsic(_mulAdd, java_math_BigInteger, mulAdd_name, mulAdd_signature, F_S) \
  926. do_name( mulAdd_name, "implMulAdd") \
  927. do_signature(mulAdd_signature, "([I[IIII)I") \
  928. \
  929. do_intrinsic(_montgomeryMultiply, java_math_BigInteger, montgomeryMultiply_name, montgomeryMultiply_signature, F_S) \
  930. do_name( montgomeryMultiply_name, "implMontgomeryMultiply") \
  931. do_signature(montgomeryMultiply_signature, "([I[I[IIJ[I)[I") \
  932. \
  933. do_intrinsic(_montgomerySquare, java_math_BigInteger, montgomerySquare_name, montgomerySquare_signature, F_S) \
  934. do_name( montgomerySquare_name, "implMontgomerySquare") \
  935. do_signature(montgomerySquare_signature, "([I[IIJ[I)[I") \
  936. \
  937. do_intrinsic(_Reference_get, java_lang_ref_Reference, get_name, void_object_signature, F_R) \
  938. \
  939. do_class(com_sun_crypto_provider_aescrypt, "com/sun/crypto/provider/AESCrypt") \
  940. do_intrinsic(_aescrypt_encryptBlock, com_sun_crypto_provider_aescrypt, encryptBlock_name, byteArray_int_byteArray_int_signature, F_R) \
  941. do_intrinsic(_aescrypt_decryptBlock, com_sun_crypto_provider_aescrypt, decryptBlock_name, byteArray_int_byteArray_int_signature, F_R) \
  942. do_name( encryptBlock_name, "implEncryptBlock") \
  943. do_name( decryptBlock_name, "implDecryptBlock") \
  944. do_signature(byteArray_int_byteArray_int_signature, "([BI[BI)V") \
  945. \
  946. do_class(com_sun_crypto_provider_cipherBlockChaining, "com/sun/crypto/provider/CipherBlockChaining") \
  947. do_intrinsic(_cipherBlockChaining_encryptAESCrypt, com_sun_crypto_provider_cipherBlockChaining, encrypt_name, byteArray_int_int_byteArray_int_signature, F_R) \
  948. do_intrinsic(_cipherBlockChaining_decryptAESCrypt, com_sun_crypto_provider_cipherBlockChaining, decrypt_name, byteArray_int_int_byteArray_int_signature, F_R) \
  949. do_name( encrypt_name, "implEncrypt") \
  950. do_name( decrypt_name, "implDecrypt") \
  951. do_signature(byteArray_int_int_byteArray_int_signature, "([BII[BI)I") \
  952. \
  953. do_class(sun_security_provider_sha, "sun/security/provider/SHA") \
  954. do_intrinsic(_sha_implCompress, sun_security_provider_sha, implCompress_name, implCompress_signature, F_R) \
  955. do_name( implCompress_name, "implCompress0") \
  956. do_signature(implCompress_signature, "([BI)V") \
  957. \
  958. do_class(sun_security_provider_sha2, "sun/security/provider/SHA2") \
  959. do_intrinsic(_sha2_implCompress, sun_security_provider_sha2, implCompress_name, implCompress_signature, F_R) \
  960. \
  961. do_class(sun_security_provider_sha5, "sun/security/provider/SHA5") \
  962. do_intrinsic(_sha5_implCompress, sun_security_provider_sha5, implCompress_name, implCompress_signature, F_R) \
  963. \
  964. do_class(sun_security_provider_digestbase, "sun/security/provider/DigestBase") \
  965. do_intrinsic(_digestBase_implCompressMB, sun_security_provider_digestbase, implCompressMB_name, implCompressMB_signature, F_R) \
  966. do_name( implCompressMB_name, "implCompressMultiBlock0") \
  967. do_signature(implCompressMB_signature, "([BII)I") \
  968. \
  969. do_class(com_sun_crypto_provider_ghash, "com/sun/crypto/provider/GHASH") \
  970. do_intrinsic(_ghash_processBlocks, com_sun_crypto_provider_ghash, processBlocks_name, ghash_processBlocks_signature, F_S) \
  971. do_name(processBlocks_name, "processBlocks") \
  972. do_signature(ghash_processBlocks_signature, "([BII[J[J)V") \
  973. \
  974. do_class(java_util_zip_CRC32, "java/util/zip/CRC32") \
  975. do_intrinsic(_updateCRC32, java_util_zip_CRC32, update_name, int2_int_signature, F_SN) \
  976. do_name( update_name, "update") \
  977. do_intrinsic(_updateBytesCRC32, java_util_zip_CRC32, updateBytes_name, updateBytes_signature, F_SN) \
  978. do_name( updateBytes_name, "updateBytes") \
  979. do_signature(updateBytes_signature, "(I[BII)I") \
  980. do_intrinsic(_updateByteBufferCRC32, java_util_zip_CRC32, updateByteBuffer_name, updateByteBuffer_signature, F_SN) \
  981. do_name( updateByteBuffer_name, "updateByteBuffer") \
  982. do_signature(updateByteBuffer_signature, "(IJII)I") \
  983. \
  984. do_class(sun_misc_Unsafe, "sun/misc/Unsafe") \
  985. \
  986. do_intrinsic(_allocateInstance, sun_misc_Unsafe, allocateInstance_name, allocateInstance_signature, F_RN) \
  987. do_name( allocateInstance_name, "allocateInstance") \
  988. do_signature(allocateInstance_signature, "(Ljava/lang/Class;)Ljava/lang/Object;") \
  989. do_intrinsic(_copyMemory, sun_misc_Unsafe, copyMemory_name, copyMemory_signature, F_RN) \
  990. do_name( copyMemory_name, "copyMemory") \
  991. do_signature(copyMemory_signature, "(Ljava/lang/Object;JLjava/lang/Object;JJ)V") \
  992. do_intrinsic(_park, sun_misc_Unsafe, park_name, park_signature, F_RN) \
  993. do_name( park_name, "park") \
  994. do_signature(park_signature, "(ZJ)V") \
  995. do_intrinsic(_unpark, sun_misc_Unsafe, unpark_name, unpark_signature, F_RN) \
  996. do_name( unpark_name, "unpark") \
  997. do_alias( unpark_signature, /*(LObject;)V*/ object_void_signature) \
  998. do_intrinsic(_loadFence, sun_misc_Unsafe, loadFence_name, loadFence_signature, F_RN) \
  999. do_name( loadFence_name, "loadFence") \
  1000. do_alias( loadFence_signature, void_method_signature) \
  1001. do_intrinsic(_storeFence, sun_misc_Unsafe, storeFence_name, storeFence_signature, F_RN) \
  1002. do_name( storeFence_name, "storeFence") \
  1003. do_alias( storeFence_signature, void_method_signature) \
  1004. do_intrinsic(_fullFence, sun_misc_Unsafe, fullFence_name, fullFence_signature, F_RN) \
  1005. do_name( fullFence_name, "fullFence") \
  1006. do_alias( fullFence_signature, void_method_signature) \
  1007. \
  1008. do_class(java_lang_invoke_MethodHandleImpl, "java/lang/invoke/MethodHandleImpl") \
  1009. do_intrinsic(_profileBoolean, java_lang_invoke_MethodHandleImpl, profileBoolean_name, profileBoolean_signature, F_S) \
  1010. do_name( profileBoolean_name, "profileBoolean") \
  1011. do_signature(profileBoolean_signature, "(Z[I)Z") \
  1012. \
  1013. do_signature(getObject_signature, "(Ljava/lang/Object;J)Ljava/lang/Object;") \
  1014. do_signature(putObject_signature, "(Ljava/lang/Object;JLjava/lang/Object;)V") \
  1015. do_signature(getBoolean_signature, "(Ljava/lang/Object;J)Z") \
  1016. do_signature(putBoolean_signature, "(Ljava/lang/Object;JZ)V") \
  1017. do_signature(getByte_signature, "(Ljava/lang/Object;J)B") \
  1018. do_signature(putByte_signature, "(Ljava/lang/Object;JB)V") \
  1019. do_signature(getShort_signature, "(Ljava/lang/Object;J)S") \
  1020. do_signature(putShort_signature, "(Ljava/lang/Object;JS)V") \
  1021. do_signature(getChar_signature, "(Ljava/lang/Object;J)C") \
  1022. do_signature(putChar_signature, "(Ljava/lang/Object;JC)V") \
  1023. do_signature(getInt_signature, "(Ljava/lang/Object;J)I") \
  1024. do_signature(putInt_signature, "(Ljava/lang/Object;JI)V") \
  1025. do_signature(getLong_signature, "(Ljava/lang/Object;J)J") \
  1026. do_signature(putLong_signature, "(Ljava/lang/Object;JJ)V") \
  1027. do_signature(getFloat_signature, "(Ljava/lang/Object;J)F") \
  1028. do_signature(putFloat_signature, "(Ljava/lang/Object;JF)V") \
  1029. do_signature(getDouble_signature, "(Ljava/lang/Object;J)D") \
  1030. do_signature(putDouble_signature, "(Ljava/lang/Object;JD)V") \
  1031. \
  1032. do_name(getObject_name,"getObject") do_name(putObject_name,"putObject") \
  1033. do_name(getBoolean_name,"getBoolean") do_name(putBoolean_name,"putBoolean") \
  1034. do_name(getByte_name,"getByte") do_name(putByte_name,"putByte") \
  1035. do_name(getShort_name,"getShort") do_name(putShort_name,"putShort") \
  1036. do_name(getChar_name,"getChar") do_name(putChar_name,"putChar") \
  1037. do_name(getInt_name,"getInt") do_name(putInt_name,"putInt") \
  1038. do_name(getLong_name,"getLong") do_name(putLong_name,"putLong") \
  1039. do_name(getFloat_name,"getFloat") do_name(putFloat_name,"putFloat") \
  1040. do_name(getDouble_name,"getDouble") do_name(putDouble_name,"putDouble") \
  1041. \
  1042. do_intrinsic(_getObject, sun_misc_Unsafe, getObject_name, getObject_signature, F_RN) \
  1043. do_intrinsic(_getBoolean, sun_misc_Unsafe, getBoolean_name, getBoolean_signature, F_RN) \
  1044. do_intrinsic(_getByte, sun_misc_Unsafe, getByte_name, getByte_signature, F_RN) \
  1045. do_intrinsic(_getShort, sun_misc_Unsafe, getShort_name, getShort_signature, F_RN) \
  1046. do_intrinsic(_getChar, sun_misc_Unsafe, getChar_name, getChar_signature, F_RN) \
  1047. do_intrinsic(_getInt, sun_misc_Unsafe, getInt_name, getInt_signature, F_RN) \
  1048. do_intrinsic(_getLong, sun_misc_Unsafe, getLong_name, getLong_signature, F_RN) \
  1049. do_intrinsic(_getFloat, sun_misc_Unsafe, getFloat_name, getFloat_signature, F_RN) \
  1050. do_intrinsic(_getDouble, sun_misc_Unsafe, getDouble_name, getDouble_signature, F_RN) \
  1051. do_intrinsic(_putObject, sun_misc_Unsafe, putObject_name, putObject_signature, F_RN) \
  1052. do_intrinsic(_putBoolean, sun_misc_Unsafe, putBoolean_name, putBoolean_signature, F_RN) \
  1053. do_intrinsic(_putByte, sun_misc_Unsafe, putByte_name, putByte_signature, F_RN) \
  1054. do_intrinsic(_putShort, sun_misc_Unsafe, putShort_name, putShort_signature, F_RN) \
  1055. do_intrinsic(_putChar, sun_misc_Unsafe, putChar_name, putChar_signature, F_RN) \
  1056. do_intrinsic(_putInt, sun_misc_Unsafe, putInt_name, putInt_signature, F_RN) \
  1057. do_intrinsic(_putLong, sun_misc_Unsafe, putLong_name, putLong_signature, F_RN) \
  1058. do_intrinsic(_putFloat, sun_misc_Unsafe, putFloat_name, putFloat_signature, F_RN) \
  1059. do_intrinsic(_putDouble, sun_misc_Unsafe, putDouble_name, putDouble_signature, F_RN) \
  1060. \
  1061. do_name(getObjectVolatile_name,"getObjectVolatile") do_name(putObjectVolatile_name,"putObjectVolatile") \
  1062. do_name(getBooleanVolatile_name,"getBooleanVolatile") do_name(putBooleanVolatile_name,"putBooleanVolatile") \
  1063. do_name(getByteVolatile_name,"getByteVolatile") do_name(putByteVolatile_name,"putByteVolatile") \
  1064. do_name(getShortVolatile_name,"getShortVolatile") do_name(putShortVolatile_name,"putShortVolatile") \
  1065. do_name(getCharVolatile_name,"getCharVolatile") do_name(putCharVolatile_name,"putCharVolatile") \
  1066. do_name(getIntVolatile_name,"getIntVolatile") do_name(putIntVolatile_name,"putIntVolatile") \
  1067. do_name(getLongVolatile_name,"getLongVolatile") do_name(putLongVolatile_name,"putLongVolatile") \
  1068. do_name(getFloatVolatile_name,"getFloatVolatile") do_name(putFloatVolatile_name,"putFloatVolatile") \
  1069. do_name(getDoubleVolatile_name,"getDoubleVolatile") do_name(putDoubleVolatile_name,"putDoubleVolatile") \
  1070. \
  1071. do_intrinsic(_getObjectVolatile, sun_misc_Unsafe, getObjectVolatile_name, getObject_signature, F_RN) \
  1072. do_intrinsic(_getBooleanVolatile, sun_misc_Unsafe, getBooleanVolatile_name, getBoolean_signature, F_RN) \
  1073. do_intrinsic(_getByteVolatile, sun_misc_Unsafe, getByteVolatile_name, getByte_signature, F_RN) \
  1074. do_intrinsic(_getShortVolatile, sun_misc_Unsafe, getShortVolatile_name, getShort_signature, F_RN) \
  1075. do_intrinsic(_getCharVolatile, sun_misc_Unsafe, getCharVolatile_name, getChar_signature, F_RN) \
  1076. do_intrinsic(_getIntVolatile, sun_misc_Unsafe, getIntVolatile_name, getInt_signature, F_RN) \
  1077. do_intrinsic(_getLongVolatile, sun_misc_Unsafe, getLongVolatile_name, getLong_signature, F_RN) \
  1078. do_intrinsic(_getFloatVolatile, sun_misc_Unsafe, getFloatVolatile_name, getFloat_signature, F_RN) \
  1079. do_intrinsic(_getDoubleVolatile, sun_misc_Unsafe, getDoubleVolatile_name, getDouble_signature, F_RN) \
  1080. do_intrinsic(_putObjectVolatile, sun_misc_Unsafe, putObjectVolatile_name, putObject_signature, F_RN) \
  1081. do_intrinsic(_putBooleanVolatile, sun_misc_Unsafe, putBooleanVolatile_name, putBoolean_signature, F_RN) \
  1082. do_intrinsic(_putByteVolatile, sun_misc_Unsafe, putByteVolatile_name, putByte_signature, F_RN) \
  1083. do_intrinsic(_putShortVolatile, sun_misc_Unsafe, putShortVolatile_name, putShort_signature, F_RN) \
  1084. do_intrinsic(_putCharVolatile, sun_misc_Unsafe, putCharVolatile_name, putChar_signature, F_RN) \
  1085. do_intrinsic(_putIntVolatile, sun_misc_Unsafe, putIntVolatile_name, putInt_signature, F_RN) \
  1086. do_intrinsic(_putLongVolatile, sun_misc_Unsafe, putLongVolatile_name, putLong_signature, F_RN) \
  1087. do_intrinsic(_putFloatVolatile, sun_misc_Unsafe, putFloatVolatile_name, putFloat_signature, F_RN) \
  1088. do_intrinsic(_putDoubleVolatile, sun_misc_Unsafe, putDoubleVolatile_name, putDouble_signature, F_RN) \
  1089. \
  1090. do_signature(getByte_raw_signature, "(J)B") \
  1091. do_signature(putByte_raw_signature, "(JB)V") \
  1092. do_signature(getShort_raw_signature, "(J)S") \
  1093. do_signature(putShort_raw_signature, "(JS)V") \
  1094. do_signature(getChar_raw_signature, "(J)C") \
  1095. do_signature(putChar_raw_signature, "(JC)V") \
  1096. do_signature(putInt_raw_signature, "(JI)V") \
  1097. do_alias(getLong_raw_signature, /*(J)J*/ long_long_signature) \
  1098. do_alias(putLong_raw_signature, /*(JJ)V*/ long_long_void_signature) \
  1099. do_signature(getFloat_raw_signature, "(J)F") \
  1100. do_signature(putFloat_raw_signature, "(JF)V") \
  1101. do_alias(getDouble_raw_signature, /*(J)D*/ long_double_signature) \
  1102. do_signature(putDouble_raw_signature, "(JD)V") \
  1103. do_alias(getAddress_raw_signature, /*(J)J*/ long_long_signature) \
  1104. do_alias(putAddress_raw_signature, /*(JJ)V*/ long_long_void_signature) \
  1105. \
  1106. do_name( getAddress_name, "getAddress") \
  1107. do_name( putAddress_name, "putAddress") \
  1108. \
  1109. do_intrinsic(_getByte_raw, sun_misc_Unsafe, getByte_name, getByte_raw_signature, F_RN) \
  1110. do_intrinsic(_getShort_raw, sun_misc_Unsafe, getShort_name, getShort_raw_signature, F_RN) \
  1111. do_intrinsic(_getChar_raw, sun_misc_Unsafe, getChar_name, getChar_raw_signature, F_RN) \
  1112. do_intrinsic(_getInt_raw, sun_misc_Unsafe, getInt_name, long_int_signature, F_RN) \
  1113. do_intrinsic(_getLong_raw, sun_misc_Unsafe, getLong_name, getLong_raw_signature, F_RN) \
  1114. do_intrinsic(_getFloat_raw, sun_misc_Unsafe, getFloat_name, getFloat_raw_signature, F_RN) \
  1115. do_intrinsic(_getDouble_raw, sun_misc_Unsafe, getDouble_name, getDouble_raw_signature, F_RN) \
  1116. do_intrinsic(_getAddress_raw, sun_misc_Unsafe, getAddress_name, getAddress_raw_signature, F_RN) \
  1117. do_intrinsic(_putByte_raw, sun_misc_Unsafe, putByte_name, putByte_raw_signature, F_RN) \
  1118. do_intrinsic(_putShort_raw, sun_misc_Unsafe, putShort_name, putShort_raw_signature, F_RN) \
  1119. do_intrinsic(_putChar_raw, sun_misc_Unsafe, putChar_name, putChar_raw_signature, F_RN) \
  1120. do_intrinsic(_putInt_raw, sun_misc_Unsafe, putInt_name, putInt_raw_signature, F_RN) \
  1121. do_intrinsic(_putLong_raw, sun_misc_Unsafe, putLong_name, putLong_raw_signature, F_RN) \
  1122. do_intrinsic(_putFloat_raw, sun_misc_Unsafe, putFloat_name, putFloat_raw_signature, F_RN) \
  1123. do_intrinsic(_putDouble_raw, sun_misc_Unsafe, putDouble_name, putDouble_raw_signature, F_RN) \
  1124. do_intrinsic(_putAddress_raw, sun_misc_Unsafe, putAddress_name, putAddress_raw_signature, F_RN) \
  1125. \
  1126. do_intrinsic(_compareAndSwapObject, sun_misc_Unsafe, compareAndSwapObject_name, compareAndSwapObject_signature, F_RN) \
  1127. do_name( compareAndSwapObject_name, "compareAndSwapObject") \
  1128. do_signature(compareAndSwapObject_signature, "(Ljava/lang/Object;JLjava/lang/Object;Ljava/lang/Object;)Z") \
  1129. do_intrinsic(_compareAndSwapLong, sun_misc_Unsafe, compareAndSwapLong_name, compareAndSwapLong_signature, F_RN) \
  1130. do_name( compareAndSwapLong_name, "compareAndSwapLong") \
  1131. do_signature(compareAndSwapLong_signature, "(Ljava/lang/Object;JJJ)Z") \
  1132. do_intrinsic(_compareAndSwapInt, sun_misc_Unsafe, compareAndSwapInt_name, compareAndSwapInt_signature, F_RN) \
  1133. do_name( compareAndSwapInt_name, "compareAndSwapInt") \
  1134. do_signature(compareAndSwapInt_signature, "(Ljava/lang/Object;JII)Z") \
  1135. do_intrinsic(_putOrderedObject, sun_misc_Unsafe, putOrderedObject_name, putOrderedObject_signature, F_RN) \
  1136. do_name( putOrderedObject_name, "putOrderedObject") \
  1137. do_alias( putOrderedObject_signature, /*(LObject;JLObject;)V*/ putObject_signature) \
  1138. do_intrinsic(_putOrderedLong, sun_misc_Unsafe, putOrderedLong_name, putOrderedLong_signature, F_RN) \
  1139. do_name( putOrderedLong_name, "putOrderedLong") \
  1140. do_alias( putOrderedLong_signature, /*(Ljava/lang/Object;JJ)V*/ putLong_signature) \
  1141. do_intrinsic(_putOrderedInt, sun_misc_Unsafe, putOrderedInt_name, putOrderedInt_signature, F_RN) \
  1142. do_name( putOrderedInt_name, "putOrderedInt") \
  1143. do_alias( putOrderedInt_signature, /*(Ljava/lang/Object;JI)V*/ putInt_signature) \
  1144. \
  1145. do_intrinsic(_getAndAddInt, sun_misc_Unsafe, getAndAddInt_name, getAndAddInt_signature, F_R) \
  1146. do_name( getAndAddInt_name, "getAndAddInt") \
  1147. do_signature(getAndAddInt_signature, "(Ljava/lang/Object;JI)I" ) \
  1148. do_intrinsic(_getAndAddLong, sun_misc_Unsafe, getAndAddLong_name, getAndAddLong_signature, F_R) \
  1149. do_name( getAndAddLong_name, "getAndAddLong") \
  1150. do_signature(getAndAddLong_signature, "(Ljava/lang/Object;JJ)J" ) \
  1151. do_intrinsic(_getAndSetInt, sun_misc_Unsafe, getAndSetInt_name, getAndSetInt_signature, F_R) \
  1152. do_name( getAndSetInt_name, "getAndSetInt") \
  1153. do_alias( getAndSetInt_signature, /*"(Ljava/lang/Object;JI)I"*/ getAndAddInt_signature) \
  1154. do_intrinsic(_getAndSetLong, sun_misc_Unsafe, getAndSetLong_name, getAndSetLong_signature, F_R) \
  1155. do_name( getAndSetLong_name, "getAndSetLong") \
  1156. do_alias( getAndSetLong_signature, /*"(Ljava/lang/Object;JJ)J"*/ getAndAddLong_signature) \
  1157. do_intrinsic(_getAndSetObject, sun_misc_Unsafe, getAndSetObject_name, getAndSetObject_signature, F_R)\
  1158. do_name( getAndSetObject_name, "getAndSetObject") \
  1159. do_signature(getAndSetObject_signature, "(Ljava/lang/Object;JLjava/lang/Object;)Ljava/lang/Object;" ) \
  1160. \
  1161. do_signature( prefetch_signature, "(Ljava/lang/Object;J)V") \
  1162. \
  1163. do_intrinsic(_prefetchRead, sun_misc_Unsafe, prefetchRead_name, prefetch_signature, F_RN) \
  1164. do_name( prefetchRead_name, "prefetchRead") \
  1165. do_intrinsic(_prefetchWrite, sun_misc_Unsafe, prefetchWrite_name, prefetch_signature, F_RN) \
  1166. do_name( prefetchWrite_name, "prefetchWrite") \
  1167. do_intrinsic(_prefetchReadStatic, sun_misc_Unsafe, prefetchReadStatic_name, prefetch_signature, F_SN) \
  1168. do_name( prefetchReadStatic_name, "prefetchReadStatic") \
  1169. do_intrinsic(_prefetchWriteStatic, sun_misc_Unsafe, prefetchWriteStatic_name, prefetch_signature, F_SN) \
  1170. do_name( prefetchWriteStatic_name, "prefetchWriteStatic") \
  1171. \
  1172. do_intrinsic(_fillInStackTrace, java_lang_Throwable, fillInStackTrace_name, void_throwable_signature, F_RNY) \
  1173. \
  1174. do_intrinsic(_StringBuilder_void, java_lang_StringBuilder, object_initializer_name, void_method_signature, F_R) \
  1175. do_intrinsic(_StringBuilder_int, java_lang_StringBuilder, object_initializer_name, int_void_signature, F_R) \
  1176. do_intrinsic(_StringBuilder_String, java_lang_StringBuilder, object_initializer_name, string_void_signature, F_R) \
  1177. \
  1178. do_intrinsic(_StringBuilder_append_char, java_lang_StringBuilder, append_name, char_StringBuilder_signature, F_R) \
  1179. do_intrinsic(_StringBuilder_append_int, java_lang_StringBuilder, append_name, int_StringBuilder_signature, F_R) \
  1180. do_intrinsic(_StringBuilder_append_String, java_lang_StringBuilder, append_name, String_StringBuilder_signature, F_R) \
  1181. \
  1182. do_intrinsic(_StringBuilder_toString, java_lang_StringBuilder, toString_name, void_string_signature, F_R) \
  1183. \
  1184. do_intrinsic(_StringBuffer_void, java_lang_StringBuffer, object_initializer_name, void_method_signature, F_R) \
  1185. do_intrinsic(_StringBuffer_int, java_lang_StringBuffer, object_initializer_name, int_void_signature, F_R) \
  1186. do_intrinsic(_StringBuffer_String, java_lang_StringBuffer, object_initializer_name, string_void_signature, F_R) \
  1187. \
  1188. do_intrinsic(_StringBuffer_append_char, java_lang_StringBuffer, append_name, char_StringBuffer_signature, F_Y) \
  1189. do_intrinsic(_StringBuffer_append_int, java_lang_StringBuffer, append_name, int_StringBuffer_signature, F_Y) \
  1190. do_intrinsic(_StringBuffer_append_String, java_lang_StringBuffer, append_name, String_StringBuffer_signature, F_Y) \
  1191. \
  1192. do_intrinsic(_StringBuffer_toString, java_lang_StringBuffer, toString_name, void_string_signature, F_Y) \
  1193. \
  1194. do_intrinsic(_Integer_toString, java_lang_Integer, toString_name, int_String_signature, F_S) \
  1195. \
  1196. do_intrinsic(_String_String, java_lang_String, object_initializer_name, string_void_signature, F_R) \
  1197. \
  1198. do_intrinsic(_Object_init, java_lang_Object, object_initializer_name, void_method_signature, F_R) \
  1199. \
  1200. do_intrinsic(_invoke, java_lang_reflect_Method, invoke_name, object_object_array_object_signature, F_R) \
  1201. do_intrinsic(_invokeGeneric, java_lang_invoke_MethodHandle, invoke_name, star_name, F_RN) \
  1202. do_intrinsic(_invokeBasic, java_lang_invoke_MethodHandle, invokeBasic_name, star_name, F_RN) \
  1203. do_intrinsic(_linkToVirtual, java_lang_invoke_MethodHandle, linkToVirtual_name, star_name, F_SN) \
  1204. do_intrinsic(_linkToStatic, java_lang_invoke_MethodHandle, linkToStatic_name, star_name, F_SN) \
  1205. do_intrinsic(_linkToSpecial, java_lang_invoke_MethodHandle, linkToSpecial_name, star_name, F_SN) \
  1206. do_intrinsic(_linkToInterface, java_lang_invoke_MethodHandle, linkToInterface_name, star_name, F_SN) \
  1207. do_intrinsic(_compiledLambdaForm, java_lang_invoke_MethodHandle, compiledLambdaForm_name, star_name, F_RN) \
  1208. \
  1209. do_intrinsic(_booleanValue, java_lang_Boolean, booleanValue_name, void_boolean_signature, F_R) \
  1210. do_name( booleanValue_name, "booleanValue") \
  1211. do_intrinsic(_byteValue, java_lang_Byte, byteValue_name, void_byte_signature, F_R) \
  1212. do_name( byteValue_name, "byteValue") \
  1213. do_intrinsic(_charValue, java_lang_Character, charValue_name, void_char_signature, F_R) \
  1214. do_name( charValue_name, "charValue") \
  1215. do_intrinsic(_shortValue, java_lang_Short, shortValue_name, void_short_signature, F_R) \
  1216. do_name( shortValue_name, "shortValue") \
  1217. do_intrinsic(_intValue, java_lang_Integer, intValue_name, void_int_signature, F_R) \
  1218. do_name( intValue_name, "intValue") \
  1219. do_intrinsic(_longValue, java_lang_Long, longValue_name, void_long_signature, F_R) \
  1220. do_name( longValue_name, "longValue") \
  1221. do_intrinsic(_floatValue, java_lang_Float, floatValue_name, void_float_signature, F_R) \
  1222. do_name( floatValue_name, "floatValue") \
  1223. do_intrinsic(_doubleValue, java_lang_Double, doubleValue_name, void_double_signature, F_R) \
  1224. do_name( doubleValue_name, "doubleValue") \
  1225. \
  1226. do_name( valueOf_name, "valueOf") \
  1227. do_intrinsic(_Boolean_valueOf, java_lang_Boolean, valueOf_name, Boolean_valueOf_signature, F_S) \
  1228. do_name( Boolean_valueOf_signature, "(Z)Ljava/lang/Boolean;") \
  1229. do_intrinsic(_Byte_valueOf, java_lang_Byte, valueOf_name, Byte_valueOf_signature, F_S) \
  1230. do_name( Byte_valueOf_signature, "(B)Ljava/lang/Byte;") \
  1231. do_intrinsic(_Character_valueOf, java_lang_Character, valueOf_name, Character_valueOf_signature, F_S) \
  1232. do_name( Character_valueOf_signature, "(C)Ljava/lang/Character;") \
  1233. do_intrinsic(_Short_valueOf, java_lang_Short, valueOf_name, Short_valueOf_signature, F_S) \
  1234. do_name( Short_valueOf_signature, "(S)Ljava/lang/Short;") \
  1235. do_intrinsic(_Integer_valueOf, java_lang_Integer, valueOf_name, Integer_valueOf_signature, F_S) \
  1236. do_name( Integer_valueOf_signature, "(I)Ljava/lang/Integer;") \
  1237. do_intrinsic(_Long_valueOf, java_lang_Long, valueOf_name, Long_valueOf_signature, F_S) \
  1238. do_name( Long_valueOf_signature, "(J)Ljava/lang/Long;") \
  1239. do_intrinsic(_Float_valueOf, java_lang_Float, valueOf_name, Float_valueOf_signature, F_S) \
  1240. do_name( Float_valueOf_signature, "(F)Ljava/lang/Float;") \
  1241. do_intrinsic(_Double_valueOf, java_lang_Double, valueOf_name, Double_valueOf_signature, F_S) \
  1242. do_name( Double_valueOf_signature, "(D)Ljava/lang/Double;") \
  1243. \
// Table of all well-known Symbol*s needed by the VM, addressed by SID
// (symbol identifier).  Both the SID enum and the per-symbol accessor
// functions are generated from the VM_SYMBOLS_DO macro list.
class vmSymbols: AllStatic {
 friend class vmIntrinsics;
 friend class VMStructs;
 public:
  // One enum constant per declared symbol; aliases reuse an existing SID.
  enum SID {
    NO_SID = 0,  // distinguished "no symbol" value

#define VM_SYMBOL_ENUM(name, string) VM_SYMBOL_ENUM_NAME(name),
    VM_SYMBOLS_DO(VM_SYMBOL_ENUM, VM_ALIAS_IGNORE)
#undef VM_SYMBOL_ENUM

    SID_LIMIT,

    // Aliased names are assigned the SID of the symbol they alias.
#define VM_ALIAS_ENUM(name, def) VM_SYMBOL_ENUM_NAME(name) = VM_SYMBOL_ENUM_NAME(def),
    VM_SYMBOLS_DO(VM_SYMBOL_IGNORE, VM_ALIAS_ENUM)
#undef VM_ALIAS_ENUM

    FIRST_SID = NO_SID + 1
  };
  enum {
    log2_SID_LIMIT = 10 // checked by an assert at start-up
  };

 private:
  static Symbol* _symbols[];                  // SID-indexed table of interned symbols
  static Symbol* _type_signatures[T_VOID+1];  // one-character signature per BasicType

 public:
  static void initialize(TRAPS);

  // Generated accessors: vmSymbols::name() returns the interned Symbol*.
#define VM_SYMBOL_DECLARE(name, ignore) \
  static Symbol* name() { \
    return _symbols[VM_SYMBOL_ENUM_NAME(name)]; \
  }
  VM_SYMBOLS_DO(VM_SYMBOL_DECLARE, VM_SYMBOL_DECLARE)
#undef VM_SYMBOL_DECLARE

  static void symbols_do(SymbolClosure* f);
  static void serialize(SerializeClosure* soc);

  // Signature symbol for a primitive type (asserts t is in range and mapped).
  static Symbol* type_signature(BasicType t) {
    assert((uint)t < T_VOID+1, "range check");
    assert(_type_signatures[t] != NULL, "domain check");
    return _type_signatures[t];
  }
  // Inverse of type_signature.
  static BasicType signature_type(Symbol* s);

  static Symbol* symbol_at(SID id) {
    assert(id >= FIRST_SID && id < SID_LIMIT, "oob");
    assert(_symbols[id] != NULL, "init");
    return _symbols[id];
  }

  // Returns symbol's SID if one is assigned, else NO_SID.
  static SID find_sid(Symbol* symbol);
  static SID find_sid(const char* symbol_name);

#ifndef PRODUCT
  // No need for this in the product:
  static const char* name_for(SID sid);
#endif //PRODUCT
};
// VM Intrinsics: methods the VM recognizes specially (compiler intrinsics,
// well-known boxing/unboxing methods, MethodHandle polymorphic signatures).
// The ID enum is generated from the VM_INTRINSICS_DO macro list.
class vmIntrinsics: AllStatic {
  friend class vmSymbols;
  friend class ciObjectFactory;

 public:
  // Accessing
  enum ID {
    _none = 0,                      // not an intrinsic (default answer)

#define VM_INTRINSIC_ENUM(id, klass, name, sig, flags)  id,
    VM_INTRINSICS_DO(VM_INTRINSIC_ENUM,
                     VM_SYMBOL_IGNORE, VM_SYMBOL_IGNORE, VM_SYMBOL_IGNORE, VM_ALIAS_IGNORE)
#undef VM_INTRINSIC_ENUM

    ID_LIMIT,
    LAST_COMPILER_INLINE = _prefetchWriteStatic,
    // MethodHandle signature-polymorphic intrinsics (invoke*/linkTo*).
    FIRST_MH_SIG_POLY    = _invokeGeneric,
    FIRST_MH_STATIC      = _linkToVirtual,
    LAST_MH_SIG_POLY     = _linkToInterface,

    FIRST_ID = _none + 1
  };

  // Per-intrinsic method modifier flags (static/native/synchronized mix).
  enum Flags {
    // AccessFlags syndromes relevant to intrinsics.
    F_none = 0,
    F_R,                        // !static ?native !synchronized (R="regular")
    F_S,                        //  static ?native !synchronized
    F_Y,                        // !static ?native  synchronized
    F_RN,                       // !static  native !synchronized
    F_SN,                       //  static  native !synchronized
    F_RNY,                      // !static  native  synchronized

    FLAG_LIMIT
  };
  enum {
    log2_FLAG_LIMIT = 4         // checked by an assert at start-up
  };

 public:
  static ID ID_from(int raw_id) {
    assert(raw_id >= (int)_none && raw_id < (int)ID_LIMIT,
           "must be a valid intrinsic ID");
    return (ID)raw_id;
  }

  static const char* name_at(ID id);

 private:
  static ID find_id_impl(vmSymbols::SID holder,
                         vmSymbols::SID name,
                         vmSymbols::SID sig,
                         jshort flags);

 public:
  // Given a method's class, name, signature, and access flags, report its ID.
  static ID find_id(vmSymbols::SID holder,
                    vmSymbols::SID name,
                    vmSymbols::SID sig,
                    jshort flags) {
    ID id = find_id_impl(holder, name, sig, flags);
#ifdef ASSERT
    // ID _none does not hold the following asserts about intrinsic metadata.
    if (id == _none)  return id;
#endif
    assert(    class_for(id) == holder, "correct id");
    assert(     name_for(id) == name,   "correct id");
    assert(signature_for(id) == sig,    "correct id");
    return id;
  }

  static void verify_method(ID actual_id, Method* m) PRODUCT_RETURN;

  // Find out the metadata of the intrinsic.
  static vmSymbols::SID     class_for(ID id);
  static vmSymbols::SID      name_for(ID id);
  static vmSymbols::SID signature_for(ID id);
  static Flags              flags_for(ID id);

  static const char* short_name_as_C_string(ID id, char* buf, int size);

  // Wrapper object methods:
  static ID for_boxing(BasicType type);
  static ID for_unboxing(BasicType type);

  // Raw conversion:
  static ID for_raw_conversion(BasicType src, BasicType dest);

  // Returns true if the intrinsic should never be inlined/reordered.
  static bool should_be_pinned(vmIntrinsics::ID id);
};
  1359. #endif // SHARE_VM_CLASSFILE_VMSYMBOLS_HPP
  1360. C:\hotspot-69087d08d473\src\share\vm/code/codeBlob.cpp
  1361. #include "precompiled.hpp"
  1362. #include "code/codeBlob.hpp"
  1363. #include "code/codeCache.hpp"
  1364. #include "code/relocInfo.hpp"
  1365. #include "compiler/disassembler.hpp"
  1366. #include "interpreter/bytecode.hpp"
  1367. #include "memory/allocation.inline.hpp"
  1368. #include "memory/heap.hpp"
  1369. #include "oops/oop.inline.hpp"
  1370. #include "prims/forte.hpp"
  1371. #include "runtime/handles.inline.hpp"
  1372. #include "runtime/interfaceSupport.hpp"
  1373. #include "runtime/mutexLocker.hpp"
  1374. #include "runtime/safepoint.hpp"
  1375. #include "runtime/sharedRuntime.hpp"
  1376. #include "runtime/vframe.hpp"
  1377. #include "services/memoryService.hpp"
  1378. #ifdef TARGET_ARCH_x86
  1379. # include "nativeInst_x86.hpp"
  1380. #endif
  1381. #ifdef TARGET_ARCH_aarch64
  1382. # include "nativeInst_aarch64.hpp"
  1383. #endif
  1384. #ifdef TARGET_ARCH_sparc
  1385. # include "nativeInst_sparc.hpp"
  1386. #endif
  1387. #ifdef TARGET_ARCH_zero
  1388. # include "nativeInst_zero.hpp"
  1389. #endif
  1390. #ifdef TARGET_ARCH_arm
  1391. # include "nativeInst_arm.hpp"
  1392. #endif
  1393. #ifdef TARGET_ARCH_ppc
  1394. # include "nativeInst_ppc.hpp"
  1395. #endif
  1396. #ifdef COMPILER1
  1397. #include "c1/c1_Runtime1.hpp"
  1398. #endif
  1399. unsigned int CodeBlob::align_code_offset(int offset) {
  1400. return
  1401. ((offset + (int)CodeHeap::header_size() + (CodeEntryAlignment-1)) & ~(CodeEntryAlignment-1))
  1402. - (int)CodeHeap::header_size();
  1403. }
  1404. unsigned int CodeBlob::allocation_size(CodeBuffer* cb, int header_size) {
  1405. unsigned int size = header_size;
  1406. size += round_to(cb->total_relocation_size(), oopSize);
  1407. size = align_code_offset(size);
  1408. size += round_to(cb->total_content_size(), oopSize);
  1409. size += round_to(cb->total_oop_size(), oopSize);
  1410. size += round_to(cb->total_metadata_size(), oopSize);
  1411. return size;
  1412. }
// Creates a simple CodeBlob.  The code/content region starts right after the
// (aligned) header + relocation info and extends to the end of the blob;
// there is no separate data section (_data_offset == size).
CodeBlob::CodeBlob(const char* name, int header_size, int size, int frame_complete, int locs_size) {
  assert(size == round_to(size, oopSize), "unaligned size");
  assert(locs_size == round_to(locs_size, oopSize), "unaligned size");
  assert(header_size == round_to(header_size, oopSize), "unaligned size");
  assert(!UseRelocIndex, "no space allocated for reloc index yet");
  _name = name;
  _size = size;
  _frame_complete_offset = frame_complete;  // instruction offset at which the frame is complete
  _header_size = header_size;
  _relocation_size = locs_size;
  _content_offset = align_code_offset(header_size + _relocation_size);
  _code_offset = _content_offset;           // code starts at the content start
  _data_offset = size;                      // no data section
  _frame_size = 0;
  set_oop_maps(NULL);
}
// Creates a CodeBlob from a CodeBuffer.  Lays out header, relocation info,
// and content, copies the buffer's code and relocations into the blob, and
// installs a C-heap copy of the oop maps.
CodeBlob::CodeBlob(
  const char* name,
  CodeBuffer* cb,
  int         header_size,
  int         size,
  int         frame_complete,
  int         frame_size,
  OopMapSet*  oop_maps
) {
  assert(size == round_to(size, oopSize), "unaligned size");
  assert(header_size == round_to(header_size, oopSize), "unaligned size");
  _name = name;
  _size = size;
  _frame_complete_offset = frame_complete;
  _header_size = header_size;
  _relocation_size = round_to(cb->total_relocation_size(), oopSize);
  _content_offset = align_code_offset(header_size + _relocation_size);
  // Code may start at an offset within the content section.
  _code_offset = _content_offset + cb->total_offset_of(cb->insts());
  _data_offset = _content_offset + round_to(cb->total_content_size(), oopSize);
  assert(_data_offset <= size, "codeBlob is too small");
  cb->copy_code_and_locs_to(this);
  set_oop_maps(oop_maps);
  _frame_size = frame_size;
#ifdef COMPILER1
  // probably wrong for tiered
  assert(_frame_size >= -1, "must use frame size or -1 for runtime stubs");
#endif // COMPILER1
}
// Installs a C-heap deep copy of the given oop map set (NULL clears the field).
// NOTE(review): a previously installed copy is not freed here; visible callers
// set it once at construction and release it in flush() — confirm no other
// call site reassigns a live set.
void CodeBlob::set_oop_maps(OopMapSet* p) {
  if (p != NULL) {
    // Deep-copy into C-heap storage so the maps outlive the CodeBuffer.
    _oop_maps = (OopMapSet* )NEW_C_HEAP_ARRAY(unsigned char, p->heap_size(), mtCode);
    p->copy_to((address)_oop_maps);
  } else {
    _oop_maps = NULL;
  }
}
// Announce a freshly generated stub: optionally disassemble it (PrintStubCode),
// register the code range with Forte and JVMTI, and update code-cache memory
// accounting.  name1/name2 are concatenated to form the stub id.
void CodeBlob::trace_new_stub(CodeBlob* stub, const char* name1, const char* name2) {
  // Do not hold the CodeCache lock during name formatting.
  assert(!CodeCache_lock->owned_by_self(), "release CodeCache before registering the stub");
  if (stub != NULL) {
    char stub_id[256];
    assert(strlen(name1) + strlen(name2) < sizeof(stub_id), "");
    jio_snprintf(stub_id, sizeof(stub_id), "%s%s", name1, name2);
    if (PrintStubCode) {
      ttyLocker ttyl;
      tty->print_cr("Decoding %s " INTPTR_FORMAT, stub_id, (intptr_t) stub);
      Disassembler::decode(stub->code_begin(), stub->code_end());
      tty->cr();
    }
    Forte::register_stub(stub_id, stub->code_begin(), stub->code_end());
    if (JvmtiExport::should_post_dynamic_code_generated()) {
      // Use the more specific name if available, otherwise the prefix.
      const char* stub_name = name2;
      if (name2[0] == '\0') stub_name = name1;
      JvmtiExport::post_dynamic_code_generated(stub_name, stub->code_begin(), stub->code_end());
    }
  }
  // Track memory usage statistic after releasing CodeCache_lock.
  MemoryService::track_code_cache_memory_usage();
}
  1485. void CodeBlob::flush() {
  1486. if (_oop_maps) {
  1487. FREE_C_HEAP_ARRAY(unsigned char, _oop_maps, mtCode);
  1488. _oop_maps = NULL;
  1489. }
  1490. _strings.free();
  1491. }
  1492. OopMap* CodeBlob::oop_map_for_return_address(address return_address) {
  1493. assert(oop_maps() != NULL, "nope");
  1494. return oop_maps()->find_map_at_offset((intptr_t) return_address - (intptr_t) code_begin());
  1495. }
// Disassemble this blob's code to the tty (debugging aid).
void CodeBlob::print_code() {
  HandleMark hm;
  ResourceMark m;
  Disassembler::decode(this, tty);
}
// Implementation of BufferBlob: a simple blob with no CodeBuffer, no
// relocation info, and a frame that is never considered safe to walk.
BufferBlob::BufferBlob(const char* name, int size)
: CodeBlob(name, sizeof(BufferBlob), size, CodeOffsets::frame_never_safe, /*locs_size:*/ 0)
{}
  1504. BufferBlob* BufferBlob::create(const char* name, int buffer_size) {
  1505. ThreadInVMfromUnknown __tiv; // get to VM state in case we block on CodeCache_lock
  1506. BufferBlob* blob = NULL;
  1507. unsigned int size = sizeof(BufferBlob);
  1508. size = align_code_offset(size);
  1509. size += round_to(buffer_size, oopSize);
  1510. assert(name != NULL, "must provide a name");
  1511. {
  1512. MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
  1513. blob = new (size) BufferBlob(name, size);
  1514. }
  1515. MemoryService::track_code_cache_memory_usage();
  1516. return blob;
  1517. }
// BufferBlob backed by a CodeBuffer; code and relocations are copied in by
// the CodeBlob constructor.
BufferBlob::BufferBlob(const char* name, int size, CodeBuffer* cb)
: CodeBlob(name, cb, sizeof(BufferBlob), size, CodeOffsets::frame_never_safe, 0, NULL)
{}
  1521. BufferBlob* BufferBlob::create(const char* name, CodeBuffer* cb) {
  1522. ThreadInVMfromUnknown __tiv; // get to VM state in case we block on CodeCache_lock
  1523. BufferBlob* blob = NULL;
  1524. unsigned int size = allocation_size(cb, sizeof(BufferBlob));
  1525. assert(name != NULL, "must provide a name");
  1526. {
  1527. MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
  1528. blob = new (size) BufferBlob(name, size, cb);
  1529. }
  1530. MemoryService::track_code_cache_memory_usage();
  1531. return blob;
  1532. }
  1533. void* BufferBlob::operator new(size_t s, unsigned size, bool is_critical) throw() {
  1534. void* p = CodeCache::allocate(size, is_critical);
  1535. return p;
  1536. }
  1537. void BufferBlob::free( BufferBlob *blob ) {
  1538. ThreadInVMfromUnknown __tiv; // get to VM state in case we block on CodeCache_lock
  1539. blob->flush();
  1540. {
  1541. MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
  1542. CodeCache::free((CodeBlob*)blob);
  1543. }
  1544. MemoryService::track_code_cache_memory_usage();
  1545. }
// Implementation of AdapterBlob: holds I2C/C2I adapter code and commits
// itself to the code cache upon construction.
AdapterBlob::AdapterBlob(int size, CodeBuffer* cb) :
  BufferBlob("I2C/C2I adapters", size, cb) {
  CodeCache::commit(this);
}
  1550. AdapterBlob* AdapterBlob::create(CodeBuffer* cb) {
  1551. ThreadInVMfromUnknown __tiv; // get to VM state in case we block on CodeCache_lock
  1552. AdapterBlob* blob = NULL;
  1553. unsigned int size = allocation_size(cb, sizeof(AdapterBlob));
  1554. {
  1555. MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
  1556. const bool is_critical = true;
  1557. blob = new (size, is_critical) AdapterBlob(size, cb);
  1558. }
  1559. MemoryService::track_code_cache_memory_usage();
  1560. return blob;
  1561. }
// Implementation of VtableBlob: a plain BufferBlob holding vtable stubs.
VtableBlob::VtableBlob(const char* name, int size) :
  BufferBlob(name, size) {
}
  1565. VtableBlob* VtableBlob::create(const char* name, int buffer_size) {
  1566. ThreadInVMfromUnknown __tiv; // get to VM state in case we block on CodeCache_lock
  1567. VtableBlob* blob = NULL;
  1568. unsigned int size = sizeof(VtableBlob);
  1569. size = align_code_offset(size);
  1570. size += round_to(buffer_size, oopSize);
  1571. assert(name != NULL, "must provide a name");
  1572. {
  1573. MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
  1574. blob = new (size) VtableBlob(name, size);
  1575. }
  1576. MemoryService::track_code_cache_memory_usage();
  1577. return blob;
  1578. }
  1579. MethodHandlesAdapterBlob* MethodHandlesAdapterBlob::create(int buffer_size) {
  1580. ThreadInVMfromUnknown __tiv; // get to VM state in case we block on CodeCache_lock
  1581. MethodHandlesAdapterBlob* blob = NULL;
  1582. unsigned int size = sizeof(MethodHandlesAdapterBlob);
  1583. size = align_code_offset(size);
  1584. size += round_to(buffer_size, oopSize);
  1585. {
  1586. MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
  1587. const bool is_critical = true;
  1588. blob = new (size, is_critical) MethodHandlesAdapterBlob(size);
  1589. }
  1590. MemoryService::track_code_cache_memory_usage();
  1591. return blob;
  1592. }
// RuntimeStub - a fixed piece of runtime support code called out to from
// compiled code.
//
// caller_must_gc_arguments: if true, the caller frame's outgoing arguments
// must be visited as GC roots while this stub is on the stack.
RuntimeStub::RuntimeStub(
  const char* name,
  CodeBuffer* cb,
  int size,
  int frame_complete,
  int frame_size,
  OopMapSet* oop_maps,
  bool caller_must_gc_arguments
)
: CodeBlob(name, cb, sizeof(RuntimeStub), size, frame_complete, frame_size, oop_maps)
{
  _caller_must_gc_arguments = caller_must_gc_arguments;
}

// Allocate a RuntimeStub in the code cache and copy the generated code from
// 'cb' into it.  Allocation failure aborts the VM in operator new below, so
// this never returns NULL.
RuntimeStub* RuntimeStub::new_runtime_stub(const char* stub_name,
                                           CodeBuffer* cb,
                                           int frame_complete,
                                           int frame_size,
                                           OopMapSet* oop_maps,
                                           bool caller_must_gc_arguments)
{
  RuntimeStub* stub = NULL;
  ThreadInVMfromUnknown __tiv;  // get to VM state in case we block on CodeCache_lock
  {
    MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
    unsigned int size = allocation_size(cb, sizeof(RuntimeStub));
    stub = new (size) RuntimeStub(stub_name, cb, size, frame_complete, frame_size, oop_maps, caller_must_gc_arguments);
  }
  trace_new_stub(stub, "RuntimeStub - ", stub_name);
  return stub;
}

// Runtime stubs are essential: allocate from the critical part of the code
// cache (is_critical == true) and abort the VM if even that fails.
void* RuntimeStub::operator new(size_t s, unsigned size) throw() {
  void* p = CodeCache::allocate(size, true);
  if (!p) fatal("Initial size of CodeCache is too small");
  return p;
}
  1628. void* SingletonBlob::operator new(size_t s, unsigned size) throw() {
  1629. void* p = CodeCache::allocate(size, true);
  1630. if (!p) fatal("Initial size of CodeCache is too small");
  1631. return p;
  1632. }
// Singleton blob holding the deoptimization handler code.  The offset
// arguments locate the various entry points within the generated code
// (see the unpack*() accessors in the header).
DeoptimizationBlob::DeoptimizationBlob(
  CodeBuffer* cb,
  int size,
  OopMapSet* oop_maps,
  int unpack_offset,
  int unpack_with_exception_offset,
  int unpack_with_reexecution_offset,
  int frame_size
)
: SingletonBlob("DeoptimizationBlob", cb, sizeof(DeoptimizationBlob), size, frame_size, oop_maps)
{
  _unpack_offset           = unpack_offset;
  _unpack_with_exception   = unpack_with_exception_offset;
  _unpack_with_reexecution = unpack_with_reexecution_offset;
#ifdef COMPILER1
  // Filled in later via set_unpack_with_exception_in_tls_offset().
  _unpack_with_exception_in_tls   = -1;
#endif
}

// Allocate and construct the deoptimization blob in the code cache.
DeoptimizationBlob* DeoptimizationBlob::create(
  CodeBuffer* cb,
  OopMapSet* oop_maps,
  int unpack_offset,
  int unpack_with_exception_offset,
  int unpack_with_reexecution_offset,
  int frame_size)
{
  DeoptimizationBlob* blob = NULL;
  ThreadInVMfromUnknown __tiv;  // get to VM state in case we block on CodeCache_lock
  {
    MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
    unsigned int size = allocation_size(cb, sizeof(DeoptimizationBlob));
    blob = new (size) DeoptimizationBlob(cb,
                                         size,
                                         oop_maps,
                                         unpack_offset,
                                         unpack_with_exception_offset,
                                         unpack_with_reexecution_offset,
                                         frame_size);
  }
  trace_new_stub(blob, "DeoptimizationBlob");
  return blob;
}
  1675. #ifdef COMPILER2
// Singleton blob (C2 only) containing the uncommon trap handler code.
UncommonTrapBlob::UncommonTrapBlob(
  CodeBuffer* cb,
  int size,
  OopMapSet* oop_maps,
  int frame_size
)
: SingletonBlob("UncommonTrapBlob", cb, sizeof(UncommonTrapBlob), size, frame_size, oop_maps)
{}

// Allocate and construct the singleton uncommon trap blob.
UncommonTrapBlob* UncommonTrapBlob::create(
  CodeBuffer* cb,
  OopMapSet* oop_maps,
  int frame_size)
{
  UncommonTrapBlob* blob = NULL;
  ThreadInVMfromUnknown __tiv;  // get to VM state in case we block on CodeCache_lock
  {
    MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
    unsigned int size = allocation_size(cb, sizeof(UncommonTrapBlob));
    blob = new (size) UncommonTrapBlob(cb, size, oop_maps, frame_size);
  }
  trace_new_stub(blob, "UncommonTrapBlob");
  return blob;
}
  1699. #endif // COMPILER2
  1700. #ifdef COMPILER2
// Singleton blob (C2 only) containing the exception handler entry code.
ExceptionBlob::ExceptionBlob(
  CodeBuffer* cb,
  int size,
  OopMapSet* oop_maps,
  int frame_size
)
: SingletonBlob("ExceptionBlob", cb, sizeof(ExceptionBlob), size, frame_size, oop_maps)
{}

// Allocate and construct the singleton exception blob.
ExceptionBlob* ExceptionBlob::create(
  CodeBuffer* cb,
  OopMapSet* oop_maps,
  int frame_size)
{
  ExceptionBlob* blob = NULL;
  ThreadInVMfromUnknown __tiv;  // get to VM state in case we block on CodeCache_lock
  {
    MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
    unsigned int size = allocation_size(cb, sizeof(ExceptionBlob));
    blob = new (size) ExceptionBlob(cb, size, oop_maps, frame_size);
  }
  trace_new_stub(blob, "ExceptionBlob");
  return blob;
}
  1724. #endif // COMPILER2
// Singleton blob containing the safepoint (polling-page trap) handler code.
SafepointBlob::SafepointBlob(
  CodeBuffer* cb,
  int size,
  OopMapSet* oop_maps,
  int frame_size
)
: SingletonBlob("SafepointBlob", cb, sizeof(SafepointBlob), size, frame_size, oop_maps)
{}

// Allocate and construct the singleton safepoint blob.
SafepointBlob* SafepointBlob::create(
  CodeBuffer* cb,
  OopMapSet* oop_maps,
  int frame_size)
{
  SafepointBlob* blob = NULL;
  ThreadInVMfromUnknown __tiv;  // get to VM state in case we block on CodeCache_lock
  {
    MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
    unsigned int size = allocation_size(cb, sizeof(SafepointBlob));
    blob = new (size) SafepointBlob(cb, size, oop_maps, frame_size);
  }
  trace_new_stub(blob, "SafepointBlob");
  return blob;
}
// Verification and printing.

// A raw CodeBlob is never verified directly; subclasses provide their own
// verify() implementations.
void CodeBlob::verify() {
  ShouldNotReachHere();
}

// Print header line and frame size of this blob to 'st'.
void CodeBlob::print_on(outputStream* st) const {
  st->print_cr("[CodeBlob (" INTPTR_FORMAT ")]", p2i(this));
  st->print_cr("Framesize: %d", _frame_size);
}

// Short one-line description.
void CodeBlob::print_value_on(outputStream* st) const {
  st->print_cr("[CodeBlob]");
}
// Nothing to verify for a BufferBlob; intentionally empty.
void BufferBlob::verify() {
}

// Print the generic CodeBlob header followed by this blob's one-liner.
void BufferBlob::print_on(outputStream* st) const {
  CodeBlob::print_on(st);
  print_value_on(st);
}

// One-line description including the blob's purpose (its name).
void BufferBlob::print_value_on(outputStream* st) const {
  st->print_cr("BufferBlob (" INTPTR_FORMAT ") used for %s", p2i(this), name());
}
// Nothing to verify for a RuntimeStub; intentionally empty.
void RuntimeStub::verify() {
}

// Print header, name and a disassembly of this stub.
void RuntimeStub::print_on(outputStream* st) const {
  ttyLocker ttyl;  // keep header and disassembly together in the output
  CodeBlob::print_on(st);
  st->print("Runtime Stub (" INTPTR_FORMAT "): ", p2i(this));
  st->print_cr("%s", name());
  Disassembler::decode((CodeBlob*)this, st);
}

// One-line description: address and stub name.
void RuntimeStub::print_value_on(outputStream* st) const {
  st->print("RuntimeStub (" INTPTR_FORMAT "): ", p2i(this)); st->print("%s", name());
}
// Nothing to verify for a SingletonBlob; intentionally empty.
void SingletonBlob::verify() {
}

// Print header, name and a disassembly of this blob.
void SingletonBlob::print_on(outputStream* st) const {
  ttyLocker ttyl;  // keep name and disassembly together in the output
  CodeBlob::print_on(st);
  st->print_cr("%s", name());
  Disassembler::decode((CodeBlob*)this, st);
}

// One-line description: just the blob name.
void SingletonBlob::print_value_on(outputStream* st) const {
  st->print_cr("%s", name());
}

// Deoptimization frames cannot be described further at this point.
void DeoptimizationBlob::print_value_on(outputStream* st) const {
  st->print_cr("Deoptimization (frame not available)");
}
// ===== File: src/share/vm/code/codeBlob.hpp =====
  1794. #ifndef SHARE_VM_CODE_CODEBLOB_HPP
  1795. #define SHARE_VM_CODE_CODEBLOB_HPP
  1796. #include "asm/codeBuffer.hpp"
  1797. #include "compiler/oopMap.hpp"
  1798. #include "runtime/frame.hpp"
  1799. #include "runtime/handles.hpp"
// Code blob type partitioning of the code cache.  This VM variant does not
// segment the code cache, so only a single type exists.
struct CodeBlobType {
  enum {
    All      = 0,  // All types (No code cache segmentation)
    NumTypes = 1   // Number of CodeBlobTypes
  };
};
  1806. class DeoptimizationBlob;
// CodeBlob - superclass for all entries in the CodeCache (nmethods,
// buffer blobs, runtime stubs, singleton blobs, ...).
//
// Memory layout, with all offsets relative to header_begin():
//   header      [0 .. _header_size)
//   relocation  [_header_size .. _header_size + _relocation_size)
//   content     [_content_offset .. _data_offset)   consts, insts, stubs
//   code        [_code_offset .. _data_offset)      insts, stubs
//   data        [_data_offset .. _size)
class CodeBlob VALUE_OBJ_CLASS_SPEC {
  friend class VMStructs;
 private:
  const char* _name;
  int        _size;                 // total size of CodeBlob in bytes
  int        _header_size;          // size of header (depends on subclass)
  int        _relocation_size;      // size of relocation
  int        _content_offset;       // offset to where content region begins (this includes consts, insts, stubs)
  int        _code_offset;          // offset to where instructions region begins (this includes insts, stubs)
  int        _frame_complete_offset; // instruction offsets in [0.._frame_complete_offset) have
                                     // not yet set up the frame completely -- TODO(review): confirm wording
  int        _data_offset;          // offset to where data region begins
  int        _frame_size;           // size of stack frame
  OopMapSet* _oop_maps;             // OopMap for this CodeBlob
  CodeStrings _strings;             // attached comments/strings for printing
 public:
  // Returns the space needed for CodeBlob
  static unsigned int allocation_size(CodeBuffer* cb, int header_size);
  static unsigned int align_code_offset(int offset);

  // Creation
  // a) simple CodeBlob (no relocation data copied from a CodeBuffer)
  CodeBlob(const char* name, int header_size, int size, int frame_complete, int locs_size);
  // b) full CodeBlob, initialized from a CodeBuffer
  CodeBlob(
    const char* name,
    CodeBuffer* cb,
    int         header_size,
    int         size,
    int         frame_complete,
    int         frame_size,
    OopMapSet*  oop_maps
  );

  // Deletion
  void flush();

  // Typing
  virtual bool is_buffer_blob() const            { return false; }
  virtual bool is_nmethod() const                { return false; }
  virtual bool is_runtime_stub() const           { return false; }
  virtual bool is_deoptimization_stub() const    { return false; }
  virtual bool is_uncommon_trap_stub() const     { return false; }
  virtual bool is_exception_stub() const         { return false; }
  virtual bool is_safepoint_stub() const         { return false; }
  virtual bool is_adapter_blob() const           { return false; }
  virtual bool is_vtable_blob() const            { return false; }
  virtual bool is_method_handles_adapter_blob() const { return false; }
  virtual bool is_compiled_by_c2() const         { return false; }
  virtual bool is_compiled_by_c1() const         { return false; }

  // Casting
  nmethod* as_nmethod_or_null()                  { return is_nmethod() ? (nmethod*) this : NULL; }

  // Boundaries of the various regions (see layout above).
  address    header_begin() const                { return (address)    this; }
  address    header_end() const                  { return ((address)   this) + _header_size; };
  relocInfo* relocation_begin() const            { return (relocInfo*) header_end(); };
  relocInfo* relocation_end() const              { return (relocInfo*)(header_end()   + _relocation_size); }
  address    content_begin() const               { return (address)    header_begin() + _content_offset; }
  address    content_end() const                 { return (address)    header_begin() + _data_offset; }
  address    code_begin() const                  { return (address)    header_begin() + _code_offset; }
  address    code_end() const                    { return (address)    header_begin() + _data_offset; }
  address    data_begin() const                  { return (address)    header_begin() + _data_offset; }
  address    data_end() const                    { return (address)    header_begin() + _size; }

  // Offsets
  int relocation_offset() const                  { return _header_size; }
  int content_offset() const                     { return _content_offset; }
  int code_offset() const                        { return _code_offset; }
  int data_offset() const                        { return _data_offset; }

  // Sizes (code_end() == data_begin(), so code_size() spans insts and stubs)
  int size() const                               { return _size; }
  int header_size() const                        { return _header_size; }
  int relocation_size() const                    { return (address) relocation_end() - (address) relocation_begin(); }
  int content_size() const                       { return           content_end()    -           content_begin();    }
  int code_size() const                          { return           code_end()       -           code_begin();       }
  int data_size() const                          { return           data_end()       -           data_begin();       }

  // Containment
  bool blob_contains(address addr) const         { return header_begin()       <= addr && addr < data_end(); }
  bool relocation_contains(relocInfo* addr) const{ return relocation_begin()   <= addr && addr < relocation_end(); }
  bool content_contains(address addr) const      { return content_begin()      <= addr && addr < content_end(); }
  bool code_contains(address addr) const         { return code_begin()         <= addr && addr < code_end(); }
  bool data_contains(address addr) const         { return data_begin()         <= addr && addr < data_end(); }
  bool contains(address addr) const              { return content_contains(addr); }
  bool is_frame_complete_at(address addr) const  { return code_contains(addr) &&
                                                          addr >= code_begin() + _frame_complete_offset; }

  // CodeCache support: really only used by the nmethod subclass.
  virtual bool is_zombie() const                 { return false; }
  virtual bool is_locked_by_vm() const           { return false; }
  virtual bool is_unloaded() const               { return false; }
  virtual bool is_not_entrant() const            { return false; }
  virtual bool is_alive() const                  = 0;

  // OopMap for frame
  OopMapSet* oop_maps() const                    { return _oop_maps; }
  void set_oop_maps(OopMapSet* p);
  OopMap* oop_map_for_return_address(address return_address);
  virtual void preserve_callee_argument_oops(frame fr, const RegisterMap* reg_map, OopClosure* f) { ShouldNotReachHere(); }

  // Frame support
  int  frame_size() const                        { return _frame_size; }
  void set_frame_size(int size)                  { _frame_size = size; }

  // Returns true, if the next frame is responsible for GC'ing oops passed as arguments
  virtual bool caller_must_gc_arguments(JavaThread* thread) const { return false; }

  // Naming
  const char* name() const                       { return _name; }
  void set_name(const char* name)                { _name = name; }

  // Debugging
  virtual void verify();
  void print() const                             { print_on(tty); }
  virtual void print_on(outputStream* st) const;
  virtual void print_value_on(outputStream* st) const;
  void print_code();

  // Print to stream, any comments associated with offset.
  static void trace_new_stub(CodeBlob* blob, const char* name1, const char* name2 = "");
  virtual void print_block_comment(outputStream* stream, address block_begin) const {
    intptr_t offset = (intptr_t)(block_begin - code_begin());
    _strings.print_block_comment(stream, offset);
  }

  // Transfer ownership of comment lines to this blob.
  void set_strings(CodeStrings& strings) {
    _strings.assign(strings);
  }
};
// BufferBlob - holds non-nmethod code such as the I2C/C2I adapters and
// other generated stub buffers (see the subclasses below).
class BufferBlob: public CodeBlob {
  friend class VMStructs;
  friend class AdapterBlob;
  friend class VtableBlob;
  friend class MethodHandlesAdapterBlob;
  friend class WhiteBox;
 private:
  // Creation support (use the static create() factories)
  BufferBlob(const char* name, int size);
  BufferBlob(const char* name, int size, CodeBuffer* cb);

  // is_critical selects the critical reserve of the code cache
  void* operator new(size_t s, unsigned size, bool is_critical = false) throw();
 public:
  // Creation
  static BufferBlob* create(const char* name, int buffer_size);
  static BufferBlob* create(const char* name, CodeBuffer* cb);
  static void free(BufferBlob* buf);

  // Typing
  virtual bool is_buffer_blob() const            { return true; }

  // GC/Verification support
  void preserve_callee_argument_oops(frame fr, const RegisterMap* reg_map, OopClosure* f)  { /* nothing to do */ }
  bool is_alive() const                          { return true; }
  void verify();
  void print_on(outputStream* st) const;
  void print_value_on(outputStream* st) const;
};
// AdapterBlob - a BufferBlob holding I2C/C2I adapters.
class AdapterBlob: public BufferBlob {
 private:
  AdapterBlob(int size, CodeBuffer* cb);
 public:
  // Creation; returns NULL if the code cache is full
  static AdapterBlob* create(CodeBuffer* cb);

  // Typing
  virtual bool is_adapter_blob() const { return true; }
};

// VtableBlob - a BufferBlob holding vtable/itable dispatch stubs.
class VtableBlob: public BufferBlob {
 private:
  VtableBlob(const char*, int);
 public:
  // Creation; returns NULL if the code cache is full
  static VtableBlob* create(const char* name, int buffer_size);

  // Typing
  virtual bool is_vtable_blob() const { return true; }
};

// MethodHandlesAdapterBlob - a BufferBlob holding MethodHandles adapters.
class MethodHandlesAdapterBlob: public BufferBlob {
 private:
  MethodHandlesAdapterBlob(int size)                 : BufferBlob("MethodHandles adapters", size) {}
 public:
  // Creation; returns NULL if the code cache is full
  static MethodHandlesAdapterBlob* create(int buffer_size);

  // Typing
  virtual bool is_method_handles_adapter_blob() const { return true; }
};
// RuntimeStub - describes a piece of runtime support code reached from
// compiled code; entry_point() is the address callers jump to.
class RuntimeStub: public CodeBlob {
  friend class VMStructs;
 private:
  bool _caller_must_gc_arguments;  // see caller_must_gc_arguments() below

  // Creation support (use new_runtime_stub())
  RuntimeStub(
    const char* name,
    CodeBuffer* cb,
    int         size,
    int         frame_complete,
    int         frame_size,
    OopMapSet*  oop_maps,
    bool        caller_must_gc_arguments
  );

  // Allocates from the critical code cache reserve; aborts the VM on failure
  void* operator new(size_t s, unsigned size) throw();

 public:
  // Creation
  static RuntimeStub* new_runtime_stub(
    const char* stub_name,
    CodeBuffer* cb,
    int         frame_complete,
    int         frame_size,
    OopMapSet*  oop_maps,
    bool        caller_must_gc_arguments
  );

  // Typing
  bool is_runtime_stub() const                   { return true; }

  // GC support: whether the caller's outgoing arguments are GC roots
  bool caller_must_gc_arguments(JavaThread* thread) const { return _caller_must_gc_arguments; }

  address entry_point()                          { return code_begin(); }

  // GC/Verification support
  void preserve_callee_argument_oops(frame fr, const RegisterMap *reg_map, OopClosure* f)  { /* nothing to do */ }
  bool is_alive() const                          { return true; }
  void verify();
  void print_on(outputStream* st) const;
  void print_value_on(outputStream* st) const;
};
// SingletonBlob - common base for blobs that exist exactly once in the VM
// (deoptimization, uncommon trap, exception, safepoint handlers).
class SingletonBlob: public CodeBlob {
  friend class VMStructs;
 protected:
  // Allocates from the critical code cache reserve; aborts the VM on failure
  void* operator new(size_t s, unsigned size) throw();
 public:
  SingletonBlob(
    const char* name,
    CodeBuffer* cb,
    int         header_size,
    int         size,
    int         frame_size,
    OopMapSet*  oop_maps
  )
  // frame_never_safe: a singleton blob's frame is never considered complete
  : CodeBlob(name, cb, header_size, size, CodeOffsets::frame_never_safe, frame_size, oop_maps)
  {};

  address entry_point()                          { return code_begin(); }

  bool is_alive() const                          { return true; }

  void verify(); // does nothing
  void print_on(outputStream* st) const;
  void print_value_on(outputStream* st) const;
};
// DeoptimizationBlob - the singleton handler frames are routed through
// when they deoptimize.  The _unpack_* fields are code offsets of the
// individual entry points.
class DeoptimizationBlob: public SingletonBlob {
  friend class VMStructs;
 private:
  int _unpack_offset;               // normal unpack entry
  int _unpack_with_exception;       // entry for unpacking with a pending exception
  int _unpack_with_reexecution;     // entry for unpacking with bytecode re-execution
  int _unpack_with_exception_in_tls; // C1 entry; -1 until set (see setter below)

  // Creation support (use create())
  DeoptimizationBlob(
    CodeBuffer* cb,
    int         size,
    OopMapSet*  oop_maps,
    int         unpack_offset,
    int         unpack_with_exception_offset,
    int         unpack_with_reexecution_offset,
    int         frame_size
  );

 public:
  // Creation
  static DeoptimizationBlob* create(
    CodeBuffer* cb,
    OopMapSet*  oop_maps,
    int         unpack_offset,
    int         unpack_with_exception_offset,
    int         unpack_with_reexecution_offset,
    int         frame_size
  );

  // Typing
  bool is_deoptimization_stub() const { return true; }
  // 'pc' may either be the unpack entry itself or the return address
  // pushed by a call that lands there.
  bool exception_address_is_unpack_entry(address pc) const {
    address unpack_pc = unpack();
    return (pc == unpack_pc || (pc + frame::pc_return_offset) == unpack_pc);
  }

  // GC for args
  void preserve_callee_argument_oops(frame fr, const RegisterMap *reg_map, OopClosure* f) { /* Nothing to do */ }

  // Printing
  void print_value_on(outputStream* st) const;

  address unpack() const                         { return code_begin() + _unpack_offset;           }
  address unpack_with_exception() const          { return code_begin() + _unpack_with_exception;   }
  address unpack_with_reexecution() const        { return code_begin() + _unpack_with_reexecution; }

  // Alternate entry point for C1: the offset must lie inside this blob.
  void set_unpack_with_exception_in_tls_offset(int offset) {
    _unpack_with_exception_in_tls = offset;
    assert(code_contains(code_begin() + _unpack_with_exception_in_tls), "must be PC inside codeblob");
  }
  address unpack_with_exception_in_tls() const   { return code_begin() + _unpack_with_exception_in_tls; }
};
  2040. #ifdef COMPILER2
// UncommonTrapBlob (C2 only) - singleton holding the uncommon trap handler.
class UncommonTrapBlob: public SingletonBlob {
  friend class VMStructs;
 private:
  // Creation support (use create())
  UncommonTrapBlob(
    CodeBuffer* cb,
    int         size,
    OopMapSet*  oop_maps,
    int         frame_size
  );

 public:
  // Creation
  static UncommonTrapBlob* create(
    CodeBuffer* cb,
    OopMapSet*  oop_maps,
    int         frame_size
  );

  // GC for args
  void preserve_callee_argument_oops(frame fr, const RegisterMap *reg_map, OopClosure* f)  { /* nothing to do */ }

  // Typing
  bool is_uncommon_trap_stub() const             { return true; }
};
// ExceptionBlob (C2 only) - singleton holding the exception handler entry.
class ExceptionBlob: public SingletonBlob {
  friend class VMStructs;
 private:
  // Creation support (use create())
  ExceptionBlob(
    CodeBuffer* cb,
    int         size,
    OopMapSet*  oop_maps,
    int         frame_size
  );

 public:
  // Creation
  static ExceptionBlob* create(
    CodeBuffer* cb,
    OopMapSet*  oop_maps,
    int         frame_size
  );

  // GC for args
  void preserve_callee_argument_oops(frame fr, const RegisterMap* reg_map, OopClosure* f)  { /* nothing to do */ }

  // Typing
  bool is_exception_stub() const                 { return true; }
};
  2077. #endif // COMPILER2
// SafepointBlob - singleton holding the safepoint (polling trap) handler.
class SafepointBlob: public SingletonBlob {
  friend class VMStructs;
 private:
  // Creation support (use create())
  SafepointBlob(
    CodeBuffer* cb,
    int         size,
    OopMapSet*  oop_maps,
    int         frame_size
  );

 public:
  // Creation
  static SafepointBlob* create(
    CodeBuffer* cb,
    OopMapSet*  oop_maps,
    int         frame_size
  );

  // GC for args
  void preserve_callee_argument_oops(frame fr, const RegisterMap* reg_map, OopClosure* f)  { /* nothing to do */ }

  // Typing
  bool is_safepoint_stub() const                 { return true; }
};
  2096. #endif // SHARE_VM_CODE_CODEBLOB_HPP
// ===== File: src/share/vm/code/codeCache.cpp =====
  2098. #include "precompiled.hpp"
  2099. #include "code/codeBlob.hpp"
  2100. #include "code/codeCache.hpp"
  2101. #include "code/compiledIC.hpp"
  2102. #include "code/dependencies.hpp"
  2103. #include "code/icBuffer.hpp"
  2104. #include "code/nmethod.hpp"
  2105. #include "code/pcDesc.hpp"
  2106. #include "compiler/compileBroker.hpp"
  2107. #include "gc_implementation/shared/markSweep.hpp"
  2108. #include "jfr/jfrEvents.hpp"
  2109. #include "memory/allocation.inline.hpp"
  2110. #include "memory/gcLocker.hpp"
  2111. #include "memory/iterator.hpp"
  2112. #include "memory/resourceArea.hpp"
  2113. #include "oops/method.hpp"
  2114. #include "oops/objArrayOop.hpp"
  2115. #include "oops/oop.inline.hpp"
  2116. #include "runtime/handles.inline.hpp"
  2117. #include "runtime/arguments.hpp"
  2118. #include "runtime/deoptimization.hpp"
  2119. #include "runtime/icache.hpp"
  2120. #include "runtime/java.hpp"
  2121. #include "runtime/mutexLocker.hpp"
  2122. #include "services/memoryService.hpp"
  2123. #include "utilities/xmlstream.hpp"
// Helper class for accumulating and printing aggregate size statistics
// over a set of CodeBlobs (used by the code cache printing code).
class CodeBlob_sizes {
 private:
  int count;
  int total_size;
  int header_size;
  int code_size;
  int stub_size;
  int relocation_size;
  int scopes_oop_size;
  int scopes_metadata_size;
  int scopes_data_size;
  int scopes_pcs_size;

 public:
  CodeBlob_sizes() {
    count            = 0;
    total_size       = 0;
    header_size      = 0;
    code_size        = 0;
    stub_size        = 0;
    relocation_size  = 0;
    scopes_oop_size  = 0;
    scopes_metadata_size  = 0;
    scopes_data_size = 0;
    scopes_pcs_size  = 0;
  }

  int total()                                    { return total_size; }
  bool is_empty()                                { return count == 0; }

  // Print a one-line summary; the percentages are relative to total_size.
  // NOTE(review): divides by total_size -- callers should check is_empty()
  // first; confirm no caller prints an empty accumulator.
  void print(const char* title) {
    tty->print_cr(" #%d %s = %dK (hdr %d%%,  loc %d%%, code %d%%, stub %d%%, [oops %d%%, metadata %d%%, data %d%%, pcs %d%%])",
                  count,
                  title,
                  (int)(total() / K),
                  header_size             * 100 / total_size,
                  relocation_size         * 100 / total_size,
                  code_size               * 100 / total_size,
                  stub_size               * 100 / total_size,
                  scopes_oop_size         * 100 / total_size,
                  scopes_metadata_size    * 100 / total_size,
                  scopes_data_size        * 100 / total_size,
                  scopes_pcs_size         * 100 / total_size);
  }

  // Accumulate the sizes of 'cb'; nmethods are broken down into their
  // scope/debug-info sections, other blobs contribute their code size only.
  void add(CodeBlob* cb) {
    count++;
    total_size       += cb->size();
    header_size      += cb->header_size();
    relocation_size  += cb->relocation_size();
    if (cb->is_nmethod()) {
      nmethod* nm = cb->as_nmethod_or_null();
      code_size        += nm->insts_size();
      stub_size        += nm->stub_size();
      scopes_oop_size  += nm->oops_size();
      scopes_metadata_size  += nm->metadata_size();
      scopes_data_size += nm->scopes_data_size();
      scopes_pcs_size  += nm->scopes_pcs_size();
    } else {
      code_size        += cb->code_size();
    }
  }
};
// CodeCache static state.  The backing CodeHeap is created eagerly at
// static-initialization time.
CodeHeap * CodeCache::_heap = new CodeHeap();
int CodeCache::_number_of_blobs = 0;                           // all blobs in the cache
int CodeCache::_number_of_adapters = 0;
int CodeCache::_number_of_nmethods = 0;
int CodeCache::_number_of_nmethods_with_dependencies = 0;
bool CodeCache::_needs_cache_clean = false;
nmethod* CodeCache::_scavenge_root_nmethods = NULL;            // head; linked via nm->scavenge_root_link()
int CodeCache::_codemem_full_count = 0;                        // times the cache was reported full
// Iteration helpers.  All of them require CodeCache_lock to be held (or a
// safepoint) so the underlying heap cannot change during traversal.

// First blob in the code heap, or NULL if the cache is empty.
CodeBlob* CodeCache::first() {
  assert_locked_or_safepoint(CodeCache_lock);
  return (CodeBlob*)_heap->first();
}

// Blob following 'cb' in heap order, or NULL.
CodeBlob* CodeCache::next(CodeBlob* cb) {
  assert_locked_or_safepoint(CodeCache_lock);
  return (CodeBlob*)_heap->next(cb);
}

// Skip forward from 'cb' (inclusive) to the next blob that is_alive().
CodeBlob* CodeCache::alive(CodeBlob *cb) {
  assert_locked_or_safepoint(CodeCache_lock);
  while (cb != NULL && !cb->is_alive()) cb = next(cb);
  return cb;
}

// Skip forward from 'cb' (inclusive) to the next live nmethod.
nmethod* CodeCache::alive_nmethod(CodeBlob* cb) {
  assert_locked_or_safepoint(CodeCache_lock);
  while (cb != NULL && (!cb->is_alive() || !cb->is_nmethod())) cb = next(cb);
  return (nmethod*)cb;
}

// First nmethod in the cache (regardless of liveness), or NULL.
nmethod* CodeCache::first_nmethod() {
  assert_locked_or_safepoint(CodeCache_lock);
  CodeBlob* cb = first();
  while (cb != NULL && !cb->is_nmethod()) {
    cb = next(cb);
  }
  return (nmethod*)cb;
}

// First nmethod strictly after 'cb', or NULL.
nmethod* CodeCache::next_nmethod (CodeBlob* cb) {
  assert_locked_or_safepoint(CodeCache_lock);
  cb = next(cb);
  while (cb != NULL && !cb->is_nmethod()) {
    cb = next(cb);
  }
  return (nmethod*)cb;
}
// High-water mark of code cache usage; updated in CodeCache::allocate().
static size_t maxCodeCacheUsed = 0;
  2226. CodeBlob* CodeCache::allocate(int size, bool is_critical) {
  2227. guarantee(size >= 0, "allocation request must be reasonable");
  2228. assert_locked_or_safepoint(CodeCache_lock);
  2229. CodeBlob* cb = NULL;
  2230. _number_of_blobs++;
  2231. while (true) {
  2232. cb = (CodeBlob*)_heap->allocate(size, is_critical);
  2233. if (cb != NULL) break;
  2234. if (!_heap->expand_by(CodeCacheExpansionSize)) {
  2235. if (CodeCache_lock->owned_by_self()) {
  2236. MutexUnlockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
  2237. report_codemem_full();
  2238. } else {
  2239. report_codemem_full();
  2240. }
  2241. return NULL;
  2242. }
  2243. if (PrintCodeCacheExtension) {
  2244. ResourceMark rm;
  2245. tty->print_cr("code cache extended to [" INTPTR_FORMAT ", " INTPTR_FORMAT "] (" SSIZE_FORMAT " bytes)",
  2246. (intptr_t)_heap->low_boundary(), (intptr_t)_heap->high(),
  2247. (address)_heap->high() - (address)_heap->low_boundary());
  2248. }
  2249. }
  2250. maxCodeCacheUsed = MAX2(maxCodeCacheUsed, ((address)_heap->high_boundary() -
  2251. (address)_heap->low_boundary()) - unallocated_capacity());
  2252. verify_if_often();
  2253. print_trace("allocation", cb, size);
  2254. return cb;
  2255. }
// Return 'cb' to the code heap and update the bookkeeping counters.
// Caller must hold CodeCache_lock or be at a safepoint.
void CodeCache::free(CodeBlob* cb) {
  assert_locked_or_safepoint(CodeCache_lock);
  verify_if_often();

  print_trace("free", cb);
  if (cb->is_nmethod()) {
    _number_of_nmethods--;
    if (((nmethod *)cb)->has_dependencies()) {
      _number_of_nmethods_with_dependencies--;
    }
  }
  if (cb->is_adapter_blob()) {
    _number_of_adapters--;
  }
  _number_of_blobs--;

  _heap->deallocate(cb);

  verify_if_often();
  assert(_number_of_blobs >= 0, "sanity check");
}
// Second half of blob creation: called once the blob's constructor has run,
// to update per-kind counters and make the new code executable.
void CodeCache::commit(CodeBlob* cb) {
  // this is called by nmethod::nmethod, which must already own CodeCache_lock
  assert_locked_or_safepoint(CodeCache_lock);
  if (cb->is_nmethod()) {
    _number_of_nmethods++;
    if (((nmethod *)cb)->has_dependencies()) {
      _number_of_nmethods_with_dependencies++;
    }
  }
  if (cb->is_adapter_blob()) {
    _number_of_adapters++;
  }

  // flush the hardware I-cache so the freshly written code is visible
  ICache::invalidate_range(cb->content_begin(), cb->content_size());
}
// Flushing the entire code cache is not supported.
void CodeCache::flush() {
  assert_locked_or_safepoint(CodeCache_lock);
  Unimplemented();
}
// Iteration macros over the code cache.  All of them rely on first()/next(),
// which assert that CodeCache_lock is held (or we are at a safepoint).
#define FOR_ALL_BLOBS(var)       for (CodeBlob *var =       first() ; var != NULL; var =       next(var) )
#define FOR_ALL_ALIVE_BLOBS(var) for (CodeBlob *var = alive(first()); var != NULL; var = alive(next(var)))
#define FOR_ALL_ALIVE_NMETHODS(var) for (nmethod *var = alive_nmethod(first()); var != NULL; var = alive_nmethod(next(var)))
// Membership test against the code heap's reserved range.
// NOTE(review): no lock is taken here -- the caller must ensure the answer
// cannot be invalidated concurrently if that matters to it.
bool CodeCache::contains(void *p) {
  return _heap->contains(p);
}

// Find the blob containing 'start', refusing unsafe access to zombies.
// Returns NULL if 'start' is not inside any blob.
CodeBlob* CodeCache::find_blob(void* start) {
  CodeBlob* result = find_blob_unsafe(start);
  if (result == NULL) return NULL;
  // We could potentially look up non_entrant methods
  guarantee(!result->is_zombie() || result->is_locked_by_vm() || is_error_reported(), "unsafe access to zombie method");
  return result;
}

// Like find_blob(), but asserts the result (if any) is an nmethod.
nmethod* CodeCache::find_nmethod(void* start) {
  CodeBlob *cb = find_blob(start);
  assert(cb == NULL || cb->is_nmethod(), "did not find an nmethod");
  return (nmethod*)cb;
}
// Apply 'f' to every blob in the cache (alive or not).
void CodeCache::blobs_do(void f(CodeBlob* nm)) {
  assert_locked_or_safepoint(CodeCache_lock);
  FOR_ALL_BLOBS(p) {
    f(p);
  }
}

// Apply 'f' to every nmethod in the cache, including dead ones.
void CodeCache::nmethods_do(void f(nmethod* nm)) {
  assert_locked_or_safepoint(CodeCache_lock);
  FOR_ALL_BLOBS(nm) {
    if (nm->is_nmethod()) f((nmethod*)nm);
  }
}

// Apply 'f' only to nmethods that are still alive.
void CodeCache::alive_nmethods_do(void f(nmethod* nm)) {
  assert_locked_or_safepoint(CodeCache_lock);
  FOR_ALL_ALIVE_NMETHODS(nm) {
    f(nm);
  }
}
  2326. int CodeCache::alignment_unit() {
  2327. return (int)_heap->alignment_unit();
  2328. }
  2329. int CodeCache::alignment_offset() {
  2330. return (int)_heap->alignment_offset();
  2331. }
// GC support: let every live nmethod unload itself if its class loader or
// other referents have died (delegates to nmethod::do_unloading).
void CodeCache::do_unloading(BoolObjectClosure* is_alive, bool unloading_occurred) {
  assert_locked_or_safepoint(CodeCache_lock);
  FOR_ALL_ALIVE_NMETHODS(nm) {
    nm->do_unloading(is_alive, unloading_occurred);
  }
}
// Apply the closure to every live blob; in debug builds additionally check
// the scavenge-root bookkeeping of each nmethod visited.
void CodeCache::blobs_do(CodeBlobClosure* f) {
  assert_locked_or_safepoint(CodeCache_lock);
  FOR_ALL_ALIVE_BLOBS(cb) {
    f->do_code_blob(cb);

#ifdef ASSERT
    if (cb->is_nmethod())
      ((nmethod*)cb)->verify_scavenge_root_oops();
#endif //ASSERT
  }
}
// Walk the list of nmethods registered as containing scavengable oops and
// apply 'f' to each live one.  When the closure asks for relocation fixups,
// entries that are dead or no longer contain scavengable oops are unlinked
// from the list as we go.  G1 does not maintain this list, hence the early
// return.
void CodeCache::scavenge_root_nmethods_do(CodeBlobToOopClosure* f) {
  assert_locked_or_safepoint(CodeCache_lock);

  if (UseG1GC) {
    return;
  }

  const bool fix_relocations = f->fix_relocations();
  debug_only(mark_scavenge_root_nmethods());

  nmethod* prev = NULL;   // trails 'cur' so entries can be unlinked
  nmethod* cur = scavenge_root_nmethods();
  while (cur != NULL) {
    debug_only(cur->clear_scavenge_root_marked());
    assert(cur->scavenge_root_not_marked(), "");
    assert(cur->on_scavenge_root_list(), "else shouldn't be on this list");

    bool is_live = (!cur->is_zombie() && !cur->is_unloaded());
#ifndef PRODUCT
    if (TraceScavenge) {
      cur->print_on(tty, is_live ? "scavenge root" : "dead scavenge root"); tty->cr();
    }
#endif //PRODUCT
    if (is_live) {
      // Perform cur->oops_do(f), maybe just once per nmethod.
      f->do_code_blob(cur);
    }
    nmethod* const next = cur->scavenge_root_link();
    // The scavengable nmethod list must contain all methods with scavengable
    // oops. It is safe to include more nmethod on the list, but we do not
    // expect any live non-scavengable nmethods on the list.
    if (fix_relocations) {
      if (!is_live || !cur->detect_scavenge_root_oops()) {
        unlink_scavenge_root_nmethod(cur, prev);
      } else {
        prev = cur;
      }
    }
    cur = next;
  }

  // Check for stray marks.
  debug_only(verify_perm_nmethods(NULL));
}
// Pushes 'nm' onto the front of the scavenge-root list and flags it as being
// on the list. No-op under G1. The link must be set before the list head so
// the list is never left inconsistent.
void CodeCache::add_scavenge_root_nmethod(nmethod* nm) {
  assert_locked_or_safepoint(CodeCache_lock);
  if (UseG1GC) {
    return;
  }
  nm->set_on_scavenge_root_list();
  nm->set_scavenge_root_link(_scavenge_root_nmethods);
  set_scavenge_root_nmethods(nm);
  print_trace("add_scavenge_root", nm);
}
// Removes 'nm' from the scavenge-root list. 'prev' must be its predecessor on
// the list, or NULL if 'nm' is the head. Clears nm's link and on-list flag.
void CodeCache::unlink_scavenge_root_nmethod(nmethod* nm, nmethod* prev) {
  assert_locked_or_safepoint(CodeCache_lock);
  assert((prev == NULL && scavenge_root_nmethods() == nm) ||
         (prev != NULL && prev->scavenge_root_link() == nm), "precondition");
  assert(!UseG1GC, "G1 does not use the scavenge_root_nmethods list");
  print_trace("unlink_scavenge_root", nm);
  if (prev == NULL) {
    set_scavenge_root_nmethods(nm->scavenge_root_link());
  } else {
    prev->set_scavenge_root_link(nm->scavenge_root_link());
  }
  nm->set_scavenge_root_link(NULL);
  nm->clear_on_scavenge_root_list();
}
  2406. void CodeCache::drop_scavenge_root_nmethod(nmethod* nm) {
  2407. assert_locked_or_safepoint(CodeCache_lock);
  2408. if (UseG1GC) {
  2409. return;
  2410. }
  2411. print_trace("drop_scavenge_root", nm);
  2412. nmethod* prev = NULL;
  2413. for (nmethod* cur = scavenge_root_nmethods(); cur != NULL; cur = cur->scavenge_root_link()) {
  2414. if (cur == nm) {
  2415. unlink_scavenge_root_nmethod(cur, prev);
  2416. return;
  2417. }
  2418. prev = cur;
  2419. }
  2420. assert(false, "should have been on list");
  2421. }
// Walks the scavenge-root list and unlinks every entry that is dead (zombie or
// unloaded) or that no longer contains any scavengable oops. No-op under G1.
void CodeCache::prune_scavenge_root_nmethods() {
  assert_locked_or_safepoint(CodeCache_lock);
  if (UseG1GC) {
    return;
  }
  debug_only(mark_scavenge_root_nmethods());
  nmethod* last = NULL;  // last entry kept on the list (predecessor for unlink)
  nmethod* cur = scavenge_root_nmethods();
  while (cur != NULL) {
    // Read the link first; unlinking clears it.
    nmethod* next = cur->scavenge_root_link();
    debug_only(cur->clear_scavenge_root_marked());
    assert(cur->scavenge_root_not_marked(), "");
    assert(cur->on_scavenge_root_list(), "else shouldn't be on this list");
    if (!cur->is_zombie() && !cur->is_unloaded()
        && cur->detect_scavenge_root_oops()) {
      last = cur;  // keep this entry
    } else {
      print_trace("prune_scavenge_root", cur);
      unlink_scavenge_root_nmethod(cur, last);
    }
    cur = next;
  }
  // Debug check: nothing off-list should still be marked.
  debug_only(verify_perm_nmethods(NULL));
}
  2446. #ifndef PRODUCT
// Debug-only: applies 'f' to blobs asserted to be non-scavengable, by marking
// everything on the scavenge-root list and letting verify_perm_nmethods()
// visit only the unmarked remainder. No-op under G1.
void CodeCache::asserted_non_scavengable_nmethods_do(CodeBlobClosure* f) {
  if (UseG1GC) {
    return;
  }
  mark_scavenge_root_nmethods();
  for (nmethod* cur = scavenge_root_nmethods(); cur != NULL; cur = cur->scavenge_root_link()) {
    assert(cur->on_scavenge_root_list(), "else shouldn't be on this list");
    cur->clear_scavenge_root_marked();
  }
  verify_perm_nmethods(f);
}
// Debug-only: sets the scavenge-root mark on every alive nmethod that claims
// to be on the scavenge-root list; each must start in the unmarked state.
void CodeCache::mark_scavenge_root_nmethods() {
  FOR_ALL_ALIVE_BLOBS(cb) {
    if (cb->is_nmethod()) {
      nmethod *nm = (nmethod*)cb;
      assert(nm->scavenge_root_not_marked(), "clean state");
      if (nm->on_scavenge_root_list())
        nm->set_scavenge_root_marked();
    }
  }
}
// Debug-only: verifies that every alive nmethod NOT on the scavenge-root list
// has clean (no scavengable) oops, and optionally passes those nmethods to
// 'f_or_null'. List members and non-nmethod blobs are not shown to the client.
void CodeCache::verify_perm_nmethods(CodeBlobClosure* f_or_null) {
  FOR_ALL_ALIVE_BLOBS(cb) {
    bool call_f = (f_or_null != NULL);
    if (cb->is_nmethod()) {
      nmethod *nm = (nmethod*)cb;
      assert(nm->scavenge_root_not_marked(), "must be already processed");
      if (nm->on_scavenge_root_list())
        call_f = false; // don't show this one to the client
      nm->verify_scavenge_root_oops();
    } else {
      call_f = false; // not an nmethod
    }
    if (call_f) f_or_null->do_code_blob(cb);
  }
}
  2483. #endif //PRODUCT
// Debug-only (body compiled under ASSERT): checks that every alive nmethod has
// clean inline caches and passes full verification.
void CodeCache::verify_clean_inline_caches() {
#ifdef ASSERT
  FOR_ALL_ALIVE_BLOBS(cb) {
    if (cb->is_nmethod()) {
      nmethod* nm = (nmethod*)cb;
      assert(!nm->is_unloaded(), "Tautology");
      nm->verify_clean_inline_caches();
      nm->verify();
    }
  }
#endif
}
// Debug-only (body compiled under ASSERT): cross-checks CompiledICHolder
// bookkeeping - holders referenced from nmethod relocations, plus those
// pending release and those not yet claimed, must account for every live one.
void CodeCache::verify_icholder_relocations() {
#ifdef ASSERT
  int count = 0;
  FOR_ALL_BLOBS(cb) {
    if (cb->is_nmethod()) {
      nmethod* nm = (nmethod*)cb;
      count += nm->verify_icholder_relocations();
    }
  }
  assert(count + InlineCacheBuffer::pending_icholder_count() + CompiledICHolder::live_not_claimed_count() ==
         CompiledICHolder::live_count(), "must agree");
#endif
}
// GC prologue hook; the code cache currently has no work to do before a GC.
void CodeCache::gc_prologue() {
}
// GC epilogue: cleans inline caches if a cache clean was requested, then
// prunes the scavenge-root list and verifies icholder bookkeeping.
//
// Note the NOT_DEBUG/DEBUG_ONLY pairing: in product builds the
// needs_cache_clean() test guards the whole loop (skip it entirely when
// clean); in debug builds the loop always runs so that every nmethod is
// verified, and the test guards only the actual cleanup.
void CodeCache::gc_epilogue() {
  assert_locked_or_safepoint(CodeCache_lock);
  NOT_DEBUG(if (needs_cache_clean())) {
    FOR_ALL_ALIVE_BLOBS(cb) {
      if (cb->is_nmethod()) {
        nmethod *nm = (nmethod*)cb;
        assert(!nm->is_unloaded(), "Tautology");
        DEBUG_ONLY(if (needs_cache_clean())) {
          nm->cleanup_inline_caches();
        }
        DEBUG_ONLY(nm->verify());
        DEBUG_ONLY(nm->verify_oop_relocations());
      }
    }
  }
  set_needs_cache_clean(false);
  prune_scavenge_root_nmethods();
  verify_icholder_relocations();
}
// Verifies all oops embedded in alive nmethods, including their oop
// relocations. Takes the CodeCache_lock without a safepoint check.
void CodeCache::verify_oops() {
  MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
  VerifyOopClosure voc;
  FOR_ALL_ALIVE_BLOBS(cb) {
    if (cb->is_nmethod()) {
      nmethod *nm = (nmethod*)cb;
      nm->oops_do(&voc);
      nm->verify_oop_relocations();
    }
  }
}
// First address usable for CodeBlobs (low boundary of the code heap).
address CodeCache::first_address() {
  assert_locked_or_safepoint(CodeCache_lock);
  return (address)_heap->low_boundary();
}
// Last (committed) address used for CodeBlobs (current high mark of the heap).
address CodeCache::last_address() {
  assert_locked_or_safepoint(CodeCache_lock);
  return (address)_heap->high();
}
  2549. double CodeCache::reverse_free_ratio() {
  2550. double unallocated_capacity = (double)(CodeCache::unallocated_capacity() - CodeCacheMinimumFreeSpace);
  2551. double max_capacity = (double)CodeCache::max_capacity();
  2552. return max_capacity / unallocated_capacity;
  2553. }
// Platform-specific instruction-cache initialization (defined elsewhere).
void icache_init();

// One-time startup initialization: validates segment-size constraints, rounds
// the sizing flags to page granularity, reserves the code heap, registers it
// with the memory service, and sets up icache flushing and OS code-area hooks.
void CodeCache::initialize() {
  assert(CodeCacheSegmentSize >= (uintx)CodeEntryAlignment, "CodeCacheSegmentSize must be large enough to align entry points");
#ifdef COMPILER2
  assert(CodeCacheSegmentSize >= (uintx)OptoLoopAlignment, "CodeCacheSegmentSize must be large enough to align inner loops");
#endif
  assert(CodeCacheSegmentSize >= sizeof(jdouble), "CodeCacheSegmentSize must be large enough to align constants");
  CodeCacheExpansionSize = round_to(CodeCacheExpansionSize, os::vm_page_size());
  InitialCodeCacheSize = round_to(InitialCodeCacheSize, os::vm_page_size());
  ReservedCodeCacheSize = round_to(ReservedCodeCacheSize, os::vm_page_size());
  if (!_heap->reserve(ReservedCodeCacheSize, InitialCodeCacheSize, CodeCacheSegmentSize)) {
    vm_exit_during_initialization("Could not reserve enough space for code cache");
  }
  MemoryService::add_code_heap_memory_pool(_heap);
  icache_init();
  os::register_code_area(_heap->low_boundary(), _heap->high_boundary());
}
// VM startup hook: initializes the global code cache.
void codeCache_init() {
  CodeCache::initialize();
}
// Number of nmethods currently registered as having dependencies.
int CodeCache::number_of_nmethods_with_dependencies() {
  return _number_of_nmethods_with_dependencies;
}
// Clears the inline caches of every alive nmethod.
void CodeCache::clear_inline_caches() {
  assert_locked_or_safepoint(CodeCache_lock);
  FOR_ALL_ALIVE_NMETHODS(nm) {
    nm->clear_inline_caches();
  }
}
#ifndef PRODUCT
// Accumulated time and invocation count of dependency checking;
// reported by CodeCache::print() in non-product builds.
static elapsedTimer dependentCheckTime;
static int dependentCheckCount = 0;
#endif // PRODUCT
// Marks for deoptimization every nmethod whose dependencies are invalidated by
// 'changes'. Returns the number of code blobs marked. With -XX:+VerifyDependencies,
// re-checks all dependencies of unmarked nmethods and reports any that should
// have been marked.
int CodeCache::mark_for_deoptimization(DepChange& changes) {
  MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
#ifndef PRODUCT
  dependentCheckTime.start();
  dependentCheckCount++;
#endif // PRODUCT
  int number_of_marked_CodeBlobs = 0;
  // No safepoint may intervene while we walk the context classes of the change.
  { No_Safepoint_Verifier nsv;
    for (DepChange::ContextStream str(changes, nsv); str.next(); ) {
      Klass* d = str.klass();
      number_of_marked_CodeBlobs += InstanceKlass::cast(d)->mark_dependent_nmethods(changes);
    }
  }
  if (VerifyDependencies) {
    // Suppress per-dependency tracing while re-validating everything.
    NOT_PRODUCT( FlagSetting fs(TraceDependencies, false) );
    FOR_ALL_ALIVE_NMETHODS(nm) {
      if (!nm->is_marked_for_deoptimization() &&
          nm->check_all_dependencies()) {
        ResourceMark rm;
        tty->print_cr("Should have been marked for deoptimization:");
        changes.print();
        nm->print();
        nm->print_dependencies();
      }
    }
  }
#ifndef PRODUCT
  dependentCheckTime.stop();
#endif // PRODUCT
  return number_of_marked_CodeBlobs;
}
#ifdef HOTSWAP
// Marks for deoptimization all code affected by class redefinition of
// 'dependee': the compiled code of its own (old) methods, plus any alive
// nmethod that is evolution-dependent on it. Other nmethods get their inline
// caches cleared so they cannot keep calling the old code. Returns the number
// of code blobs marked.
int CodeCache::mark_for_evol_deoptimization(instanceKlassHandle dependee) {
  MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
  int number_of_marked_CodeBlobs = 0;
  Array<Method*>* old_methods = dependee->methods();
  for (int i = 0; i < old_methods->length(); i++) {
    ResourceMark rm;
    Method* old_method = old_methods->at(i);
    nmethod *nm = old_method->code();
    if (nm != NULL) {
      nm->mark_for_deoptimization();
      number_of_marked_CodeBlobs++;
    }
  }
  FOR_ALL_ALIVE_NMETHODS(nm) {
    if (nm->is_marked_for_deoptimization()) {
      // Already counted above (or marked earlier); nothing more to do.
    } else if (nm->is_evol_dependent_on(dependee())) {
      ResourceMark rm;
      nm->mark_for_deoptimization();
      number_of_marked_CodeBlobs++;
    } else {
      nm->clear_inline_caches();
    }
  }
  return number_of_marked_CodeBlobs;
}
#endif // HOTSWAP
// Marks every alive nmethod for deoptimization, except method-handle
// intrinsics (which must stay callable).
void CodeCache::mark_all_nmethods_for_deoptimization() {
  MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
  FOR_ALL_ALIVE_NMETHODS(nm) {
    if (!nm->method()->is_method_handle_intrinsic()) {
      nm->mark_for_deoptimization();
    }
  }
}
// Marks for deoptimization every alive nmethod that depends on 'dependee'
// (e.g. inlined it). Returns the number of code blobs marked.
int CodeCache::mark_for_deoptimization(Method* dependee) {
  MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
  int number_of_marked_CodeBlobs = 0;
  FOR_ALL_ALIVE_NMETHODS(nm) {
    if (nm->is_dependent_on_method(dependee)) {
      ResourceMark rm;
      nm->mark_for_deoptimization();
      number_of_marked_CodeBlobs++;
    }
  }
  return number_of_marked_CodeBlobs;
}
  2665. void CodeCache::make_marked_nmethods_not_entrant() {
  2666. assert_locked_or_safepoint(CodeCache_lock);
  2667. FOR_ALL_ALIVE_NMETHODS(nm) {
  2668. if (nm->is_marked_for_deoptimization()) {
  2669. nm->make_not_entrant();
  2670. }
  2671. }
  2672. }
  2673. void CodeCache::verify() {
  2674. _heap->verify();
  2675. FOR_ALL_ALIVE_BLOBS(p) {
  2676. p->verify();
  2677. }
  2678. }
// Records a code-cache-full condition: bumps the counter and, if event
// tracing is enabled, emits a CodeCacheFull event with the current bounds
// and occupancy statistics.
void CodeCache::report_codemem_full() {
  _codemem_full_count++;
  EventCodeCacheFull event;
  if (event.should_commit()) {
    event.set_codeBlobType((u1)CodeBlobType::All);
    event.set_startAddress((u8)low_bound());
    event.set_commitedTopAddress((u8)high());
    event.set_reservedTopAddress((u8)high_bound());
    event.set_entryCount(nof_blobs());
    event.set_methodCount(nof_nmethods());
    event.set_adaptorCount(nof_adapters());
    event.set_unallocatedCapacity(unallocated_capacity()/K);
    event.set_fullCount(_codemem_full_count);
    event.commit();
  }
}
  2695. #ifndef PRODUCT
  2696. void CodeCache::verify_if_often() {
  2697. if (VerifyCodeCacheOften) {
  2698. _heap->verify();
  2699. }
  2700. }
// Non-product tracing helper: under -XX:+PrintCodeCache2, logs an event name,
// blob address and size (defaults to the blob's own size when 'size' is 0).
void CodeCache::print_trace(const char* event, CodeBlob* cb, int size) {
  if (PrintCodeCache2) {  // Need to add a new flag
    ResourceMark rm;
    if (size == 0) size = cb->size();
    tty->print_cr("CodeCache %s: addr: " INTPTR_FORMAT ", size: 0x%x", event, p2i(cb), size);
  }
}
// Non-product: prints a detailed breakdown of the code cache - blob counts by
// kind, nmethod state counts, and a histogram of Java nmethod instruction
// sizes in 512-byte buckets.
void CodeCache::print_internals() {
  int nmethodCount = 0;
  int runtimeStubCount = 0;
  int adapterCount = 0;
  int deoptimizationStubCount = 0;
  int uncommonTrapStubCount = 0;
  int bufferBlobCount = 0;
  int total = 0;
  int nmethodAlive = 0;
  int nmethodNotEntrant = 0;
  int nmethodZombie = 0;
  int nmethodUnloaded = 0;
  int nmethodJava = 0;
  int nmethodNative = 0;
  int maxCodeSize = 0;
  ResourceMark rm;
  CodeBlob *cb;
  // First pass: classify every blob and track the largest Java nmethod,
  // which bounds the histogram below.
  for (cb = first(); cb != NULL; cb = next(cb)) {
    total++;
    if (cb->is_nmethod()) {
      nmethod* nm = (nmethod*)cb;
      if (Verbose && nm->method() != NULL) {
        ResourceMark rm;
        char *method_name = nm->method()->name_and_sig_as_C_string();
        tty->print("%s", method_name);
        if(nm->is_alive()) { tty->print_cr(" alive"); }
        if(nm->is_not_entrant()) { tty->print_cr(" not-entrant"); }
        if(nm->is_zombie()) { tty->print_cr(" zombie"); }
      }
      nmethodCount++;
      if(nm->is_alive()) { nmethodAlive++; }
      if(nm->is_not_entrant()) { nmethodNotEntrant++; }
      if(nm->is_zombie()) { nmethodZombie++; }
      if(nm->is_unloaded()) { nmethodUnloaded++; }
      if(nm->is_native_method()) { nmethodNative++; }
      if(nm->method() != NULL && nm->is_java_method()) {
        nmethodJava++;
        if (nm->insts_size() > maxCodeSize) {
          maxCodeSize = nm->insts_size();
        }
      }
    } else if (cb->is_runtime_stub()) {
      runtimeStubCount++;
    } else if (cb->is_deoptimization_stub()) {
      deoptimizationStubCount++;
    } else if (cb->is_uncommon_trap_stub()) {
      uncommonTrapStubCount++;
    } else if (cb->is_adapter_blob()) {
      adapterCount++;
    } else if (cb->is_buffer_blob()) {
      bufferBlobCount++;
    }
  }
  int bucketSize = 512;
  int bucketLimit = maxCodeSize / bucketSize + 1;
  int *buckets = NEW_C_HEAP_ARRAY(int, bucketLimit, mtCode);
  memset(buckets,0,sizeof(int) * bucketLimit);
  // Second pass: fill the histogram.
  // NOTE(review): unlike the first pass, there is no nm->method() != NULL
  // guard before is_java_method() here - confirm that is safe for nmethods
  // whose method has been cleared.
  for (cb = first(); cb != NULL; cb = next(cb)) {
    if (cb->is_nmethod()) {
      nmethod* nm = (nmethod*)cb;
      if(nm->is_java_method()) {
        buckets[nm->insts_size() / bucketSize]++;
      }
    }
  }
  tty->print_cr("Code Cache Entries (total of %d)",total);
  tty->print_cr("-------------------------------------------------");
  tty->print_cr("nmethods: %d",nmethodCount);
  tty->print_cr("\talive: %d",nmethodAlive);
  tty->print_cr("\tnot_entrant: %d",nmethodNotEntrant);
  tty->print_cr("\tzombie: %d",nmethodZombie);
  tty->print_cr("\tunloaded: %d",nmethodUnloaded);
  tty->print_cr("\tjava: %d",nmethodJava);
  tty->print_cr("\tnative: %d",nmethodNative);
  tty->print_cr("runtime_stubs: %d",runtimeStubCount);
  tty->print_cr("adapters: %d",adapterCount);
  tty->print_cr("buffer blobs: %d",bufferBlobCount);
  tty->print_cr("deoptimization_stubs: %d",deoptimizationStubCount);
  tty->print_cr("uncommon_traps: %d",uncommonTrapStubCount);
  tty->print_cr("\nnmethod size distribution (non-zombie java)");
  tty->print_cr("-------------------------------------------------");
  for(int i=0; i<bucketLimit; i++) {
    if(buckets[i] != 0) {
      tty->print("%d - %d bytes",i*bucketSize,(i+1)*bucketSize);
      tty->fill_to(40);
      tty->print_cr("%d",buckets[i]);
    }
  }
  FREE_C_HEAP_ARRAY(int, buckets, mtCode);
}
#endif // !PRODUCT
// Prints a code cache summary; with -XX:+Verbose (non-product) also prints
// live/dead size breakdowns, dependency-check timing, and (in WizardMode)
// aggregate oop-map statistics.
void CodeCache::print() {
  print_summary(tty);
#ifndef PRODUCT
  if (!Verbose) return;
  CodeBlob_sizes live;
  CodeBlob_sizes dead;
  FOR_ALL_BLOBS(p) {
    if (!p->is_alive()) {
      dead.add(p);
    } else {
      live.add(p);
    }
  }
  tty->print_cr("CodeCache:");
  tty->print_cr("nmethod dependency checking time %f, per dependent %f", dependentCheckTime.seconds(),
                dependentCheckTime.seconds() / dependentCheckCount);
  if (!live.is_empty()) {
    live.print("live");
  }
  if (!dead.is_empty()) {
    dead.print("dead");
  }
  if (WizardMode) {
    // Aggregate code size and oop-map statistics over all alive blobs.
    int code_size = 0;
    int number_of_blobs = 0;
    int number_of_oop_maps = 0;
    int map_size = 0;
    FOR_ALL_BLOBS(p) {
      if (p->is_alive()) {
        number_of_blobs++;
        code_size += p->code_size();
        OopMapSet* set = p->oop_maps();
        if (set != NULL) {
          number_of_oop_maps += set->size();
          map_size += set->heap_size();
        }
      }
    }
    tty->print_cr("OopMaps");
    tty->print_cr("  #blobs    = %d", number_of_blobs);
    tty->print_cr("  code size = %d", code_size);
    tty->print_cr("  #oop_maps = %d", number_of_oop_maps);
    tty->print_cr("  map size  = %d", map_size);
  }
#endif // !PRODUCT
}
// Prints a one-line usage summary; when 'detailed', also the heap bounds,
// blob counts, and whether compilation is currently enabled.
void CodeCache::print_summary(outputStream* st, bool detailed) {
  size_t total = (_heap->high_boundary() - _heap->low_boundary());
  st->print_cr("CodeCache: size=" SIZE_FORMAT "Kb used=" SIZE_FORMAT
               "Kb max_used=" SIZE_FORMAT "Kb free=" SIZE_FORMAT "Kb",
               total/K, (total - unallocated_capacity())/K,
               maxCodeCacheUsed/K, unallocated_capacity()/K);
  if (detailed) {
    st->print_cr(" bounds [" INTPTR_FORMAT ", " INTPTR_FORMAT ", " INTPTR_FORMAT "]",
                 p2i(_heap->low_boundary()),
                 p2i(_heap->high()),
                 p2i(_heap->high_boundary()));
    st->print_cr(" total_blobs=" UINT32_FORMAT " nmethods=" UINT32_FORMAT
                 " adapters=" UINT32_FORMAT,
                 nof_blobs(), nof_nmethods(), nof_adapters());
    st->print_cr(" compilation: %s", CompileBroker::should_compile_new_jobs() ?
                 "enabled" : Arguments::mode() == Arguments::_int ?
                 "disabled (interpreter mode)" :
                 "disabled (not enough contiguous free space left)");
  }
}
// Emits code cache counters in XML-attribute form for the compilation log.
void CodeCache::log_state(outputStream* st) {
  st->print(" total_blobs='" UINT32_FORMAT "' nmethods='" UINT32_FORMAT "'"
            " adapters='" UINT32_FORMAT "' free_code_cache='" SIZE_FORMAT "'",
            nof_blobs(), nof_nmethods(), nof_adapters(),
            unallocated_capacity());
}
  2871. C:\hotspot-69087d08d473\src\share\vm/code/codeCache.hpp
  2872. #ifndef SHARE_VM_CODE_CODECACHE_HPP
  2873. #define SHARE_VM_CODE_CODECACHE_HPP
  2874. #include "code/codeBlob.hpp"
  2875. #include "memory/allocation.hpp"
  2876. #include "memory/heap.hpp"
  2877. #include "oops/instanceKlass.hpp"
  2878. #include "oops/oopsHierarchy.hpp"
  2879. class OopClosure;
  2880. class DepChange;
// The CodeCache is the all-static owner of every piece of generated code in
// the VM (nmethods, adapters, runtime stubs, buffer blobs). Storage is a
// single CodeHeap; iteration, lookup, GC support (scavenge-root list,
// unloading), deoptimization marking, and reporting all live here.
// Most operations require the CodeCache_lock or a safepoint.
class CodeCache : AllStatic {
  friend class VMStructs;
 private:
  static CodeHeap * _heap;                                 // backing storage for all blobs
  static int _number_of_blobs;
  static int _number_of_adapters;
  static int _number_of_nmethods;
  static int _number_of_nmethods_with_dependencies;
  static bool _needs_cache_clean;                          // set to request inline-cache cleanup at gc_epilogue
  static nmethod* _scavenge_root_nmethods;  // linked via nm->scavenge_root_link()
  static void verify_if_often() PRODUCT_RETURN;
  static void mark_scavenge_root_nmethods() PRODUCT_RETURN;
  static void verify_perm_nmethods(CodeBlobClosure* f_or_null) PRODUCT_RETURN;
  static int _codemem_full_count;                          // how often the cache has filled up
  static void set_scavenge_root_nmethods(nmethod* nm) { _scavenge_root_nmethods = nm; }
  static void prune_scavenge_root_nmethods();
  static void unlink_scavenge_root_nmethod(nmethod* nm, nmethod* prev);
 public:
  // Initialization
  static void initialize();
  static void report_codemem_full();
  // Allocation/administration
  static CodeBlob* allocate(int size, bool is_critical = false); // allocates a new CodeBlob
  static void commit(CodeBlob* cb);                        // called when the allocated CodeBlob has been filled
  static int alignment_unit();                             // guaranteed alignment of all CodeBlobs
  static int alignment_offset();                           // guaranteed offset of first CodeBlob byte within alignment unit (i.e., allocation header)
  static void free(CodeBlob* cb);                          // frees a CodeBlob
  static void flush();                                     // flushes all CodeBlobs
  static bool contains(void *p);                           // returns whether p is included
  static void blobs_do(void f(CodeBlob* cb));              // iterates over all CodeBlobs
  static void blobs_do(CodeBlobClosure* f);                // iterates over all CodeBlobs
  static void nmethods_do(void f(nmethod* nm));            // iterates over all nmethods
  static void alive_nmethods_do(void f(nmethod* nm));      // iterates over all alive nmethods
  // Lookup
  static CodeBlob* find_blob(void* start);
  static nmethod* find_nmethod(void* start);
  // Lookup that may be called when blobs are not in a consistent state
  // (e.g. from a signal handler); performs no safety checks beyond containment.
  static CodeBlob* find_blob_unsafe(void* start) {
    if (_heap == NULL) return NULL;
    CodeBlob* result = (CodeBlob*)_heap->find_start(start);
    if (result != NULL && !result->blob_contains((address)start)) {
      result = NULL;
    }
    return result;
  }
  // Iteration
  static CodeBlob* first();
  static CodeBlob* next (CodeBlob* cb);
  static CodeBlob* alive(CodeBlob *cb);
  static nmethod* alive_nmethod(CodeBlob *cb);
  static nmethod* first_nmethod();
  static nmethod* next_nmethod (CodeBlob* cb);
  static int nof_blobs()                 { return _number_of_blobs; }
  static int nof_adapters()              { return _number_of_adapters; }
  static int nof_nmethods()              { return _number_of_nmethods; }
  // GC support
  static void gc_epilogue();
  static void gc_prologue();
  static void verify_oops();
  static void do_unloading(BoolObjectClosure* is_alive, bool unloading_occurred);
  static void asserted_non_scavengable_nmethods_do(CodeBlobClosure* f = NULL) PRODUCT_RETURN;
  static void scavenge_root_nmethods_do(CodeBlobToOopClosure* f);
  static nmethod* scavenge_root_nmethods()          { return _scavenge_root_nmethods; }
  static void add_scavenge_root_nmethod(nmethod* nm);
  static void drop_scavenge_root_nmethod(nmethod* nm);
  // Printing/debugging
  static void print();                           // prints summary
  static void print_internals();
  static void verify();                          // verifies the code cache
  static void print_trace(const char* event, CodeBlob* cb, int size = 0) PRODUCT_RETURN;
  static void print_summary(outputStream* st, bool detailed = true); // Prints a summary of the code cache usage
  static void log_state(outputStream* st);
  // Bounds of the code heap
  static address low_bound()                     { return (address) _heap->low_boundary(); }
  static address high_bound()                    { return (address) _heap->high_boundary(); }
  static address high()                          { return (address) _heap->high(); }
  static address first_address();                // first address used for CodeBlobs
  static address last_address();                 // last  address used for CodeBlobs
  // Capacity
  static size_t capacity()                       { return _heap->capacity(); }
  static size_t max_capacity()                   { return _heap->max_capacity(); }
  static size_t unallocated_capacity()           { return _heap->unallocated_capacity(); }
  static double reverse_free_ratio();
  static bool needs_cache_clean()                { return _needs_cache_clean; }
  static void set_needs_cache_clean(bool v)      { _needs_cache_clean = v;    }
  static void clear_inline_caches();             // clear all inline caches
  static void verify_clean_inline_caches();
  static void verify_icholder_relocations();
  // Deoptimization support
  static int mark_for_deoptimization(DepChange& changes);
#ifdef HOTSWAP
  static int mark_for_evol_deoptimization(instanceKlassHandle dependee);
#endif // HOTSWAP
  static void mark_all_nmethods_for_deoptimization();
  static int mark_for_deoptimization(Method* dependee);
  static void make_marked_nmethods_not_entrant();
  static int number_of_nmethods_with_dependencies();
  static int get_codemem_full_count()            { return _codemem_full_count; }
};
  2970. #endif // SHARE_VM_CODE_CODECACHE_HPP
  2971. C:\hotspot-69087d08d473\src\share\vm/code/compiledIC.cpp
  2972. #include "precompiled.hpp"
  2973. #include "classfile/systemDictionary.hpp"
  2974. #include "code/codeCache.hpp"
  2975. #include "code/compiledIC.hpp"
  2976. #include "code/icBuffer.hpp"
  2977. #include "code/nmethod.hpp"
  2978. #include "code/vtableStubs.hpp"
  2979. #include "interpreter/interpreter.hpp"
  2980. #include "interpreter/linkResolver.hpp"
  2981. #include "memory/metadataFactory.hpp"
  2982. #include "memory/oopFactory.hpp"
  2983. #include "oops/method.hpp"
  2984. #include "oops/oop.inline.hpp"
  2985. #include "oops/symbol.hpp"
  2986. #include "runtime/icache.hpp"
  2987. #include "runtime/sharedRuntime.hpp"
  2988. #include "runtime/stubRoutines.hpp"
  2989. #include "utilities/events.hpp"
// Returns the metadata value cached at this inline call site, or NULL when
// the cache holds the non-oop sentinel. While a transition stub is installed,
// the pending value is read from the InlineCacheBuffer instead.
void* CompiledIC::cached_value() const {
  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
  assert (!is_optimized(), "an optimized virtual call does not have a cached metadata");
  if (!is_in_transition_state()) {
    void* data = (void*)_value->data();
    assert(data != NULL || Universe::non_oop_word() == NULL,
           "no raw nulls in CompiledIC metadatas, because of patching races");
    return (data == (void*)Universe::non_oop_word()) ? NULL : data;
  } else {
    return InlineCacheBuffer::cached_value_for((CompiledIC *)this);
  }
}
// Core IC patching primitive: repoints the call instruction to 'entry_point'
// and, for non-optimized non-icstub transitions, updates the cached value.
// Any icholder previously referenced by the destination is queued for release.
// The destination store is done MT-safely under the Patching_lock (or at a
// safepoint).
void CompiledIC::internal_set_ic_destination(address entry_point, bool is_icstub, void* cache, bool is_icholder) {
  assert(entry_point != NULL, "must set legal entry point");
  assert(CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
  assert (!is_optimized() || cache == NULL, "an optimized virtual call does not have a cached metadata");
  assert (cache == NULL || cache != (Metadata*)badOopVal, "invalid metadata");
  assert(!is_icholder || is_icholder_entry(entry_point), "must be");
  // The old destination owned an icholder; it can be reclaimed once no longer
  // referenced from the call site.
  if (is_icholder_entry(_ic_call->destination())) {
    InlineCacheBuffer::queue_for_release((CompiledICHolder*)_value->data());
  }
  if (TraceCompiledIC) {
    tty->print("  ");
    print_compiled_ic();
    tty->print(" changing destination to " INTPTR_FORMAT, p2i(entry_point));
    if (!is_optimized()) {
      tty->print(" changing cached %s to " INTPTR_FORMAT, is_icholder ? "icholder" : "metadata", p2i((address)cache));
    }
    if (is_icstub) {
      tty->print(" (icstub)");
    }
    tty->cr();
  }
  {
    MutexLockerEx pl(SafepointSynchronize::is_at_safepoint() ? NULL : Patching_lock, Mutex::_no_safepoint_check_flag);
#ifdef ASSERT
    CodeBlob* cb = CodeCache::find_blob_unsafe(_ic_call);
    assert(cb != NULL && cb->is_nmethod(), "must be nmethod");
#endif
    _ic_call->set_destination_mt_safe(entry_point);
  }
  if (is_optimized() || is_icstub) {
    // Optimized call sites don't have a cache value and ICStub-bound sites
    // set theirs when the stub retires; nothing more to do here.
    assert(cache == NULL, "must be null");
    return;
  }
  // A NULL cache is represented by the non-oop sentinel, never by raw NULL
  // (see the patching-race assert in cached_value()).
  if (cache == NULL) cache = (void*)Universe::non_oop_word();
  _value->set_data((intptr_t)cache);
}
// Points this IC at a transition (IC) stub; the cached value stays untouched
// until the stub retires.
void CompiledIC::set_ic_destination(ICStub* stub) {
  internal_set_ic_destination(stub->code_begin(), true, NULL, false);
}
// Returns the logical destination of this inline call. While a transition
// stub is installed, the pending destination is read from the
// InlineCacheBuffer rather than the call instruction itself.
address CompiledIC::ic_destination() const {
  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
  if (!is_in_transition_state()) {
    return _ic_call->destination();
  } else {
    return InlineCacheBuffer::ic_destination_for((CompiledIC *)this);
  }
}
  3049. bool CompiledIC::is_in_transition_state() const {
  3050. assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
  3051. return InlineCacheBuffer::contains(_ic_call->destination());
  3052. }
  3053. bool CompiledIC::is_icholder_call() const {
  3054. assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
  3055. return !_is_optimized && is_icholder_entry(ic_destination());
  3056. }
// Returns the address of the pending ICStub; only valid while this IC is in
// a transition state.
address CompiledIC::stub_address() const {
  assert(is_in_transition_state(), "should only be called when we are in a transition state");
  return _ic_call->destination();
}
// Clears the pending ICStub for this site, if one is installed.
void CompiledIC::clear_ic_stub() {
  if (is_in_transition_state()) {
    ICStub* stub = ICStub_from_destination_address(stub_address());
    stub->clear();
  }
}
// Fills in _is_optimized and _value from the relocation the iterator is
// positioned at: virtual calls carry a cached-value slot, optimized (opt)
// virtual calls do not.
void CompiledIC::initialize_from_iter(RelocIterator* iter) {
  assert(iter->addr() == _ic_call->instruction_address(), "must find ic_call");
  if (iter->type() == relocInfo::virtual_call_type) {
    virtual_call_Relocation* r = iter->virtual_call_reloc();
    _is_optimized = false;
    _value = nativeMovConstReg_at(r->cached_value());
  } else {
    assert(iter->type() == relocInfo::opt_virtual_call_type, "must be a virtual call");
    _is_optimized = true;
    _value = NULL;
  }
}
// Constructs a CompiledIC for a call site inside 'nm', locating the call's
// relocation info to determine the IC flavor and cached-value slot.
CompiledIC::CompiledIC(nmethod* nm, NativeCall* call)
  : _ic_call(call)
{
  address ic_call = _ic_call->instruction_address();
  assert(ic_call != NULL, "ic_call address must be set");
  assert(nm != NULL, "must pass nmethod");
  assert(nm->contains(ic_call), "must be in nmethod");
  // Search for the single relocation covering exactly this instruction.
  RelocIterator iter(nm, ic_call, ic_call+1);
  bool ret = iter.next();
  assert(ret == true, "relocInfo must exist at this address");
  assert(iter.addr() == ic_call, "must find ic_call");
  initialize_from_iter(&iter);
}
// Constructs a CompiledIC directly from a relocation iterator already
// positioned at the call's relocation (avoids a fresh relocation search).
CompiledIC::CompiledIC(RelocIterator* iter)
  : _ic_call(nativeCall_at(iter->addr()))
{
  address ic_call = _ic_call->instruction_address();
  nmethod* nm = iter->code();
  assert(ic_call != NULL, "ic_call address must be set");
  assert(nm != NULL, "must pass nmethod");
  assert(nm->contains(ic_call), "must be in nmethod");
  initialize_from_iter(iter);
}
  3102. bool CompiledIC::set_to_megamorphic(CallInfo* call_info, Bytecodes::Code bytecode, TRAPS) {
  3103. assert(CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
  3104. assert(!is_optimized(), "cannot set an optimized virtual call to megamorphic");
  3105. assert(is_call_to_compiled() || is_call_to_interpreted(), "going directly to megamorphic?");
  3106. address entry;
  3107. if (call_info->call_kind() == CallInfo::itable_call) {
  3108. assert(bytecode == Bytecodes::_invokeinterface, "");
  3109. int itable_index = call_info->itable_index();
  3110. entry = VtableStubs::find_itable_stub(itable_index);
  3111. if (entry == false) {
  3112. return false;
  3113. }
  3114. #ifdef ASSERT
  3115. int index = call_info->resolved_method()->itable_index();
  3116. assert(index == itable_index, "CallInfo pre-computes this");
  3117. InstanceKlass* k = call_info->resolved_method()->method_holder();
  3118. assert(k->verify_itable_index(itable_index), "sanity check");
  3119. #endif //ASSERT
  3120. CompiledICHolder* holder = new CompiledICHolder(call_info->resolved_method()->method_holder(),
  3121. call_info->resolved_klass()(), false);
  3122. holder->claim();
  3123. InlineCacheBuffer::create_transition_stub(this, holder, entry);
  3124. } else {
  3125. assert(call_info->call_kind() == CallInfo::vtable_call, "either itable or vtable");
  3126. int vtable_index = call_info->vtable_index();
  3127. assert(call_info->resolved_klass()->verify_vtable_index(vtable_index), "sanity check");
  3128. entry = VtableStubs::find_vtable_stub(vtable_index);
  3129. if (entry == NULL) {
  3130. return false;
  3131. }
  3132. InlineCacheBuffer::create_transition_stub(this, NULL, entry);
  3133. }
  3134. if (TraceICs) {
  3135. ResourceMark rm;
  3136. tty->print_cr ("IC@" INTPTR_FORMAT ": to megamorphic %s entry: " INTPTR_FORMAT,
  3137. p2i(instruction_address()), call_info->selected_method()->print_value_string(), p2i(entry));
  3138. }
  3139. return true;
  3140. }
  3141. bool CompiledIC::is_megamorphic() const {
  3142. assert(CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
  3143. assert(!is_optimized(), "an optimized call cannot be megamorphic");
  3144. return VtableStubs::entry_point(ic_destination()) != NULL;
  3145. }
// Returns true when this IC currently dispatches directly into compiled code
// (a monomorphic call targeting an nmethod).
bool CompiledIC::is_call_to_compiled() const {
  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
  // find_blob_unsafe: the destination may live in a blob the checked lookup
  // would reject (e.g. while being cleaned) — we only classify it here.
  CodeBlob* cb = CodeCache::find_blob_unsafe(ic_destination());
  bool is_monomorphic = (cb != NULL && cb->is_nmethod());
#ifdef ASSERT
  // A monomorphic, non-optimized site in a live caller should carry a Klass*
  // in its value cell (C1 is exempted below).
  CodeBlob* caller = CodeCache::find_blob_unsafe(instruction_address());
  bool is_c1_method = caller->is_compiled_by_c1();
  assert( is_c1_method ||
         !is_monomorphic ||
         is_optimized() ||
         !caller->is_alive() ||
         (cached_metadata() != NULL && cached_metadata()->is_klass()), "sanity check");
#endif // ASSERT
  return is_monomorphic;
}
// Returns true when this IC currently targets interpreted code.  For a
// normal virtual call that is an (icholder) adapter blob; for an optimized
// call the interpreter entry is a stub inside the caller's own code blob.
bool CompiledIC::is_call_to_interpreted() const {
  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
  bool is_call_to_interpreted = false;
  if (!is_optimized()) {
    // Unsafe lookup: destination may be in a blob the checked lookup rejects.
    CodeBlob* cb = CodeCache::find_blob_unsafe(ic_destination());
    is_call_to_interpreted = (cb != NULL && cb->is_adapter_blob());
    assert(!is_call_to_interpreted || (is_icholder_call() && cached_icholder() != NULL), "sanity check");
  } else {
    // Optimized call: check whether we call into our own code blob (i.e. a
    // local stub) rather than an external entry.
    CodeBlob* cb = CodeCache::find_blob(_ic_call->instruction_address());
    address dest = ic_destination();
#ifdef ASSERT
    {
      // An optimized call must never go through an adapter blob.
      CodeBlob* db = CodeCache::find_blob_unsafe(dest);
      assert(!db->is_adapter_blob(), "must use stub!");
    }
#endif /* ASSERT */
    is_call_to_interpreted = cb->contains(dest);
  }
  return is_call_to_interpreted;
}
// Reset this IC to the clean state: route the call back to the appropriate
// runtime resolution stub and drop any cached value.  in_use == false means
// no thread can be executing through the site, making the patch trivially
// safe to perform in place.
void CompiledIC::set_to_clean(bool in_use) {
  assert(SafepointSynchronize::is_at_safepoint() || CompiledIC_lock->is_locked() , "MT-unsafe call");
  if (TraceInlineCacheClearing || TraceICs) {
    tty->print_cr("IC@" INTPTR_FORMAT ": set to clean", p2i(instruction_address()));
    print();
  }
  address entry;
  if (is_optimized()) {
    entry = SharedRuntime::get_resolve_opt_virtual_call_stub();
  } else {
    entry = SharedRuntime::get_resolve_virtual_call_stub();
  }
  // In-place patching is only safe when the site cannot be mid-execution:
  // dead nmethod, optimized call (no value cell to tear), or at a safepoint.
  bool safe_transition = !in_use || is_optimized() || SafepointSynchronize::is_at_safepoint();
  if (safe_transition) {
    clear_ic_stub();
    if (is_optimized()) {
      set_ic_destination(entry);
    } else {
      set_ic_destination_and_value(entry, (void*)NULL);
    }
  } else {
    // Unsafe transition — publish the change through an InlineCacheBuffer
    // transition stub so racing threads never see a half-patched site.
    InlineCacheBuffer::create_transition_stub(this, NULL, entry);
  }
}
  3205. bool CompiledIC::is_clean() const {
  3206. assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
  3207. bool is_clean = false;
  3208. address dest = ic_destination();
  3209. is_clean = dest == SharedRuntime::get_resolve_opt_virtual_call_stub() ||
  3210. dest == SharedRuntime::get_resolve_virtual_call_stub();
  3211. assert(!is_clean || is_optimized() || cached_value() == NULL, "sanity check");
  3212. return is_clean;
  3213. }
// Install the monomorphic target described by info.  Must run under
// CompiledIC_lock or at a safepoint; transitions that are not provably safe
// are routed through the InlineCacheBuffer.
void CompiledIC::set_to_monomorphic(CompiledICInfo& info) {
  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
  Thread *thread = Thread::current();
  if (info.to_interpreter()) {
    // Target is interpreted code.
    if (info.is_optimized() && is_optimized()) {
      assert(is_clean(), "unsafe IC path");
      MutexLockerEx pl(Patching_lock, Mutex::_no_safepoint_check_flag);
      // Statically bound call: patch the site like an optimized static call,
      // going through the callee's c2i entry with the Method* cached.
      assert(info.cached_metadata() != NULL && info.cached_metadata()->is_method(), "sanity check");
      CompiledStaticCall* csc = compiledStaticCall_at(instruction_address());
      methodHandle method (thread, (Method*)info.cached_metadata());
      csc->set_to_interpreted(method, info.entry());
      if (TraceICs) {
        ResourceMark rm(thread);
        tty->print_cr ("IC@" INTPTR_FORMAT ": monomorphic to interpreter: %s",
                       p2i(instruction_address()),
                       method->print_value_string());
      }
    } else {
      // True virtual call into the interpreter: publish via transition stub,
      // transferring ownership of the icholder.
      InlineCacheBuffer::create_transition_stub(this, info.claim_cached_icholder(), info.entry());
      if (TraceICs) {
        ResourceMark rm(thread);
        tty->print_cr ("IC@" INTPTR_FORMAT ": monomorphic to interpreter via icholder ", p2i(instruction_address()));
      }
    }
  } else {
    // Target is compiled code.
    bool static_bound = info.is_optimized() || (info.cached_metadata() == NULL);
#ifdef ASSERT
    CodeBlob* cb = CodeCache::find_blob_unsafe(info.entry());
    assert (cb->is_nmethod(), "must be compiled!");
#endif /* ASSERT */
    // In-place patching is safe at a safepoint, or when the site is not in a
    // transition state and no value cell needs to change consistently.
    bool safe = SafepointSynchronize::is_at_safepoint() ||
                (!is_in_transition_state() && (info.is_optimized() || static_bound || is_clean()));
    if (!safe) {
      InlineCacheBuffer::create_transition_stub(this, info.cached_metadata(), info.entry());
    } else {
      if (is_optimized()) {
        set_ic_destination(info.entry());
      } else {
        set_ic_destination_and_value(info.entry(), info.cached_metadata());
      }
    }
    if (TraceICs) {
      ResourceMark rm(thread);
      assert(info.cached_metadata() == NULL || info.cached_metadata()->is_klass(), "must be");
      tty->print_cr ("IC@" INTPTR_FORMAT ": monomorphic to compiled (rcvr klass) %s: %s",
                     p2i(instruction_address()),
                     ((Klass*)info.cached_metadata())->print_value_string(),
                     (safe) ? "" : "via stub");
    }
  }
}
// Compute (but do not install) the monomorphic transition target for a call
// site.  Prefers the callee's compiled code; otherwise falls back to the
// interpreter via a c2i entry (optimized) or an icholder (virtual).
void CompiledIC::compute_monomorphic_entry(methodHandle method,
                                           KlassHandle receiver_klass,
                                           bool is_optimized,
                                           bool static_bound,
                                           CompiledICInfo& info,
                                           TRAPS) {
  nmethod* method_code = method->code();
  address entry = NULL;
  if (method_code != NULL && method_code->is_in_use()) {
    // Callee has live compiled code.  Statically bound / optimized callers
    // may skip the receiver check and use the verified entry point.
    if (static_bound || is_optimized) {
      entry      = method_code->verified_entry_point();
    } else {
      entry      = method_code->entry_point();
    }
  }
  if (entry != NULL) {
    // Statically bound sites cache no receiver klass.
    info.set_compiled_entry(entry, (static_bound || is_optimized) ? NULL : receiver_klass(), is_optimized);
  } else {
#ifdef COMPILER2
#ifdef TIERED
#if defined(ASSERT)
#endif // ASSERT
#else
    // With C2 alone, a statically bound virtual must be an optimized call.
    assert(!static_bound || is_optimized, "static_bound should imply is_optimized");
#endif // TIERED
#endif // COMPILER2
    if (is_optimized) {
      // Optimized call to interpreter: use the c2i stub, cache the Method*.
      info.set_interpreter_entry(method()->get_c2i_entry(), method());
    } else {
      // Virtual call to interpreter: go through the unverified c2i entry
      // with a (method, receiver-klass) icholder for the receiver check.
      CompiledICHolder* holder = new CompiledICHolder(method(), receiver_klass());
      info.set_icholder_entry(method()->get_c2i_unverified_entry(), holder);
    }
  }
  assert(info.is_optimized() == is_optimized, "must agree");
}
  3300. bool CompiledIC::is_icholder_entry(address entry) {
  3301. CodeBlob* cb = CodeCache::find_blob_unsafe(entry);
  3302. if (cb != NULL && cb->is_adapter_blob()) {
  3303. return true;
  3304. }
  3305. if (cb != NULL && cb->is_vtable_blob()) {
  3306. VtableStub* s = VtableStubs::entry_point(entry);
  3307. return (s != NULL) && s->is_itable_stub();
  3308. }
  3309. return false;
  3310. }
// Reset this static call to target the shared resolve stub.  Takes the
// Patching_lock unless we are already at a safepoint (no lock needed then —
// the NULL mutex with _no_safepoint_check_flag makes the locker a no-op).
void CompiledStaticCall::set_to_clean() {
  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
  MutexLockerEx pl(SafepointSynchronize::is_at_safepoint() ? NULL : Patching_lock, Mutex::_no_safepoint_check_flag);
#ifdef ASSERT
  CodeBlob* cb = CodeCache::find_blob_unsafe(this);
  assert(cb != NULL && cb->is_nmethod(), "must be nmethod");
#endif
  set_destination_mt_safe(SharedRuntime::get_resolve_static_call_stub());
}
  3320. bool CompiledStaticCall::is_clean() const {
  3321. return destination() == SharedRuntime::get_resolve_static_call_stub();
  3322. }
  3323. bool CompiledStaticCall::is_call_to_compiled() const {
  3324. return CodeCache::contains(destination());
  3325. }
  3326. bool CompiledStaticCall::is_call_to_interpreted() const {
  3327. nmethod* nm = CodeCache::find_nmethod(instruction_address());
  3328. return nm->stub_contains(destination());
  3329. }
// Apply the pre-computed StaticCallInfo to this (clean) call site.
void CompiledStaticCall::set(const StaticCallInfo& info) {
  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
  MutexLockerEx pl(Patching_lock, Mutex::_no_safepoint_check_flag);
  // Only a clean site may be updated; re-pointing an already-set call is a bug.
  assert(is_clean(), "do not update a call entry - use clean");
  if (info._to_interpreter) {
    // Call to interpreted code: go through the to-interpreter stub.
    set_to_interpreted(info.callee(), info.entry());
  } else {
    if (TraceICs) {
      ResourceMark rm;
      tty->print_cr("CompiledStaticCall@" INTPTR_FORMAT ": set_to_compiled " INTPTR_FORMAT,
                    p2i(instruction_address()),
                    p2i(info.entry()));
    }
    assert (CodeCache::contains(info.entry()), "wrong entry point");
    // Patch the branch destination in an MT-safe manner.
    set_destination_mt_safe(info.entry());
  }
}
  3347. void CompiledStaticCall::compute_entry(methodHandle m, StaticCallInfo& info) {
  3348. nmethod* m_code = m->code();
  3349. info._callee = m;
  3350. if (m_code != NULL && m_code->is_in_use()) {
  3351. info._to_interpreter = false;
  3352. info._entry = m_code->verified_entry_point();
  3353. } else {
  3354. assert(!m->is_method_handle_intrinsic(), "Compiled code should never call interpreter MH intrinsics");
  3355. info._to_interpreter = true;
  3356. info._entry = m()->get_c2i_entry();
  3357. }
  3358. }
// Locate the to-interpreter stub for this call site by scanning the
// relocation information at the call's address.  Returns NULL when no
// matching relocation entry is found.
address CompiledStaticCall::find_stub() {
  RelocIterator iter((nmethod*)NULL, instruction_address());
  while (iter.next()) {
    if (iter.addr() == instruction_address()) {
      switch(iter.type()) {
      case relocInfo::static_call_type:
        return iter.static_call_reloc()->static_stub();
      // opt_virtual_call is handled too because optimized virtual calls
      // share this static-call patching machinery.
      case relocInfo::opt_virtual_call_type:
        return iter.opt_virtual_call_reloc()->static_stub();
      case relocInfo::poll_type:
      case relocInfo::poll_return_type: // A safepoint can't overlap a call.
      default:
        ShouldNotReachHere();
      }
    }
  }
  return NULL;
}
#ifndef PRODUCT
// Debug-build support: verify the underlying call instruction and confirm
// the IC is in one of its legal states.
void CompiledIC::verify() {
  _ic_call->verify();
  if (os::is_MP()) {
    // On multiprocessors the patched call must be suitably aligned.
    _ic_call->verify_alignment();
  }
  assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted()
          || is_optimized() || is_megamorphic(), "sanity check");
}
void CompiledIC::print() {
  print_compiled_ic();
  tty->cr();
}
// One-line dump of the IC: site address, destination, and cached value
// (optimized calls have no value cell, hence NULL).
void CompiledIC::print_compiled_ic() {
  tty->print("Inline cache at " INTPTR_FORMAT ", calling %s " INTPTR_FORMAT " cached_value " INTPTR_FORMAT,
             p2i(instruction_address()), is_call_to_interpreted() ? "interpreted " : "", p2i(ic_destination()), p2i(is_optimized() ? NULL : cached_value()));
}
// Print the current state of this static call site.
void CompiledStaticCall::print() {
  tty->print("static call at " INTPTR_FORMAT " -> ", p2i(instruction_address()));
  if (is_clean()) {
    tty->print("clean");
  } else if (is_call_to_compiled()) {
    tty->print("compiled");
  } else if (is_call_to_interpreted()) {
    tty->print("interpreted");
  }
  tty->cr();
}
#endif // !PRODUCT
  3406. C:\hotspot-69087d08d473\src\share\vm/code/compiledIC.hpp
  3407. #ifndef SHARE_VM_CODE_COMPILEDIC_HPP
  3408. #define SHARE_VM_CODE_COMPILEDIC_HPP
  3409. #include "interpreter/linkResolver.hpp"
  3410. #include "oops/compiledICHolder.hpp"
  3411. #ifdef TARGET_ARCH_x86
  3412. # include "nativeInst_x86.hpp"
  3413. #endif
  3414. #ifdef TARGET_ARCH_aarch64
  3415. # include "nativeInst_aarch64.hpp"
  3416. #endif
  3417. #ifdef TARGET_ARCH_sparc
  3418. # include "nativeInst_sparc.hpp"
  3419. #endif
  3420. #ifdef TARGET_ARCH_zero
  3421. # include "nativeInst_zero.hpp"
  3422. #endif
  3423. #ifdef TARGET_ARCH_arm
  3424. # include "nativeInst_arm.hpp"
  3425. #endif
  3426. #ifdef TARGET_ARCH_ppc
  3427. # include "nativeInst_ppc.hpp"
  3428. #endif
  3429. class CompiledIC;
  3430. class ICStub;
// Stack-allocated holder for the data needed to transition a CompiledIC (or
// static call) to a new target: compute first, apply later under the proper
// lock.  Owns any CompiledICHolder it created until that holder is claimed.
class CompiledICInfo : public StackObj {
 private:
  address _entry;              // entry point for call
  void*   _cached_value;       // Value of cached_value (either in stub or inline cache)
  bool    _is_icholder;        // Is the cached value a CompiledICHolder*
  bool    _is_optimized;       // it is an optimized virtual call (i.e., can be statically bound)
  bool    _to_interpreter;     // Call it to interpreter
  bool    _release_icholder;   // Destructor must dispose of an unclaimed icholder
 public:
  address entry() const { return _entry; }
  // Cached metadata accessor; only valid when the cached value is not an icholder.
  Metadata* cached_metadata() const { assert(!_is_icholder, ""); return (Metadata*)_cached_value; }
  // Transfer ownership of the icholder to the caller (IC installation path);
  // after this the destructor will not delete it.
  CompiledICHolder* claim_cached_icholder() {
    assert(_is_icholder, "");
    assert(_cached_value != NULL, "must be non-NULL");
    _release_icholder = false;
    CompiledICHolder* icholder = (CompiledICHolder*)_cached_value;
    icholder->claim();
    return icholder;
  }
  bool is_optimized() const   { return _is_optimized; }
  bool to_interpreter() const { return _to_interpreter; }
  // Target is compiled code; klass is the cached receiver klass
  // (NULL for statically bound calls).
  void set_compiled_entry(address entry, Klass* klass, bool is_optimized) {
    _entry = entry;
    _cached_value = (void*)klass;
    _to_interpreter = false;
    _is_icholder = false;
    _is_optimized = is_optimized;
    _release_icholder = false;
  }
  // Target is the interpreter (optimized path); the Method* is cached.
  void set_interpreter_entry(address entry, Method* method) {
    _entry = entry;
    _cached_value = (void*)method;
    _to_interpreter = true;
    _is_icholder = false;
    _is_optimized = true;
    _release_icholder = false;
  }
  // Target is the interpreter through an icholder (virtual path); this info
  // object owns the icholder until claim_cached_icholder() is called.
  void set_icholder_entry(address entry, CompiledICHolder* icholder) {
    _entry = entry;
    _cached_value = (void*)icholder;
    _to_interpreter = true;
    _is_icholder = true;
    _is_optimized = false;
    _release_icholder = true;
  }
  CompiledICInfo(): _entry(NULL), _cached_value(NULL), _is_icholder(false),
                    _to_interpreter(false), _is_optimized(false), _release_icholder(false) {
  }
  ~CompiledICInfo() {
    // The info may be computed but never installed; release the icholder we
    // still own.  NOTE(review): claim() before delete mirrors
    // claim_cached_icholder — confirm against CompiledICHolder's protocol.
    if (_release_icholder) {
      assert(_is_icholder, "must be");
      CompiledICHolder* icholder = (CompiledICHolder*)_cached_value;
      icholder->claim();
      delete icholder;
    }
  }
};
// Resource-allocated view over an inline cache in compiled code: the call
// instruction plus a patchable value cell.  Instances are created through
// the CompiledIC_* friend factories; all state changes go through private
// setters so the transition protocol (InlineCacheBuffer) is respected.
class CompiledIC: public ResourceObj {
  friend class InlineCacheBuffer;
  friend class ICStub;
 private:
  NativeCall*        _ic_call;      // the call instruction
  NativeMovConstReg* _value;        // patchable value cell for this IC
  bool               _is_optimized; // an optimized virtual call (i.e., no compiled IC)
  CompiledIC(nmethod* nm, NativeCall* ic_call);
  CompiledIC(RelocIterator* iter);
  void initialize_from_iter(RelocIterator* iter);
  static bool is_icholder_entry(address entry);
  // Low-level patching; the flavors below keep destination and cached value
  // consistent for each kind of target.
  void internal_set_ic_destination(address entry_point, bool is_icstub, void* cache, bool is_icholder);
  void set_ic_destination(ICStub* stub);
  void set_ic_destination(address entry_point) {
    assert(_is_optimized, "use set_ic_destination_and_value instead");
    internal_set_ic_destination(entry_point, false, NULL, false);
  }
  // void* variant: the cached value's type is derived from the entry point.
  void set_ic_destination_and_value(address entry_point, void* value) {
    internal_set_ic_destination(entry_point, false, value, is_icholder_entry(entry_point));
  }
  void set_ic_destination_and_value(address entry_point, Metadata* value) {
    internal_set_ic_destination(entry_point, false, value, false);
  }
  void set_ic_destination_and_value(address entry_point, CompiledICHolder* value) {
    internal_set_ic_destination(entry_point, false, value, true);
  }
  address stub_address() const;
  bool is_in_transition_state() const;  // Use InlineCacheBuffer
 public:
  friend CompiledIC* CompiledIC_before(nmethod* nm, address return_addr);
  friend CompiledIC* CompiledIC_at(nmethod* nm, address call_site);
  friend CompiledIC* CompiledIC_at(Relocation* call_site);
  friend CompiledIC* CompiledIC_at(RelocIterator* reloc_iter);
  static void cleanup_call_site(virtual_call_Relocation* call_site);
  static bool is_icholder_call_site(virtual_call_Relocation* call_site);
  // Accessors for the cached value; use the variant matching the IC's kind.
  void* cached_value() const;
  CompiledICHolder* cached_icholder() const {
    assert(is_icholder_call(), "must be");
    return (CompiledICHolder*) cached_value();
  }
  Metadata* cached_metadata() const {
    assert(!is_icholder_call(), "must be");
    return (Metadata*) cached_value();
  }
  address ic_destination() const;
  bool is_optimized() const { return _is_optimized; }
  // State queries (see the matching definitions in compiledIC.cpp).
  bool is_clean() const;
  bool is_megamorphic() const;
  bool is_call_to_compiled() const;
  bool is_call_to_interpreted() const;
  bool is_icholder_call() const;
  address end_of_call() { return _ic_call->return_address(); }
  // State transitions; caller must hold CompiledIC_lock or be at a safepoint.
  void set_to_clean(bool in_use = true);
  void set_to_monomorphic(CompiledICInfo& info);
  void clear_ic_stub();
  bool set_to_megamorphic(CallInfo* call_info, Bytecodes::Code bytecode, TRAPS);
  static void compute_monomorphic_entry(methodHandle method, KlassHandle receiver_klass,
                                        bool is_optimized, bool static_bound, CompiledICInfo& info, TRAPS);
  address instruction_address() const { return _ic_call->instruction_address(); }
  void print() PRODUCT_RETURN;
  void print_compiled_ic() PRODUCT_RETURN;
  void verify() PRODUCT_RETURN;
};
  3551. inline CompiledIC* CompiledIC_before(nmethod* nm, address return_addr) {
  3552. CompiledIC* c_ic = new CompiledIC(nm, nativeCall_before(return_addr));
  3553. c_ic->verify();
  3554. return c_ic;
  3555. }
  3556. inline CompiledIC* CompiledIC_at(nmethod* nm, address call_site) {
  3557. CompiledIC* c_ic = new CompiledIC(nm, nativeCall_at(call_site));
  3558. c_ic->verify();
  3559. return c_ic;
  3560. }
  3561. inline CompiledIC* CompiledIC_at(Relocation* call_site) {
  3562. assert(call_site->type() == relocInfo::virtual_call_type ||
  3563. call_site->type() == relocInfo::opt_virtual_call_type, "wrong reloc. info");
  3564. CompiledIC* c_ic = new CompiledIC(call_site->code(), nativeCall_at(call_site->addr()));
  3565. c_ic->verify();
  3566. return c_ic;
  3567. }
  3568. inline CompiledIC* CompiledIC_at(RelocIterator* reloc_iter) {
  3569. assert(reloc_iter->type() == relocInfo::virtual_call_type ||
  3570. reloc_iter->type() == relocInfo::opt_virtual_call_type, "wrong reloc. info");
  3571. CompiledIC* c_ic = new CompiledIC(reloc_iter);
  3572. c_ic->verify();
  3573. return c_ic;
  3574. }
  3575. class CompiledStaticCall;
// Pre-computed target of a static call (see CompiledStaticCall::compute_entry);
// filled in by CompiledStaticCall and applied via CompiledStaticCall::set.
class StaticCallInfo {
 private:
  address      _entry;          // Entrypoint
  methodHandle _callee;         // Callee (used when calling interpreter)
  bool         _to_interpreter; // call to interpreted method (otherwise compiled)
  friend class CompiledStaticCall;
 public:
  address      entry() const  { return _entry;  }
  methodHandle callee() const { return _callee; }
};
// A statically bound call site in compiled code.  Overlays the NativeCall
// instruction directly (no separate value cell); interpreted targets are
// reached through a per-call to-interpreter stub emitted at compile time.
class CompiledStaticCall: public NativeCall {
  friend class CompiledIC;
  // Installs the to-interpreter stub target; also used by optimized virtuals.
  void set_to_interpreted(methodHandle callee, address entry);
  bool is_optimized_virtual();
 public:
  friend CompiledStaticCall* compiledStaticCall_before(address return_addr);
  friend CompiledStaticCall* compiledStaticCall_at(address native_call);
  friend CompiledStaticCall* compiledStaticCall_at(Relocation* call_site);
  // Code emission support for the to-interpreter stub (platform-dependent).
#if defined(AARCH64) && !defined(ZERO)
  static address emit_to_interp_stub(CodeBuffer &cbuf, address mark);
#else
  static address emit_to_interp_stub(CodeBuffer &cbuf);
#endif
  static int to_interp_stub_size();
  static int reloc_to_interp_stub();
  // State queries.
  bool is_clean() const;
  bool is_call_to_compiled() const;
  bool is_call_to_interpreted() const;
  // State transitions; caller must hold CompiledIC_lock or be at a safepoint.
  void set_to_clean();
  void set(const StaticCallInfo& info);
  // Compute the entry point for a given (static) callee method.
  static void compute_entry(methodHandle m, StaticCallInfo& info);
  // Stub support
  address find_stub();
  static void set_stub_to_clean(static_stub_Relocation* static_stub);
  // Misc.
  void print()  PRODUCT_RETURN;
  void verify() PRODUCT_RETURN;
};
  3612. inline CompiledStaticCall* compiledStaticCall_before(address return_addr) {
  3613. CompiledStaticCall* st = (CompiledStaticCall*)nativeCall_before(return_addr);
  3614. st->verify();
  3615. return st;
  3616. }
  3617. inline CompiledStaticCall* compiledStaticCall_at(address native_call) {
  3618. CompiledStaticCall* st = (CompiledStaticCall*)native_call;
  3619. st->verify();
  3620. return st;
  3621. }
// Relocation-based factory; delegates to the address-based overload.
inline CompiledStaticCall* compiledStaticCall_at(Relocation* call_site) {
  return compiledStaticCall_at(call_site->addr());
}
  3625. #endif // SHARE_VM_CODE_COMPILEDIC_HPP
  3626. C:\hotspot-69087d08d473\src\share\vm/code/compressedStream.cpp
  3627. #include "precompiled.hpp"
  3628. #include "code/compressedStream.hpp"
  3629. #include "utilities/ostream.hpp"
// Sign-fold ("zigzag") encoding: moves the sign into bit 0 so that
// small-magnitude negative values also map to small unsigned codes.
inline juint CompressedStream::encode_sign(jint value) {
  return (value << 1) ^ (value >> 31);
}
// Inverse of encode_sign: bit 0 selects whether to complement the payload.
inline jint CompressedStream::decode_sign(juint value) {
  return (value >> 1) ^ -(jint)(value & 1);
}
// Reverse the order of all 32 bits: swap adjacent bits, then bit pairs, then
// nybbles within each byte, and finally reverse the byte order.  Used so
// floats' trailing zero bits become leading zeros and compress well.
inline juint CompressedStream::reverse_int(juint i) {
  i = (i & 0x55555555) << 1 | (i >> 1) & 0x55555555;
  i = (i & 0x33333333) << 2 | (i >> 2) & 0x33333333;
  i = (i & 0x0f0f0f0f) << 4 | (i >> 4) & 0x0f0f0f0f;
  i = (i << 24) | ((i & 0xff00) << 8) | ((i >> 8) & 0xff00) | (i >> 24);
  return i;
}
  3643. jint CompressedReadStream::read_signed_int() {
  3644. return decode_sign(read_int());
  3645. }
  3646. jfloat CompressedReadStream::read_float() {
  3647. int rf = read_int();
  3648. int f = reverse_int(rf);
  3649. return jfloat_cast(f);
  3650. }
  3651. jdouble CompressedReadStream::read_double() {
  3652. jint rh = read_int();
  3653. jint rl = read_int();
  3654. jint h = reverse_int(rh);
  3655. jint l = reverse_int(rl);
  3656. return jdouble_cast(jlong_from(h, l));
  3657. }
  3658. jlong CompressedReadStream::read_long() {
  3659. jint low = read_signed_int();
  3660. jint high = read_signed_int();
  3661. return jlong_from(high, low);
  3662. }
  3663. CompressedWriteStream::CompressedWriteStream(int initial_size) : CompressedStream(NULL, 0) {
  3664. _buffer = NEW_RESOURCE_ARRAY(u_char, initial_size);
  3665. _size = initial_size;
  3666. _position = 0;
  3667. }
  3668. void CompressedWriteStream::grow() {
  3669. u_char* _new_buffer = NEW_RESOURCE_ARRAY(u_char, _size * 2);
  3670. memcpy(_new_buffer, _buffer, _position);
  3671. _buffer = _new_buffer;
  3672. _size = _size * 2;
  3673. }
  3674. void CompressedWriteStream::write_signed_int(jint value) {
  3675. write_int(encode_sign(value));
  3676. }
  3677. void CompressedWriteStream::write_float(jfloat value) {
  3678. juint f = jint_cast(value);
  3679. juint rf = reverse_int(f);
  3680. assert(f == reverse_int(rf), "can re-read same bits");
  3681. write_int(rf);
  3682. }
  3683. void CompressedWriteStream::write_double(jdouble value) {
  3684. juint h = high(jlong_cast(value));
  3685. juint l = low( jlong_cast(value));
  3686. juint rh = reverse_int(h);
  3687. juint rl = reverse_int(l);
  3688. assert(h == reverse_int(rh), "can re-read same bits");
  3689. assert(l == reverse_int(rl), "can re-read same bits");
  3690. write_int(rh);
  3691. write_int(rl);
  3692. }
  3693. void CompressedWriteStream::write_long(jlong value) {
  3694. write_signed_int(low(value));
  3695. write_signed_int(high(value));
  3696. }
#ifndef PRODUCT
// Forward declaration of the stress test defined below; it can be triggered
// from write_int_mb by setting test_compressed_stream_enabled.
void test_compressed_stream(int trace);
bool test_compressed_stream_enabled = false;
#endif
// Slow path of read_int(): decode the 2..5 byte UNSIGNED5 cases.  b0 is the
// first byte, already consumed by the caller (so the stream position is one
// past it on entry).
jint CompressedReadStream::read_int_mb(jint b0) {
  int pos = position() - 1;       // index of b0 itself
  u_char* buf = buffer() + pos;
  assert(buf[0] == b0 && b0 >= L, "correctly called");
  jint sum = b0;
  // Accumulate continuation bytes, each contributing 6 more significant bits.
  int lg_H_i = lg_H;
  for (int i = 0; ; ) {
    jint b_i = buf[++i]; // b_i = read(); ++i;
    sum += b_i << lg_H_i; // sum += b[i]*(64**i)
    if (b_i < L || i == MAX_i) {
      // A low code (or the 5th byte) terminates the encoding.
      set_position(pos+i+1);
      return sum;
    }
    lg_H_i += lg_H;
  }
}
// Emit an UNSIGNED5 encoding (1..5 bytes) of value.  Bytes below L terminate
// the encoding; "high" codes (>= L) signal that more bytes follow.
void CompressedWriteStream::write_int_mb(jint value) {
  debug_only(int pos1 = position());
  juint sum = value;
  for (int i = 0; ; ) {
    if (sum < L || i == MAX_i) {
      // Remainder fits in a terminating low code (or we are at the 5th byte).
      assert(sum == (u_char)sum, "valid byte");
      write((u_char)sum);
      break;
    }
    sum -= L;
    int b_i = L + (sum % H); // this is a "high code"
    sum >>= lg_H; // extracted 6 bits
    write(b_i); ++i;
  }
#ifndef PRODUCT
  if (test_compressed_stream_enabled) { // hack to enable this stress test
    test_compressed_stream_enabled = false;
    test_compressed_stream(0);
  }
#endif
}
#ifndef PRODUCT
#if defined(_MSC_VER) && _MSC_VER >=1400 && !defined(_WIN64)
// Disable optimization of the test code for 32-bit VC++ (see matching
// re-enable at the end of the file).
#pragma optimize("", off)
#pragma warning(disable: 4748)
#endif
// Number of test values fed to each stream primitive below.
enum { stretch_limit = (1<<16) * (64-16+1) };
// Map a small test index x onto a bit pattern spanning the full width:
// the high nybble of x's low 16 bits goes near the top, the low 12 bits are
// sign-extended and shifted left by x's upper bits.  (Test support only.)
static jlong stretch(jint x, int bits) {
  jlong h = (jlong)((x >> (16-4))) << (bits - 4);
  jlong l = ((jlong)x << (64-12)) >> (64-12);
  l <<= (x >> 16);
  return h ^ l;
}
PRAGMA_DIAG_PUSH
PRAGMA_FORMAT_IGNORED // Someone needs to deal with this.
// Round-trip stress test: write a large set of "stretched" values with every
// writer primitive, then read them back and compare each one.  trace > 0
// prints every trace-th step; any mismatch increments fails.
void test_compressed_stream(int trace) {
  CompressedWriteStream bytes(stretch_limit * 100);
  jint n;
  int step = 0, fails = 0;
// Compare decoded y against expected x, tracking the encoded length via the
// stream position.
#define CHECKXY(x, y, fmt) { \
    ++step; \
    int xlen = (pos = decode.position()) - lastpos; lastpos = pos; \
    if (trace > 0 && (step % trace) == 0) { \
      tty->print_cr("step %d, n=%08x: value=" fmt " (len=%d)", \
                    step, n, x, xlen); } \
    if (x != y) { \
      tty->print_cr("step %d, n=%d: " fmt " != " fmt, step, n, x, y); \
      fails++; \
    } }
  // Write phase: all byte values, then int/signed-int/float, then long/double.
  for (n = 0; n < (1<<8); n++) {
    jbyte x = (jbyte)n;
    bytes.write_byte(x); ++step;
  }
  for (n = 0; n < stretch_limit; n++) {
    jint x = (jint)stretch(n, 32);
    bytes.write_int(x); ++step;
    bytes.write_signed_int(x); ++step;
    bytes.write_float(jfloat_cast(x)); ++step;
  }
  for (n = 0; n < stretch_limit; n++) {
    jlong x = stretch(n, 64);
    bytes.write_long(x); ++step;
    bytes.write_double(jdouble_cast(x)); ++step;
  }
  int length = bytes.position();
  if (trace != 0)
    tty->print_cr("set up test of %d stream values, size %d", step, length);
  step = 0;
  // Read phase: must visit the same values in the same order.
  CompressedReadStream decode(bytes.buffer());
  int pos, lastpos = decode.position();
  for (n = 0; n < (1<<8); n++) {
    jbyte x = (jbyte)n;
    jbyte y = decode.read_byte();
    CHECKXY(x, y, "%db");
  }
  for (n = 0; n < stretch_limit; n++) {
    jint x = (jint)stretch(n, 32);
    jint y1 = decode.read_int();
    CHECKXY(x, y1, "%du");
    jint y2 = decode.read_signed_int();
    CHECKXY(x, y2, "%di");
    jint y3 = jint_cast(decode.read_float());
    CHECKXY(x, y3, "%df");
  }
  for (n = 0; n < stretch_limit; n++) {
    jlong x = stretch(n, 64);
    jlong y1 = decode.read_long();
    CHECKXY(x, y1, INT64_FORMAT "l");
    jlong y2 = jlong_cast(decode.read_double());
    CHECKXY(x, y2, INT64_FORMAT "d");
  }
  int length2 = decode.position();
  if (trace != 0)
    tty->print_cr("finished test of %d stream values, size %d", step, length2);
  guarantee(length == length2, "bad length");
  guarantee(fails == 0, "test failures");
}
PRAGMA_DIAG_POP
  3815. #if defined(_MSC_VER) &&_MSC_VER >=1400 && !defined(_WIN64)
  3816. #pragma warning(default: 4748)
  3817. #pragma optimize("", on)
  3818. #endif
  3819. #endif // PRODUCT
  3820. C:\hotspot-69087d08d473\src\share\vm/code/compressedStream.hpp
  3821. #ifndef SHARE_VM_CODE_COMPRESSEDSTREAM_HPP
  3822. #define SHARE_VM_CODE_COMPRESSEDSTREAM_HPP
  3823. #include "memory/allocation.hpp"
// Base class for compact streams of primitive values (used for debugging
// information).  Holds the byte buffer and a cursor; encoding constants
// follow the Pack200 UNSIGNED5 scheme.
class CompressedStream : public ResourceObj {
  friend class VMStructs;
 protected:
  u_char* _buffer;    // backing bytes
  int     _position;  // cursor (byte index)
  enum {
    // Constants for UNSIGNED5 coding of Pack200
    lg_H = 6, H = 1<<lg_H,   // number of high codes (64)
    L = (1<<BitsPerByte)-H,  // number of low codes (192)
    MAX_i = 4                // bytes are numbered in (0..4), max 5 bytes
  };
  // Helpers shared by the read and write streams (defined in the .cpp file).
  static inline juint encode_sign(jint  value); // for Pack200 SIGNED5
  static inline jint  decode_sign(juint value); // for Pack200 SIGNED5
  static inline juint reverse_int(juint bits);  // to trim trailing float 0's
 public:
  CompressedStream(u_char* buffer, int position = 0) {
    _buffer   = buffer;
    _position = position;
  }
  u_char* buffer() const          { return _buffer; }
  // Cursor access.
  int position() const            { return _position; }
  void set_position(int position) { _position = position; }
};
// Decoder side: reads back values written by CompressedWriteStream.
class CompressedReadStream : public CompressedStream {
 private:
  inline u_char read() { return _buffer[_position++]; }
  jint read_int_mb(jint b0);  // UNSIGNED5 coding, 2-5 byte cases
 public:
  CompressedReadStream(u_char* buffer, int position = 0)
    : CompressedStream(buffer, position) {}
  jboolean read_bool()  { return (jboolean) read();            }
  jbyte    read_byte()  { return (jbyte   ) read();            }
  jchar    read_char()  { return (jchar   ) read_int();        }
  jshort   read_short() { return (jshort  ) read_signed_int(); }
  // Fast path inline: single low-code byte; otherwise multi-byte decode.
  jint read_int() { jint b0 = read();
                    if (b0 < L) return b0;
                    else        return read_int_mb(b0);
                  }
  jint    read_signed_int();
  jfloat  read_float();  // jfloat_cast(reverse_int(read_int()))
  jdouble read_double(); // jdouble_cast(2*reverse_int(read_int))
  jlong   read_long();   // jlong_from(2*read_signed_int())
};
// Encoder side: appends values into a growable buffer using UNSIGNED5-style
// variable-length coding.
class CompressedWriteStream : public CompressedStream {
 private:
  bool full() {
    return _position >= _size;
  }
  void store(u_char b) {
    _buffer[_position++] = b;
  }
  void write(u_char b) {
    if (full()) grow();
    store(b);
  }
  void grow();                  // double the buffer capacity
  void write_int_mb(jint value); // UNSIGNED5 coding, 1-5 byte cases
 protected:
  int _size;                    // current buffer capacity
 public:
  CompressedWriteStream(int initial_size);
  CompressedWriteStream(u_char* buffer, int initial_size, int position = 0)
    : CompressedStream(buffer, position) { _size = initial_size; }
  void write_bool(jboolean value)  { write(value);            }
  void write_byte(jbyte value)     { write(value);            }
  void write_char(jchar value)     { write_int(value);        }
  void write_short(jshort value)   { write_signed_int(value); }
  // Fast path inline: values below L fit in one byte when room is available.
  void write_int(jint value)       { if ((juint)value < L && !full())
                                       store((u_char)value);
                                     else write_int_mb(value); }
  void write_signed_int(jint value); // write_int(encode_sign(value))
  void write_float(jfloat value);    // write_int(reverse_int(jint_cast(v)))
  void write_double(jdouble value);  // write_int(reverse_int(<low,high>))
  void write_long(jlong value);      // write_signed_int(<low,high>)
};
  3898. #endif // SHARE_VM_CODE_COMPRESSEDSTREAM_HPP
  3899. C:\hotspot-69087d08d473\src\share\vm/code/debugInfo.cpp
  3900. #include "precompiled.hpp"
  3901. #include "code/debugInfo.hpp"
  3902. #include "code/debugInfoRec.hpp"
  3903. #include "code/nmethod.hpp"
  3904. #include "runtime/handles.inline.hpp"
  3905. #include "runtime/interfaceSupport.hpp"
  3906. #include "runtime/thread.hpp"
  3907. PRAGMA_FORMAT_MUTE_WARNINGS_FOR_GCC
  3908. DebugInfoWriteStream::DebugInfoWriteStream(DebugInformationRecorder* recorder, int initial_size)
  3909. : CompressedWriteStream(initial_size) {
  3910. _recorder = recorder;
  3911. }
  3912. void DebugInfoWriteStream::write_handle(jobject h) {
  3913. write_int(recorder()->oop_recorder()->find_index(h));
  3914. }
  3915. void DebugInfoWriteStream::write_metadata(Metadata* h) {
  3916. write_int(recorder()->oop_recorder()->find_index(h));
  3917. }
// Decode a full ObjectValue (OBJECT_CODE) from the stream.  The new value
// is pushed into the object pool *before* its contents are read so that
// back-references (OBJECT_ID_CODE) inside the object graph can resolve
// via get_cached_object().
ScopeValue* DebugInfoReadStream::read_object_value() {
  int id = read_int();
#ifdef ASSERT
  assert(_obj_pool != NULL, "object pool does not exist");
  // Each object id may be materialized at most once per decode pass.
  for (int i = _obj_pool->length() - 1; i >= 0; i--) {
    assert(((ObjectValue*) _obj_pool->at(i))->id() != id, "should not be read twice");
  }
#endif
  ObjectValue* result = new ObjectValue(id);
  // Register before reading fields: a field may refer back to this object.
  _obj_pool->push(result);
  result->read_object(this);
  return result;
}
  3931. ScopeValue* DebugInfoReadStream::get_cached_object() {
  3932. int id = read_int();
  3933. assert(_obj_pool != NULL, "object pool does not exist");
  3934. for (int i = _obj_pool->length() - 1; i >= 0; i--) {
  3935. ObjectValue* ov = (ObjectValue*) _obj_pool->at(i);
  3936. if (ov->id() == id) {
  3937. return ov;
  3938. }
  3939. }
  3940. ShouldNotReachHere();
  3941. return NULL;
  3942. }
// Serialization tags: the first int of every serialized ScopeValue
// identifies which concrete subclass follows (see ScopeValue::read_from
// and the matching write_on methods).
enum { LOCATION_CODE = 0, CONSTANT_INT_CODE = 1,  CONSTANT_OOP_CODE = 2,
                          CONSTANT_LONG_CODE = 3, CONSTANT_DOUBLE_CODE = 4,
                          OBJECT_CODE = 5,        OBJECT_ID_CODE = 6 };
  3946. ScopeValue* ScopeValue::read_from(DebugInfoReadStream* stream) {
  3947. ScopeValue* result = NULL;
  3948. switch(stream->read_int()) {
  3949. case LOCATION_CODE: result = new LocationValue(stream); break;
  3950. case CONSTANT_INT_CODE: result = new ConstantIntValue(stream); break;
  3951. case CONSTANT_OOP_CODE: result = new ConstantOopReadValue(stream); break;
  3952. case CONSTANT_LONG_CODE: result = new ConstantLongValue(stream); break;
  3953. case CONSTANT_DOUBLE_CODE: result = new ConstantDoubleValue(stream); break;
  3954. case OBJECT_CODE: result = stream->read_object_value(); break;
  3955. case OBJECT_ID_CODE: result = stream->get_cached_object(); break;
  3956. default: ShouldNotReachHere();
  3957. }
  3958. return result;
  3959. }
// Reconstruct a LocationValue by decoding its Location from the stream.
LocationValue::LocationValue(DebugInfoReadStream* stream) {
  _location = Location(stream);
}
// Serialize as the LOCATION_CODE tag followed by the encoded Location.
void LocationValue::write_on(DebugInfoWriteStream* stream) {
  stream->write_int(LOCATION_CODE);
  location().write_on(stream);
}
// Debug printing: delegate to the wrapped Location.
void LocationValue::print_on(outputStream* st) const {
  location().print_on(st);
}
// Decode this object's payload: its klass (a constant mirror oop), then a
// field count, then one ScopeValue per field.  Must mirror the layout
// emitted by ObjectValue::write_on for the OBJECT_CODE case.
void ObjectValue::read_object(DebugInfoReadStream* stream) {
  _klass = read_from(stream);
  assert(_klass->is_constant_oop(), "should be constant java mirror oop");
  int length = stream->read_int();
  for (int i = 0; i < length; i++) {
    ScopeValue* val = read_from(stream);
    _field_values.append(val);
  }
}
// Serialize this object.  The first visit emits the full payload
// (OBJECT_CODE, id, klass, field count, fields); any later visit within
// the same serialization pass emits only a back-reference
// (OBJECT_ID_CODE, id).  _visited is reset externally between passes
// (see DebugInformationRecorder::dump_object_pool).
void ObjectValue::write_on(DebugInfoWriteStream* stream) {
  if (_visited) {
    stream->write_int(OBJECT_ID_CODE);
    stream->write_int(_id);
  } else {
    // Mark before writing fields so a cyclic reference back to this
    // object serializes as a back-reference instead of recursing forever.
    _visited = true;
    stream->write_int(OBJECT_CODE);
    stream->write_int(_id);
    _klass->write_on(stream);
    int length = _field_values.length();
    stream->write_int(length);
    for (int i = 0; i < length; i++) {
      _field_values.at(i)->write_on(stream);
    }
  }
}
// Debug printing: show only the object's stream id.
void ObjectValue::print_on(outputStream* st) const {
  st->print("obj[%d]", _id);
}
  3998. void ObjectValue::print_fields_on(outputStream* st) const {
  3999. #ifndef PRODUCT
  4000. if (_field_values.length() > 0) {
  4001. _field_values.at(0)->print_on(st);
  4002. }
  4003. for (int i = 1; i < _field_values.length(); i++) {
  4004. st->print(", ");
  4005. _field_values.at(i)->print_on(st);
  4006. }
  4007. #endif
  4008. }
// Reconstruct from the stream (sign-encoded int).
ConstantIntValue::ConstantIntValue(DebugInfoReadStream* stream) {
  _value = stream->read_signed_int();
}
// Serialize as the CONSTANT_INT_CODE tag plus the sign-encoded value.
void ConstantIntValue::write_on(DebugInfoWriteStream* stream) {
  stream->write_int(CONSTANT_INT_CODE);
  stream->write_signed_int(value());
}
// Debug printing: the constant as a decimal int.
void ConstantIntValue::print_on(outputStream* st) const {
  st->print("%d", value());
}
// Reconstruct from the stream (pair of sign-encoded halves, see read_long).
ConstantLongValue::ConstantLongValue(DebugInfoReadStream* stream) {
  _value = stream->read_long();
}
// Serialize as the CONSTANT_LONG_CODE tag plus the encoded long.
void ConstantLongValue::write_on(DebugInfoWriteStream* stream) {
  stream->write_int(CONSTANT_LONG_CODE);
  stream->write_long(value());
}
// Debug printing: the constant as a decimal 64-bit int.
void ConstantLongValue::print_on(outputStream* st) const {
  st->print(INT64_FORMAT, value());
}
// Reconstruct from the stream (bit-reversed encoding, see read_double).
ConstantDoubleValue::ConstantDoubleValue(DebugInfoReadStream* stream) {
  _value = stream->read_double();
}
// Serialize as the CONSTANT_DOUBLE_CODE tag plus the encoded double.
void ConstantDoubleValue::write_on(DebugInfoWriteStream* stream) {
  stream->write_int(CONSTANT_DOUBLE_CODE);
  stream->write_double(value());
}
// Debug printing: the constant as a decimal double.
void ConstantDoubleValue::print_on(outputStream* st) const {
  st->print("%f", value());
}
// Serialize as the CONSTANT_OOP_CODE tag plus the oop's recorder index.
void ConstantOopWriteValue::write_on(DebugInfoWriteStream* stream) {
#ifdef ASSERT
  {
    // Resolving a JNI handle and touching the heap requires the VM state;
    // this block may be entered from a compiler thread in another state.
    ThreadInVMfromUnknown tiv;
    assert(JNIHandles::resolve(value()) == NULL ||
           Universe::heap()->is_in_reserved(JNIHandles::resolve(value())),
           "Should be in heap");
  }
#endif
  stream->write_int(CONSTANT_OOP_CODE);
  stream->write_handle(value());
}
// Debug printing: resolve the JNI handle (needs VM state) and print the oop.
void ConstantOopWriteValue::print_on(outputStream* st) const {
  ThreadInVMfromUnknown tiv;
  JNIHandles::resolve(value())->print_value_on(st);
}
// Reconstruct by reading the oop index and resolving it against the
// nmethod's oop table (see DebugInfoReadStream::read_oop).
ConstantOopReadValue::ConstantOopReadValue(DebugInfoReadStream* stream) {
  _value = Handle(stream->read_oop());
  assert(_value() == NULL ||
         Universe::heap()->is_in_reserved(_value()), "Should be in heap");
}
// Read-side values are never serialized back; writing is done via
// ConstantOopWriteValue.
void ConstantOopReadValue::write_on(DebugInfoWriteStream* stream) {
  ShouldNotReachHere();
}
// Debug printing: dereference the handle and print the oop's value.
void ConstantOopReadValue::print_on(outputStream* st) const {
  value()()->print_value_on(st);
}
  4066. MonitorValue::MonitorValue(ScopeValue* owner, Location basic_lock, bool eliminated) {
  4067. _owner = owner;
  4068. _basic_lock = basic_lock;
  4069. _eliminated = eliminated;
  4070. }
// Reconstruct from the stream; field order must mirror
// MonitorValue::write_on (basic_lock, owner, eliminated flag).
MonitorValue::MonitorValue(DebugInfoReadStream* stream) {
  _basic_lock = Location(stream);
  _owner = ScopeValue::read_from(stream);
  _eliminated = (stream->read_bool() != 0);
}
// Serialize in the order expected by the stream constructor above.
void MonitorValue::write_on(DebugInfoWriteStream* stream) {
  _basic_lock.write_on(stream);
  _owner->write_on(stream);
  stream->write_bool(_eliminated);
}
#ifndef PRODUCT
// Debug printing: "monitor{owner,lock}" plus an elimination marker.
void MonitorValue::print_on(outputStream* st) const {
  st->print("monitor{");
  owner()->print_on(st);
  st->print(",");
  basic_lock().print_on(st);
  st->print("}");
  if (_eliminated) {
    st->print(" (eliminated)");
  }
}
#endif
  4093. C:\hotspot-69087d08d473\src\share\vm/code/debugInfo.hpp
  4094. #ifndef SHARE_VM_CODE_DEBUGINFO_HPP
  4095. #define SHARE_VM_CODE_DEBUGINFO_HPP
  4096. #include "code/compressedStream.hpp"
  4097. #include "code/location.hpp"
  4098. #include "code/nmethod.hpp"
  4099. #include "code/oopRecorder.hpp"
  4100. #include "runtime/stackValue.hpp"
  4101. #include "utilities/growableArray.hpp"
  4102. class ConstantOopReadValue;
// A ScopeValue describes the runtime value of a variable or expression in
// a scope: a machine location, a compile-time constant, or a
// (scalar-replaced) object.  Subclasses override exactly one tester.
class ScopeValue: public ResourceObj {
 public:
  // Testers; each concrete subclass overrides the one that applies.
  virtual bool is_location() const { return false; }
  virtual bool is_object() const { return false; }
  virtual bool is_constant_int() const { return false; }
  virtual bool is_constant_double() const { return false; }
  virtual bool is_constant_long() const { return false; }
  virtual bool is_constant_oop() const { return false; }
  virtual bool equals(ScopeValue* other) const { return false; }
  // Checked downcast; asserts is_constant_oop().
  ConstantOopReadValue* as_ConstantOopReadValue() {
    assert(is_constant_oop(), "must be");
    return (ConstantOopReadValue*) this;
  }
  // Serialization: write_on emits a tag plus payload; read_from is the
  // matching factory that dispatches on the tag.
  virtual void write_on(DebugInfoWriteStream* stream) = 0;
  static ScopeValue* read_from(DebugInfoReadStream* stream);
};
// A ScopeValue held in a machine location (register or stack slot),
// described by a Location.
class LocationValue: public ScopeValue {
 private:
  Location _location;
 public:
  LocationValue(Location location) { _location = location; }
  bool is_location() const { return true; }
  Location location() const { return _location; }
  // Serialization
  LocationValue(DebugInfoReadStream* stream);
  void write_on(DebugInfoWriteStream* stream);
  // Printing
  void print_on(outputStream* st) const;
};
  4130. class ObjectValue: public ScopeValue {
  4131. private:
  4132. int _id;
  4133. ScopeValue* _klass;
  4134. GrowableArray<ScopeValue*> _field_values;
  4135. Handle _value;
  4136. bool _visited;
  4137. public:
  4138. ObjectValue(int id, ScopeValue* klass)
  4139. : _id(id)
  4140. , _klass(klass)
  4141. , _field_values()
  4142. , _value()
  4143. , _visited(false) {
  4144. assert(klass->is_constant_oop(), "should be constant java mirror oop");
  4145. }
  4146. ObjectValue(int id)
  4147. : _id(id)
  4148. , _klass(NULL)
  4149. , _field_values()
  4150. , _value()
  4151. , _visited(false) {}
  4152. bool is_object() const { return true; }
  4153. int id() const { return _id; }
  4154. ScopeValue* klass() const { return _klass; }
  4155. GrowableArray<ScopeValue*>* field_values() { return &_field_values; }
  4156. ScopeValue* field_at(int i) const { return _field_values.at(i); }
  4157. int field_size() { return _field_values.length(); }
  4158. Handle value() const { return _value; }
  4159. bool is_visited() const { return _visited; }
  4160. void set_value(oop value) { _value = Handle(value); }
  4161. void set_visited(bool visited) { _visited = false; }
  4162. void read_object(DebugInfoReadStream* stream);
  4163. void write_on(DebugInfoWriteStream* stream);
  4164. void print_on(outputStream* st) const;
  4165. void print_fields_on(outputStream* st) const;
  4166. };
// A compile-time constant jint ScopeValue.
class ConstantIntValue: public ScopeValue {
 private:
  jint _value;
 public:
  ConstantIntValue(jint value) { _value = value; }
  jint value() const { return _value; }
  bool is_constant_int() const { return true; }
  bool equals(ScopeValue* other) const { return false; }
  // Serialization
  ConstantIntValue(DebugInfoReadStream* stream);
  void write_on(DebugInfoWriteStream* stream);
  // Printing
  void print_on(outputStream* st) const;
};
// A compile-time constant jlong ScopeValue.
class ConstantLongValue: public ScopeValue {
 private:
  jlong _value;
 public:
  ConstantLongValue(jlong value) { _value = value; }
  jlong value() const { return _value; }
  bool is_constant_long() const { return true; }
  bool equals(ScopeValue* other) const { return false; }
  // Serialization
  ConstantLongValue(DebugInfoReadStream* stream);
  void write_on(DebugInfoWriteStream* stream);
  // Printing
  void print_on(outputStream* st) const;
};
// A compile-time constant jdouble ScopeValue.
class ConstantDoubleValue: public ScopeValue {
 private:
  jdouble _value;
 public:
  ConstantDoubleValue(jdouble value) { _value = value; }
  jdouble value() const { return _value; }
  bool is_constant_double() const { return true; }
  bool equals(ScopeValue* other) const { return false; }
  // Serialization
  ConstantDoubleValue(DebugInfoReadStream* stream);
  void write_on(DebugInfoWriteStream* stream);
  // Printing
  void print_on(outputStream* st) const;
};
// A constant oop ScopeValue on the compiler (write) side, held as a JNI
// handle; serialized as an OopRecorder index.
class ConstantOopWriteValue: public ScopeValue {
 private:
  jobject _value;
 public:
  ConstantOopWriteValue(jobject value) { _value = value; }
  jobject value() const { return _value; }
  bool is_constant_oop() const { return true; }
  bool equals(ScopeValue* other) const { return false; }
  // Serialization (write-only; the read side uses ConstantOopReadValue)
  void write_on(DebugInfoWriteStream* stream);
  // Printing
  void print_on(outputStream* st) const;
};
// A constant oop ScopeValue on the runtime (read) side, held as a Handle
// resolved from the nmethod's oop table.
class ConstantOopReadValue: public ScopeValue {
 private:
  Handle _value;
 public:
  Handle value() const { return _value; }
  bool is_constant_oop() const { return true; }
  bool equals(ScopeValue* other) const { return false; }
  // Serialization (read-only; write_on asserts)
  ConstantOopReadValue(DebugInfoReadStream* stream);
  void write_on(DebugInfoWriteStream* stream);
  // Printing
  void print_on(outputStream* st) const;
};
// A MonitorValue describes one monitor in a scope: the owning object (as
// a ScopeValue), the stack location of its BasicLock, and whether the
// lock was eliminated by the compiler.
class MonitorValue: public ResourceObj {
 private:
  ScopeValue* _owner;
  Location    _basic_lock;
  bool        _eliminated;
 public:
  MonitorValue(ScopeValue* owner, Location basic_lock, bool eliminated = false);
  ScopeValue* owner() const { return _owner; }
  Location basic_lock() const { return _basic_lock; }
  bool eliminated() const { return _eliminated; }
  // Serialization
  MonitorValue(DebugInfoReadStream* stream);
  void write_on(DebugInfoWriteStream* stream);
  // Printing
  void print_on(outputStream* st) const;
};
// Read stream over an nmethod's scopes-data section.  Oops and metadata
// are stored as indexes and resolved against the owning nmethod; the
// optional object pool supports decoding scalar-replaced object graphs.
class DebugInfoReadStream : public CompressedReadStream {
 private:
  const nmethod* _code;
  const nmethod* code() const { return _code; }
  GrowableArray<ScopeValue*>* _obj_pool;  // decoded ObjectValues, for back-references
 public:
  DebugInfoReadStream(const nmethod* code, int offset, GrowableArray<ScopeValue*>* obj_pool = NULL) :
    CompressedReadStream(code->scopes_data_begin(), offset) {
    _code = code;
    _obj_pool = obj_pool;
  } ;
  // Read an oop index and resolve it in the nmethod's oop table.
  oop read_oop() {
    oop o = code()->oop_at(read_int());
    assert(o == NULL || o->is_oop(), "oop only");
    return o;
  }
  // Read a metadata index and resolve it in the nmethod's metadata table.
  Method* read_method() {
    Method* o = (Method*)(code()->metadata_at(read_int()));
    assert(o == NULL || o->is_metadata(), "meta data only");
    return o;
  }
  ScopeValue* read_object_value();
  ScopeValue* get_cached_object();
  // BCIs are stored biased by InvocationEntryBci (see write_bci).
  int read_bci() { return read_int() + InvocationEntryBci; }
};
// Write stream for debug information; oops and metadata are emitted as
// indexes into the owning recorder's OopRecorder.
class DebugInfoWriteStream : public CompressedWriteStream {
 private:
  DebugInformationRecorder* _recorder;
  DebugInformationRecorder* recorder() const { return _recorder; }
 public:
  DebugInfoWriteStream(DebugInformationRecorder* recorder, int initial_size);
  void write_handle(jobject h);
  // BCIs are stored biased by InvocationEntryBci (see read_bci).
  void write_bci(int bci) { write_int(bci - InvocationEntryBci); }
  void write_metadata(Metadata* m);
};
  4274. #endif // SHARE_VM_CODE_DEBUGINFO_HPP
  4275. C:\hotspot-69087d08d473\src\share\vm/code/debugInfoRec.cpp
  4276. #include "precompiled.hpp"
  4277. #include "code/debugInfoRec.hpp"
  4278. #include "code/scopeDesc.hpp"
  4279. #include "prims/jvmtiExport.hpp"
// A nugget describing one serialized span of the debug-info stream,
// used to detect and share byte-identical spans (see
// DebugInformationRecorder::find_sharable_decode_offset).
class DIR_Chunk {
  friend class DebugInformationRecorder;
  int  _offset; // location in the stream of this scope
  int  _length; // number of bytes in the stream
  int  _hash;   // hash of stream bytes (for quicker reuse)
  // Bump-allocate chunks from the recorder's resource-area pool, refilled
  // 100 at a time.  A rejected chunk can be given back by decrementing
  // _next_chunk (the callers rely on LIFO order).
  void* operator new(size_t ignore, DebugInformationRecorder* dir) throw() {
    assert(ignore == sizeof(DIR_Chunk), "");
    if (dir->_next_chunk >= dir->_next_chunk_limit) {
      const int CHUNK = 100;
      dir->_next_chunk = NEW_RESOURCE_ARRAY(DIR_Chunk, CHUNK);
      dir->_next_chunk_limit = dir->_next_chunk + CHUNK;
    }
    return dir->_next_chunk++;
  }
  DIR_Chunk(int offset, int length, DebugInformationRecorder* dir) {
    _offset = offset;
    _length = length;
    // Cheap hash over at most the first 6 bytes of the span; full
    // equality is confirmed by memcmp in find_match.
    unsigned int hash = 0;
    address p = dir->stream()->buffer() + _offset;
    for (int i = 0; i < length; i++) {
      if (i == 6) break;
      hash *= 127;
      hash += p[i];
    }
    _hash = hash;
  }
  // Find a chunk in arr[start_index..] whose stream bytes equal this
  // chunk's bytes, scanning newest-first; returns NULL if none matches.
  DIR_Chunk* find_match(GrowableArray<DIR_Chunk*>* arr,
                        int start_index,
                        DebugInformationRecorder* dir) {
    int end_index = arr->length();
    int hash = this->_hash, length = this->_length;
    address buf = dir->stream()->buffer();
    for (int i = end_index; --i >= start_index; ) {
      DIR_Chunk* that = arr->at(i);
      if (hash == that->_hash &&
          length == that->_length &&
          0 == memcmp(buf + this->_offset, buf + that->_offset, length)) {
        return that;
      }
    }
    return NULL;
  }
};
  4323. static inline bool compute_recording_non_safepoints() {
  4324. if (JvmtiExport::should_post_compiled_method_load()
  4325. && FLAG_IS_DEFAULT(DebugNonSafepoints)) {
  4326. return true;
  4327. }
  4328. return DebugNonSafepoints;
  4329. }
// Set up the recorder: an initial PcDesc array, the write stream, chunk
// tables for sharing, and a sentinel PcDesc at the lower offset limit.
DebugInformationRecorder::DebugInformationRecorder(OopRecorder* oop_recorder)
  : _recording_non_safepoints(compute_recording_non_safepoints())
{
  _pcs_size   = 100;
  _pcs        = NEW_RESOURCE_ARRAY(PcDesc, _pcs_size);
  _pcs_length = 0;
  _prev_safepoint_pc = PcDesc::lower_offset_limit;
  _stream = new DebugInfoWriteStream(this, 10 * K);
  // Burn position 0 so that a real decode offset is never equal to
  // serialized_null (0).
  _stream->write_byte((jbyte)0xFF);
  assert(_stream->position() > serialized_null, "sanity");
  _oop_recorder = oop_recorder;
  _all_chunks    = new GrowableArray<DIR_Chunk*>(300);
  _shared_chunks = new GrowableArray<DIR_Chunk*>(30);
  _next_chunk = _next_chunk_limit = NULL;
  add_new_pc_offset(PcDesc::lower_offset_limit);  // sentinel record
  debug_only(_recording_state = rs_null);
}
// Register an OopMap for the given pc offset with the oopmap set.
void DebugInformationRecorder::add_oopmap(int pc_offset, OopMap* map) {
  _oopmaps->add_gc_map(pc_offset, map);
}
// Begin recording a safepoint at pc_offset: register its oopmap and open
// a new PcDesc.  Must be paired with end_safepoint(); calls do not nest.
void DebugInformationRecorder::add_safepoint(int pc_offset, OopMap* map) {
  assert(!_oop_recorder->is_complete(), "not frozen yet");
  // Store the new safepoint
  add_oopmap(pc_offset, map);
  add_new_pc_offset(pc_offset);
  assert(_recording_state == rs_null, "nesting of recording calls");
  debug_only(_recording_state = rs_safepoint);
}
// Begin recording a non-safepoint at pc_offset (no oopmap).  Only legal
// when non-safepoint recording is enabled; paired with end_non_safepoint().
void DebugInformationRecorder::add_non_safepoint(int pc_offset) {
  assert(!_oop_recorder->is_complete(), "not frozen yet");
  assert(_recording_non_safepoints, "must be recording non-safepoints");
  add_new_pc_offset(pc_offset);
  assert(_recording_state == rs_null, "nesting of recording calls");
  debug_only(_recording_state = rs_non_safepoint);
}
  4364. void DebugInformationRecorder::add_new_pc_offset(int pc_offset) {
  4365. assert(_pcs_length == 0 || last_pc()->pc_offset() < pc_offset,
  4366. "must specify a new, larger pc offset");
  4367. if (_pcs_length == _pcs_size) {
  4368. int new_pcs_size = _pcs_size * 2;
  4369. PcDesc* new_pcs = NEW_RESOURCE_ARRAY(PcDesc, new_pcs_size);
  4370. for (int index = 0; index < _pcs_length; index++) {
  4371. new_pcs[index] = _pcs[index];
  4372. }
  4373. _pcs_size = new_pcs_size;
  4374. _pcs = new_pcs;
  4375. }
  4376. assert(_pcs_size > _pcs_length, "There must be room for after expanding");
  4377. _pcs[_pcs_length++] = PcDesc(pc_offset, DebugInformationRecorder::serialized_null,
  4378. DebugInformationRecorder::serialized_null);
  4379. }
// Serialize a list of monitor values to the stream and return the decode
// offset; returns serialized_null for an empty list.  If an identical
// byte span already exists, the stream is rewound and the shared offset
// is returned instead.
int DebugInformationRecorder::serialize_monitor_values(GrowableArray<MonitorValue*>* monitors) {
  if (monitors == NULL || monitors->is_empty()) return DebugInformationRecorder::serialized_null;
  assert(_recording_state == rs_safepoint, "must be recording a safepoint");
  int result = stream()->position();
  stream()->write_int(monitors->length());
  for (int index = 0; index < monitors->length(); index++) {
    monitors->at(index)->write_on(stream());
  }
  assert(result != serialized_null, "sanity");
  // (See comment below on find_sharable_decode_offset.)
  int shared_result = find_sharable_decode_offset(result);
  if (shared_result != serialized_null) {
    stream()->set_position(result);  // discard the duplicate bytes
    result = shared_result;
  }
  return result;
}
// Serialize a list of scope values to the stream and return the decode
// offset; returns serialized_null for an empty list.  Byte-identical
// spans are shared, mirroring serialize_monitor_values.
int DebugInformationRecorder::serialize_scope_values(GrowableArray<ScopeValue*>* values) {
  if (values == NULL || values->is_empty()) return DebugInformationRecorder::serialized_null;
  assert(_recording_state == rs_safepoint, "must be recording a safepoint");
  int result = stream()->position();
  assert(result != serialized_null, "sanity");
  stream()->write_int(values->length());
  for (int index = 0; index < values->length(); index++) {
    values->at(index)->write_on(stream());
  }
  // (See comment below on find_sharable_decode_offset.)
  int shared_result = find_sharable_decode_offset(result);
  if (shared_result != serialized_null) {
    stream()->set_position(result);  // discard the duplicate bytes
    result = shared_result;
  }
  return result;
}
#ifndef PRODUCT
// File-local counters for chunk-sharing effectiveness, dumped by
// DebugInformationRecorder::print_statistics().
static
struct dir_stats_struct {
  int chunks_queried;
  int chunks_shared;
  int chunks_reshared;
  int chunks_elided;
  void print() {
    tty->print_cr("Debug Data Chunks: %d, shared %d+%d, non-SP's elided %d",
                  chunks_queried,
                  chunks_shared, chunks_reshared,
                  chunks_elided);
  }
} dir_stats;
#endif //PRODUCT
// Look for an earlier, byte-identical copy of the stream span starting at
// stream_offset (extending to the current position).  Returns the offset
// of a shareable copy, or serialized_null if the caller should keep its
// own bytes.  Sharing is only attempted when recording non-safepoints,
// since that is when duplicated scopes become common.
int DebugInformationRecorder::find_sharable_decode_offset(int stream_offset) {
  if (!recording_non_safepoints())
    return serialized_null;
  NOT_PRODUCT(++dir_stats.chunks_queried);
  int stream_length = stream()->position() - stream_offset;
  assert(stream_offset != serialized_null, "should not be null");
  assert(stream_length != 0, "should not be empty");
  // The candidate chunk is bump-allocated from the recorder's pool and
  // handed back (_next_chunk = ns) if a match makes it redundant.
  DIR_Chunk* ns = new(this) DIR_Chunk(stream_offset, stream_length, this);
  // Check the small set of chunks that have already proven shareable.
  DIR_Chunk* ms = ns->find_match(_shared_chunks, 0, this);
  if (ms != NULL) {
    NOT_PRODUCT(++dir_stats.chunks_reshared);
    assert(ns+1 == _next_chunk, "");
    _next_chunk = ns;
    return ms->_offset;
  }
  // Look in the central list for a match, but only among recent entries
  // to bound the search cost.
  const int MAX_RECENT = 50;
  int start_index = _all_chunks->length() - MAX_RECENT;
  if (start_index < 0) start_index = 0;
  ms = ns->find_match(_all_chunks, start_index, this);
  if (ms != NULL) {
    NOT_PRODUCT(++dir_stats.chunks_shared);
    // Searching in _shared_chunks is faster, so promote the match there.
    _shared_chunks->append(ms);
    assert(ns+1 == _next_chunk, "");
    _next_chunk = ns;
    return ms->_offset;
  }
  // No match: remember this chunk so later spans can share with it.
  _all_chunks->append(ns);
  return serialized_null;
}
// Record one scope (method/bci plus locals, expressions and monitors,
// already serialized into DebugTokens) for the PcDesc opened by the most
// recent add_safepoint/add_non_safepoint at the same pc_offset.  Scopes
// are chained via sender_stream_offset; byte-identical scope records are
// shared when possible.
void DebugInformationRecorder::describe_scope(int         pc_offset,
                                              ciMethod*   method,
                                              int         bci,
                                              bool        reexecute,
                                              bool        is_method_handle_invoke,
                                              bool        return_oop,
                                              DebugToken* locals,
                                              DebugToken* expressions,
                                              DebugToken* monitors) {
  assert(_recording_state != rs_null, "nesting of recording calls");
  PcDesc* last_pd = last_pc();
  assert(last_pd->pc_offset() == pc_offset, "must be last pc");
  // The previously recorded decode offset (if any) becomes the sender
  // link of this scope.
  int sender_stream_offset = last_pd->scope_decode_offset();
  // Update the stream offset of the current pc desc and its flags.
  int stream_offset = stream()->position();
  last_pd->set_scope_decode_offset(stream_offset);
  last_pd->set_should_reexecute(reexecute);
  last_pd->set_is_method_handle_invoke(is_method_handle_invoke);
  last_pd->set_return_oop(return_oop);
  // Serialize the scope: sender link, method, bci, then the three
  // already-serialized value lists (tokens are stream offsets).
  stream()->write_int(sender_stream_offset);
  Metadata* method_enc = (method == NULL)? NULL: method->constant_encoding();
  stream()->write_int(oop_recorder()->find_index(method_enc));
  stream()->write_bci(bci);
  assert(method == NULL ||
         (method->is_native() && bci == 0) ||
         (!method->is_native() && 0 <= bci && bci < method->code_size()) ||
         (method->is_compiled_lambda_form() && bci == -99) ||  // this might happen in C1
         bci == -1, "illegal bci");
  // Serialize the locals/expressions/monitors pass-through tokens.
  stream()->write_int((intptr_t) locals);
  stream()->write_int((intptr_t) expressions);
  stream()->write_int((intptr_t) monitors);
  // Attempt to share the newly written scope record.
  int shared_stream_offset = find_sharable_decode_offset(stream_offset);
  if (shared_stream_offset != serialized_null) {
    stream()->set_position(stream_offset);  // discard the duplicate bytes
    last_pd->set_scope_decode_offset(shared_stream_offset);
  }
}
// Serialize the pool of scalar-replaced objects for the current safepoint
// and attach its decode offset to the last PcDesc.  Visited flags are
// cleared first so every object serializes fully once per safepoint.
void DebugInformationRecorder::dump_object_pool(GrowableArray<ScopeValue*>* objects) {
  guarantee( _pcs_length > 0, "safepoint must exist before describing scopes");
  PcDesc* last_pd = &_pcs[_pcs_length-1];
  if (objects != NULL) {
    for (int i = objects->length() - 1; i >= 0; i--) {
      ((ObjectValue*) objects->at(i))->set_visited(false);
    }
  }
  int offset = serialize_scope_values(objects);
  last_pd->set_obj_decode_offset(offset);
}
// Close the recording opened by add_safepoint/add_non_safepoint.  When
// recording non-safepoints, a PcDesc that carries exactly the same info
// as its predecessor (and no safepoint between them) is elided by folding
// it into the predecessor, which merely extends its pc range.
void DebugInformationRecorder::end_scopes(int pc_offset, bool is_safepoint) {
  assert(_recording_state == (is_safepoint? rs_safepoint: rs_non_safepoint),
         "nesting of recording calls");
  debug_only(_recording_state = rs_null);
  if (_pcs_length >= 2 && recording_non_safepoints()) {
    PcDesc* last = last_pc();
    PcDesc* prev = prev_pc();
    if (_prev_safepoint_pc < prev->pc_offset() && prev->is_same_info(last)) {
      assert(prev == last-1, "sane");
      prev->set_pc_offset(pc_offset);
      _pcs_length -= 1;
      NOT_PRODUCT(++dir_stats.chunks_elided);
    }
  }
  // Safepoints are never elided; remember the last one as a barrier.
  if (is_safepoint) {
    _prev_safepoint_pc = pc_offset;
  }
}
  4521. #ifdef ASSERT
  4522. bool DebugInformationRecorder::recorders_frozen() {
  4523. return _oop_recorder->is_complete() || _oop_recorder->is_complete();
  4524. }
// Freeze the oop recorder so later recording attempts trip asserts.
void DebugInformationRecorder::mark_recorders_frozen() {
  _oop_recorder->freeze();
}
  4528. #endif // PRODUCT
// Serialize scope values and wrap the resulting decode offset in an
// opaque DebugToken for later use by describe_scope.
DebugToken* DebugInformationRecorder::create_scope_values(GrowableArray<ScopeValue*>* values) {
  assert(!recorders_frozen(), "not frozen yet");
  return (DebugToken*) (intptr_t) serialize_scope_values(values);
}
// Serialize monitor values and wrap the resulting decode offset in an
// opaque DebugToken for later use by describe_scope.
DebugToken* DebugInformationRecorder::create_monitor_values(GrowableArray<MonitorValue*>* monitors) {
  assert(!recorders_frozen(), "not frozen yet");
  return (DebugToken*) (intptr_t) serialize_monitor_values(monitors);
}
// Size in bytes of the serialized scopes data.  Freezes the recorders:
// no further recording is allowed after this query.
int DebugInformationRecorder::data_size() {
  debug_only(mark_recorders_frozen());  // mark it "frozen" for asserts
  return _stream->position();
}
// Size in bytes of the PcDesc table, after appending the terminating
// sentinel at the upper offset limit.  Freezes the recorders.
int DebugInformationRecorder::pcs_size() {
  debug_only(mark_recorders_frozen());  // mark it "frozen" for asserts
  if (last_pc()->pc_offset() != PcDesc::upper_offset_limit)
    add_new_pc_offset(PcDesc::upper_offset_limit);
  return _pcs_length * sizeof(PcDesc);
}
// Copy the recorded scopes data and PcDesc table into the nmethod.
void DebugInformationRecorder::copy_to(nmethod* nm) {
  nm->copy_scopes_data(stream()->buffer(), stream()->position());
  nm->copy_scopes_pcs(_pcs, _pcs_length);
}
// Verification of the copied debug info is not implemented.
void DebugInformationRecorder::verify(const nmethod* code) {
  Unimplemented();
}
#ifndef PRODUCT
// Dump the file-local chunk-sharing counters (see dir_stats above).
void DebugInformationRecorder::print_statistics() {
  dir_stats.print();
}
#endif //PRODUCT
  4559. C:\hotspot-69087d08d473\src\share\vm/code/debugInfoRec.hpp
  4560. #ifndef SHARE_VM_CODE_DEBUGINFOREC_HPP
  4561. #define SHARE_VM_CODE_DEBUGINFOREC_HPP
  4562. #include "ci/ciClassList.hpp"
  4563. #include "ci/ciInstanceKlass.hpp"
  4564. #include "ci/ciMethod.hpp"
  4565. #include "code/debugInfo.hpp"
  4566. #include "code/location.hpp"
  4567. #include "code/pcDesc.hpp"
  4568. #include "compiler/oopMap.hpp"
  4569. #include "oops/oop.hpp"
  4570. #include "utilities/growableArray.hpp"
  4571. class DebugToken; // Opaque datatype for stored:
  4572. const int SynchronizationEntryBCI = InvocationEntryBci;
  4573. class DIR_Chunk; // private class, a nugget of collected information
// Collects the debug information (scopes, oop maps, pc descriptors) for
// one compiled method while it is being generated, and copies it into
// the finished nmethod.  Usage protocol per pc:
//   add_safepoint/add_non_safepoint -> describe_scope* -> end_*
// with value lists pre-serialized via create_scope_values /
// create_monitor_values.  Querying data_size()/pcs_size() freezes the
// recorder.
class DebugInformationRecorder: public ResourceObj {
 public:
  // keeps track of sharing of decoded debug info
  DebugInformationRecorder(OopRecorder* oop_recorder);
  // adds an oopmap at a specific offset
  void add_oopmap(int pc_offset, OopMap* map);
  // adds a jvm mapping at pc-offset, for a safepoint only
  void add_safepoint(int pc_offset, OopMap* map);
  // adds a jvm mapping at pc-offset, for a non-safepoint (profile point)
  void add_non_safepoint(int pc_offset);
  // Describes debug information for any given pc point
  // informs the debug information recorder about the entrance of a new scope
  void describe_scope(int         pc_offset,
                      ciMethod*   method,
                      int         bci,
                      bool        reexecute,
                      bool        is_method_handle_invoke = false,
                      bool        return_oop = false,
                      DebugToken* locals      = NULL,
                      DebugToken* expressions = NULL,
                      DebugToken* monitors    = NULL);
  void dump_object_pool(GrowableArray<ScopeValue*>* objects);
  // This call must follow every add_safepoint,
  // after any intervening describe_scope calls.
  void end_safepoint(int pc_offset)     { end_scopes(pc_offset, true); }
  void end_non_safepoint(int pc_offset) { end_scopes(pc_offset, false); }
  // helper fuctions for describe_scope to enable sharing
  DebugToken* create_scope_values(GrowableArray<ScopeValue*>* values);
  DebugToken* create_monitor_values(GrowableArray<MonitorValue*>* monitors);
  // returns the size of the generated scopeDescs.
  int data_size();
  int pcs_size();
  int oop_size()      { return oop_recorder()->oop_size(); }
  int metadata_size() { return oop_recorder()->metadata_size(); }
  // copy the generated debugging information to nmethod
  void copy_to(nmethod* nm);
  // verifies the debug information
  void verify(const nmethod* code);
  static void print_statistics() PRODUCT_RETURN;
  // Method for setting oopmaps to temporarily preserve old handling of oopmaps
  OopMapSet *_oopmaps;
  void set_oopmaps(OopMapSet *oopmaps) { _oopmaps = oopmaps; }
  OopRecorder* oop_recorder() { return _oop_recorder; }
  int last_pc_offset() { return last_pc()->pc_offset(); }
  bool recording_non_safepoints() { return _recording_non_safepoints; }
 private:
  friend class ScopeDesc;
  friend class vframeStreamCommon;
  friend class DIR_Chunk;
  // True if we are recording non-safepoint scopes (see
  // compute_recording_non_safepoints in debugInfoRec.cpp).
  const bool _recording_non_safepoints;
  DebugInfoWriteStream* _stream;
  DebugInfoWriteStream* stream() const { return _stream; }
  OopRecorder* _oop_recorder;
  // Scopes that have been described so far, for byte-span sharing.
  GrowableArray<DIR_Chunk*>* _all_chunks;
  GrowableArray<DIR_Chunk*>* _shared_chunks;
  DIR_Chunk* _next_chunk;        // bump-allocation pool cursor (DIR_Chunk::new)
  DIR_Chunk* _next_chunk_limit;
#ifdef ASSERT
  // Tracks the add_*/end_* pairing protocol.
  enum { rs_null, rs_safepoint, rs_non_safepoint };
  int _recording_state;
#endif
  PcDesc* _pcs;          // growable array of pc descriptors
  int     _pcs_size;     // capacity of _pcs
  int     _pcs_length;   // number of valid entries in _pcs
  // Offset of the last recorded safepoint; non-safepoints before it may
  // not be merged across it (see end_scopes).
  int     _prev_safepoint_pc;
  PcDesc* last_pc() {
    guarantee(_pcs_length > 0, "a safepoint must be declared already");
    return &_pcs[_pcs_length-1];
  }
  PcDesc* prev_pc() {
    guarantee(_pcs_length > 1, "a safepoint must be declared already");
    return &_pcs[_pcs_length-2];
  }
  void add_new_pc_offset(int pc_offset);
  void end_scopes(int pc_offset, bool is_safepoint);
  int  serialize_monitor_values(GrowableArray<MonitorValue*>* monitors);
  int  serialize_scope_values(GrowableArray<ScopeValue*>* values);
  int  find_sharable_decode_offset(int stream_offset);
#ifndef PRODUCT
  bool recorders_frozen();
  void mark_recorders_frozen();
#endif // PRODUCT
 public:
  // decode offset 0 is reserved to mean "absent" (see the 0xFF pad byte
  // written by the constructor).
  enum { serialized_null = 0 };
};
  4646. #endif // SHARE_VM_CODE_DEBUGINFOREC_HPP
  4647. C:\hotspot-69087d08d473\src\share\vm/code/dependencies.cpp
#include "precompiled.hpp"
#include "ci/ciArrayKlass.hpp"
#include "ci/ciEnv.hpp"
#include "ci/ciKlass.hpp"
#include "ci/ciMethod.hpp"
#include "code/dependencies.hpp"
#include "compiler/compileLog.hpp"
#include "oops/klass.hpp"
#include "oops/oop.inline.hpp"
#include "runtime/handles.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/thread.inline.hpp"
#include "utilities/copy.hpp"
#include <stdio.h>
  4661. #ifdef ASSERT
  4662. static bool must_be_in_vm() {
  4663. Thread* thread = Thread::current();
  4664. if (thread->is_Java_thread())
  4665. return ((JavaThread*)thread)->thread_state() == _thread_in_vm;
  4666. else
  4667. return true; //something like this: thread->is_VM_thread();
  4668. }
  4669. #endif //ASSERT
  4670. void Dependencies::initialize(ciEnv* env) {
  4671. Arena* arena = env->arena();
  4672. _oop_recorder = env->oop_recorder();
  4673. _log = env->log();
  4674. _dep_seen = new(arena) GrowableArray<int>(arena, 500, 0, 0);
  4675. DEBUG_ONLY(_deps[end_marker] = NULL);
  4676. for (int i = (int)FIRST_TYPE; i < (int)TYPE_LIMIT; i++) {
  4677. _deps[i] = new(arena) GrowableArray<ciBaseObject*>(arena, 10, 0, 0);
  4678. }
  4679. _content_bytes = NULL;
  4680. _size_in_bytes = (size_t)-1;
  4681. assert(TYPE_LIMIT <= (1<<LG2_TYPE_LIMIT), "sanity");
  4682. }
  4683. void Dependencies::assert_evol_method(ciMethod* m) {
  4684. assert_common_1(evol_method, m);
  4685. }
  4686. void Dependencies::assert_leaf_type(ciKlass* ctxk) {
  4687. if (ctxk->is_array_klass()) {
  4688. ciType* elemt = ctxk->as_array_klass()->base_element_type();
  4689. if (!elemt->is_instance_klass()) return; // Ex: int[][]
  4690. ctxk = elemt->as_instance_klass();
  4691. }
  4692. check_ctxk(ctxk);
  4693. assert_common_1(leaf_type, ctxk);
  4694. }
  4695. void Dependencies::assert_abstract_with_unique_concrete_subtype(ciKlass* ctxk, ciKlass* conck) {
  4696. check_ctxk_abstract(ctxk);
  4697. assert_common_2(abstract_with_unique_concrete_subtype, ctxk, conck);
  4698. }
  4699. void Dependencies::assert_abstract_with_no_concrete_subtype(ciKlass* ctxk) {
  4700. check_ctxk_abstract(ctxk);
  4701. assert_common_1(abstract_with_no_concrete_subtype, ctxk);
  4702. }
  4703. void Dependencies::assert_concrete_with_no_concrete_subtype(ciKlass* ctxk) {
  4704. check_ctxk_concrete(ctxk);
  4705. assert_common_1(concrete_with_no_concrete_subtype, ctxk);
  4706. }
  4707. void Dependencies::assert_unique_concrete_method(ciKlass* ctxk, ciMethod* uniqm) {
  4708. check_ctxk(ctxk);
  4709. assert_common_2(unique_concrete_method, ctxk, uniqm);
  4710. }
  4711. void Dependencies::assert_abstract_with_exclusive_concrete_subtypes(ciKlass* ctxk, ciKlass* k1, ciKlass* k2) {
  4712. check_ctxk(ctxk);
  4713. assert_common_3(abstract_with_exclusive_concrete_subtypes_2, ctxk, k1, k2);
  4714. }
  4715. void Dependencies::assert_exclusive_concrete_methods(ciKlass* ctxk, ciMethod* m1, ciMethod* m2) {
  4716. check_ctxk(ctxk);
  4717. assert_common_3(exclusive_concrete_methods_2, ctxk, m1, m2);
  4718. }
  4719. void Dependencies::assert_has_no_finalizable_subclasses(ciKlass* ctxk) {
  4720. check_ctxk(ctxk);
  4721. assert_common_1(no_finalizable_subclasses, ctxk);
  4722. }
  4723. void Dependencies::assert_call_site_target_value(ciCallSite* call_site, ciMethodHandle* method_handle) {
  4724. check_ctxk(call_site->klass());
  4725. assert_common_2(call_site_target_value, call_site, method_handle);
  4726. }
  4727. bool Dependencies::maybe_merge_ctxk(GrowableArray<ciBaseObject*>* deps,
  4728. int ctxk_i, ciKlass* ctxk2) {
  4729. ciKlass* ctxk1 = deps->at(ctxk_i)->as_metadata()->as_klass();
  4730. if (ctxk2->is_subtype_of(ctxk1)) {
  4731. return true; // success, and no need to change
  4732. } else if (ctxk1->is_subtype_of(ctxk2)) {
  4733. deps->at_put(ctxk_i, ctxk2);
  4734. return true;
  4735. } else {
  4736. return false;
  4737. }
  4738. }
  4739. void Dependencies::assert_common_1(DepType dept, ciBaseObject* x) {
  4740. assert(dep_args(dept) == 1, "sanity");
  4741. log_dependency(dept, x);
  4742. GrowableArray<ciBaseObject*>* deps = _deps[dept];
  4743. if (note_dep_seen(dept, x)) {
  4744. assert(deps->find(x) >= 0, "sanity");
  4745. } else {
  4746. deps->append(x);
  4747. }
  4748. }
  4749. void Dependencies::assert_common_2(DepType dept,
  4750. ciBaseObject* x0, ciBaseObject* x1) {
  4751. assert(dep_args(dept) == 2, "sanity");
  4752. log_dependency(dept, x0, x1);
  4753. GrowableArray<ciBaseObject*>* deps = _deps[dept];
  4754. bool has_ctxk = has_explicit_context_arg(dept);
  4755. if (has_ctxk) {
  4756. assert(dep_context_arg(dept) == 0, "sanity");
  4757. if (note_dep_seen(dept, x1)) {
  4758. const int stride = 2;
  4759. for (int i = deps->length(); (i -= stride) >= 0; ) {
  4760. ciBaseObject* y1 = deps->at(i+1);
  4761. if (x1 == y1) { // same subject; check the context
  4762. if (maybe_merge_ctxk(deps, i+0, x0->as_metadata()->as_klass())) {
  4763. return;
  4764. }
  4765. }
  4766. }
  4767. }
  4768. } else {
  4769. assert(dep_implicit_context_arg(dept) == 0, "sanity");
  4770. if (note_dep_seen(dept, x0) && note_dep_seen(dept, x1)) {
  4771. const int stride = 2;
  4772. for (int i = deps->length(); (i -= stride) >= 0; ) {
  4773. ciBaseObject* y0 = deps->at(i+0);
  4774. ciBaseObject* y1 = deps->at(i+1);
  4775. if (x0 == y0 && x1 == y1) {
  4776. return;
  4777. }
  4778. }
  4779. }
  4780. }
  4781. deps->append(x0);
  4782. deps->append(x1);
  4783. }
  4784. void Dependencies::assert_common_3(DepType dept,
  4785. ciKlass* ctxk, ciBaseObject* x, ciBaseObject* x2) {
  4786. assert(dep_context_arg(dept) == 0, "sanity");
  4787. assert(dep_args(dept) == 3, "sanity");
  4788. log_dependency(dept, ctxk, x, x2);
  4789. GrowableArray<ciBaseObject*>* deps = _deps[dept];
  4790. bool swap = false;
  4791. switch (dept) {
  4792. case abstract_with_exclusive_concrete_subtypes_2:
  4793. swap = (x->ident() > x2->ident() && x->as_metadata()->as_klass() != ctxk);
  4794. break;
  4795. case exclusive_concrete_methods_2:
  4796. swap = (x->ident() > x2->ident() && x->as_metadata()->as_method()->holder() != ctxk);
  4797. break;
  4798. }
  4799. if (swap) { ciBaseObject* t = x; x = x2; x2 = t; }
  4800. if (note_dep_seen(dept, x) && note_dep_seen(dept, x2)) {
  4801. const int stride = 3;
  4802. for (int i = deps->length(); (i -= stride) >= 0; ) {
  4803. ciBaseObject* y = deps->at(i+1);
  4804. ciBaseObject* y2 = deps->at(i+2);
  4805. if (x == y && x2 == y2) { // same subjects; check the context
  4806. if (maybe_merge_ctxk(deps, i+0, ctxk)) {
  4807. return;
  4808. }
  4809. }
  4810. }
  4811. }
  4812. deps->append(ctxk);
  4813. deps->append(x);
  4814. deps->append(x2);
  4815. }
  4816. void Dependencies::copy_to(nmethod* nm) {
  4817. address beg = nm->dependencies_begin();
  4818. address end = nm->dependencies_end();
  4819. guarantee(end - beg >= (ptrdiff_t) size_in_bytes(), "bad sizing");
  4820. Copy::disjoint_words((HeapWord*) content_bytes(),
  4821. (HeapWord*) beg,
  4822. size_in_bytes() / sizeof(HeapWord));
  4823. assert(size_in_bytes() % sizeof(HeapWord) == 0, "copy by words");
  4824. }
  4825. static int sort_dep(ciBaseObject** p1, ciBaseObject** p2, int narg) {
  4826. for (int i = 0; i < narg; i++) {
  4827. int diff = p1[i]->ident() - p2[i]->ident();
  4828. if (diff != 0) return diff;
  4829. }
  4830. return 0;
  4831. }
  4832. static int sort_dep_arg_1(ciBaseObject** p1, ciBaseObject** p2)
  4833. { return sort_dep(p1, p2, 1); }
  4834. static int sort_dep_arg_2(ciBaseObject** p1, ciBaseObject** p2)
  4835. { return sort_dep(p1, p2, 2); }
  4836. static int sort_dep_arg_3(ciBaseObject** p1, ciBaseObject** p2)
  4837. { return sort_dep(p1, p2, 3); }
  4838. void Dependencies::sort_all_deps() {
  4839. for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
  4840. DepType dept = (DepType)deptv;
  4841. GrowableArray<ciBaseObject*>* deps = _deps[dept];
  4842. if (deps->length() <= 1) continue;
  4843. switch (dep_args(dept)) {
  4844. case 1: deps->sort(sort_dep_arg_1, 1); break;
  4845. case 2: deps->sort(sort_dep_arg_2, 2); break;
  4846. case 3: deps->sort(sort_dep_arg_3, 3); break;
  4847. default: ShouldNotReachHere();
  4848. }
  4849. }
  4850. }
  4851. size_t Dependencies::estimate_size_in_bytes() {
  4852. size_t est_size = 100;
  4853. for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
  4854. DepType dept = (DepType)deptv;
  4855. GrowableArray<ciBaseObject*>* deps = _deps[dept];
  4856. est_size += deps->length()*2; // tags and argument(s)
  4857. }
  4858. return est_size;
  4859. }
  4860. ciKlass* Dependencies::ctxk_encoded_as_null(DepType dept, ciBaseObject* x) {
  4861. switch (dept) {
  4862. case abstract_with_exclusive_concrete_subtypes_2:
  4863. return x->as_metadata()->as_klass();
  4864. case unique_concrete_method:
  4865. case exclusive_concrete_methods_2:
  4866. return x->as_metadata()->as_method()->holder();
  4867. }
  4868. return NULL; // let NULL be NULL
  4869. }
  4870. Klass* Dependencies::ctxk_encoded_as_null(DepType dept, Metadata* x) {
  4871. assert(must_be_in_vm(), "raw oops here");
  4872. switch (dept) {
  4873. case abstract_with_exclusive_concrete_subtypes_2:
  4874. assert(x->is_klass(), "sanity");
  4875. return (Klass*) x;
  4876. case unique_concrete_method:
  4877. case exclusive_concrete_methods_2:
  4878. assert(x->is_method(), "sanity");
  4879. return ((Method*)x)->method_holder();
  4880. }
  4881. return NULL; // let NULL be NULL
  4882. }
  4883. void Dependencies::encode_content_bytes() {
  4884. sort_all_deps();
  4885. CompressedWriteStream bytes((int)estimate_size_in_bytes());
  4886. for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
  4887. DepType dept = (DepType)deptv;
  4888. GrowableArray<ciBaseObject*>* deps = _deps[dept];
  4889. if (deps->length() == 0) continue;
  4890. int stride = dep_args(dept);
  4891. int ctxkj = dep_context_arg(dept); // -1 if no context arg
  4892. assert(stride > 0, "sanity");
  4893. for (int i = 0; i < deps->length(); i += stride) {
  4894. jbyte code_byte = (jbyte)dept;
  4895. int skipj = -1;
  4896. if (ctxkj >= 0 && ctxkj+1 < stride) {
  4897. ciKlass* ctxk = deps->at(i+ctxkj+0)->as_metadata()->as_klass();
  4898. ciBaseObject* x = deps->at(i+ctxkj+1); // following argument
  4899. if (ctxk == ctxk_encoded_as_null(dept, x)) {
  4900. skipj = ctxkj; // we win: maybe one less oop to keep track of
  4901. code_byte |= default_context_type_bit;
  4902. }
  4903. }
  4904. bytes.write_byte(code_byte);
  4905. for (int j = 0; j < stride; j++) {
  4906. if (j == skipj) continue;
  4907. ciBaseObject* v = deps->at(i+j);
  4908. int idx;
  4909. if (v->is_object()) {
  4910. idx = _oop_recorder->find_index(v->as_object()->constant_encoding());
  4911. } else {
  4912. ciMetadata* meta = v->as_metadata();
  4913. idx = _oop_recorder->find_index(meta->constant_encoding());
  4914. }
  4915. bytes.write_int(idx);
  4916. }
  4917. }
  4918. }
  4919. bytes.write_byte(end_marker);
  4920. while (bytes.position() % sizeof(HeapWord) != 0) {
  4921. bytes.write_byte(end_marker);
  4922. }
  4923. assert((jbyte)default_context_type_bit != 0, "byte overflow");
  4924. _content_bytes = bytes.buffer();
  4925. _size_in_bytes = bytes.position();
  4926. }
  4927. const char* Dependencies::_dep_name[TYPE_LIMIT] = {
  4928. "end_marker",
  4929. "evol_method",
  4930. "leaf_type",
  4931. "abstract_with_unique_concrete_subtype",
  4932. "abstract_with_no_concrete_subtype",
  4933. "concrete_with_no_concrete_subtype",
  4934. "unique_concrete_method",
  4935. "abstract_with_exclusive_concrete_subtypes_2",
  4936. "exclusive_concrete_methods_2",
  4937. "no_finalizable_subclasses",
  4938. "call_site_target_value"
  4939. };
  4940. int Dependencies::_dep_args[TYPE_LIMIT] = {
  4941. -1,// end_marker
  4942. 1, // evol_method m
  4943. 1, // leaf_type ctxk
  4944. 2, // abstract_with_unique_concrete_subtype ctxk, k
  4945. 1, // abstract_with_no_concrete_subtype ctxk
  4946. 1, // concrete_with_no_concrete_subtype ctxk
  4947. 2, // unique_concrete_method ctxk, m
  4948. 3, // unique_concrete_subtypes_2 ctxk, k1, k2
  4949. 3, // unique_concrete_methods_2 ctxk, m1, m2
  4950. 1, // no_finalizable_subclasses ctxk
  4951. 2 // call_site_target_value call_site, method_handle
  4952. };
  4953. const char* Dependencies::dep_name(Dependencies::DepType dept) {
  4954. if (!dept_in_mask(dept, all_types)) return "?bad-dep?";
  4955. return _dep_name[dept];
  4956. }
  4957. int Dependencies::dep_args(Dependencies::DepType dept) {
  4958. if (!dept_in_mask(dept, all_types)) return -1;
  4959. return _dep_args[dept];
  4960. }
  4961. void Dependencies::check_valid_dependency_type(DepType dept) {
  4962. guarantee(FIRST_TYPE <= dept && dept < TYPE_LIMIT, err_msg("invalid dependency type: %d", (int) dept));
  4963. }
  4964. void Dependencies::log_all_dependencies() {
  4965. if (log() == NULL) return;
  4966. ResourceMark rm;
  4967. for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
  4968. DepType dept = (DepType)deptv;
  4969. GrowableArray<ciBaseObject*>* deps = _deps[dept];
  4970. int deplen = deps->length();
  4971. if (deplen == 0) {
  4972. continue;
  4973. }
  4974. int stride = dep_args(dept);
  4975. GrowableArray<ciBaseObject*>* ciargs = new GrowableArray<ciBaseObject*>(stride);
  4976. for (int i = 0; i < deps->length(); i += stride) {
  4977. for (int j = 0; j < stride; j++) {
  4978. ciargs->push(deps->at(i+j));
  4979. }
  4980. write_dependency_to(log(), dept, ciargs);
  4981. ciargs->clear();
  4982. }
  4983. guarantee(deplen == deps->length(), "deps array cannot grow inside nested ResoureMark scope");
  4984. }
  4985. }
  4986. void Dependencies::write_dependency_to(CompileLog* log,
  4987. DepType dept,
  4988. GrowableArray<DepArgument>* args,
  4989. Klass* witness) {
  4990. if (log == NULL) {
  4991. return;
  4992. }
  4993. ResourceMark rm;
  4994. ciEnv* env = ciEnv::current();
  4995. GrowableArray<ciBaseObject*>* ciargs = new GrowableArray<ciBaseObject*>(args->length());
  4996. for (GrowableArrayIterator<DepArgument> it = args->begin(); it != args->end(); ++it) {
  4997. DepArgument arg = *it;
  4998. if (arg.is_oop()) {
  4999. ciargs->push(env->get_object(arg.oop_value()));
  5000. } else {
  5001. ciargs->push(env->get_metadata(arg.metadata_value()));
  5002. }
  5003. }
  5004. int argslen = ciargs->length();
  5005. Dependencies::write_dependency_to(log, dept, ciargs, witness);
  5006. guarantee(argslen == ciargs->length(), "ciargs array cannot grow inside nested ResoureMark scope");
  5007. }
  5008. void Dependencies::write_dependency_to(CompileLog* log,
  5009. DepType dept,
  5010. GrowableArray<ciBaseObject*>* args,
  5011. Klass* witness) {
  5012. if (log == NULL) {
  5013. return;
  5014. }
  5015. ResourceMark rm;
  5016. GrowableArray<int>* argids = new GrowableArray<int>(args->length());
  5017. for (GrowableArrayIterator<ciBaseObject*> it = args->begin(); it != args->end(); ++it) {
  5018. ciBaseObject* obj = *it;
  5019. if (obj->is_object()) {
  5020. argids->push(log->identify(obj->as_object()));
  5021. } else {
  5022. argids->push(log->identify(obj->as_metadata()));
  5023. }
  5024. }
  5025. if (witness != NULL) {
  5026. log->begin_elem("dependency_failed");
  5027. } else {
  5028. log->begin_elem("dependency");
  5029. }
  5030. log->print(" type='%s'", dep_name(dept));
  5031. const int ctxkj = dep_context_arg(dept); // -1 if no context arg
  5032. if (ctxkj >= 0 && ctxkj < argids->length()) {
  5033. log->print(" ctxk='%d'", argids->at(ctxkj));
  5034. }
  5035. for (int j = 0; j < argids->length(); j++) {
  5036. if (j == ctxkj) continue; // already logged
  5037. if (j == 1) {
  5038. log->print( " x='%d'", argids->at(j));
  5039. } else {
  5040. log->print(" x%d='%d'", j, argids->at(j));
  5041. }
  5042. }
  5043. if (witness != NULL) {
  5044. log->object("witness", witness);
  5045. log->stamp();
  5046. }
  5047. log->end_elem();
  5048. }
  5049. void Dependencies::write_dependency_to(xmlStream* xtty,
  5050. DepType dept,
  5051. GrowableArray<DepArgument>* args,
  5052. Klass* witness) {
  5053. if (xtty == NULL) {
  5054. return;
  5055. }
  5056. ResourceMark rm;
  5057. ttyLocker ttyl;
  5058. int ctxkj = dep_context_arg(dept); // -1 if no context arg
  5059. if (witness != NULL) {
  5060. xtty->begin_elem("dependency_failed");
  5061. } else {
  5062. xtty->begin_elem("dependency");
  5063. }
  5064. xtty->print(" type='%s'", dep_name(dept));
  5065. if (ctxkj >= 0) {
  5066. xtty->object("ctxk", args->at(ctxkj).metadata_value());
  5067. }
  5068. for (int j = 0; j < args->length(); j++) {
  5069. if (j == ctxkj) continue; // already logged
  5070. DepArgument arg = args->at(j);
  5071. if (j == 1) {
  5072. if (arg.is_oop()) {
  5073. xtty->object("x", arg.oop_value());
  5074. } else {
  5075. xtty->object("x", arg.metadata_value());
  5076. }
  5077. } else {
  5078. char xn[12]; sprintf(xn, "x%d", j);
  5079. if (arg.is_oop()) {
  5080. xtty->object(xn, arg.oop_value());
  5081. } else {
  5082. xtty->object(xn, arg.metadata_value());
  5083. }
  5084. }
  5085. }
  5086. if (witness != NULL) {
  5087. xtty->object("witness", witness);
  5088. xtty->stamp();
  5089. }
  5090. xtty->end_elem();
  5091. }
  5092. void Dependencies::print_dependency(DepType dept, GrowableArray<DepArgument>* args,
  5093. Klass* witness) {
  5094. ResourceMark rm;
  5095. ttyLocker ttyl; // keep the following output all in one block
  5096. tty->print_cr("%s of type %s",
  5097. (witness == NULL)? "Dependency": "Failed dependency",
  5098. dep_name(dept));
  5099. int ctxkj = dep_context_arg(dept); // -1 if no context arg
  5100. for (int j = 0; j < args->length(); j++) {
  5101. DepArgument arg = args->at(j);
  5102. bool put_star = false;
  5103. if (arg.is_null()) continue;
  5104. const char* what;
  5105. if (j == ctxkj) {
  5106. assert(arg.is_metadata(), "must be");
  5107. what = "context";
  5108. put_star = !Dependencies::is_concrete_klass((Klass*)arg.metadata_value());
  5109. } else if (arg.is_method()) {
  5110. what = "method ";
  5111. put_star = !Dependencies::is_concrete_method((Method*)arg.metadata_value(), NULL);
  5112. } else if (arg.is_klass()) {
  5113. what = "class ";
  5114. } else {
  5115. what = "object ";
  5116. }
  5117. tty->print(" %s = %s", what, (put_star? "*": ""));
  5118. if (arg.is_klass())
  5119. tty->print("%s", ((Klass*)arg.metadata_value())->external_name());
  5120. else if (arg.is_method())
  5121. ((Method*)arg.metadata_value())->print_value();
  5122. else
  5123. ShouldNotReachHere(); // Provide impl for this type.
  5124. tty->cr();
  5125. }
  5126. if (witness != NULL) {
  5127. bool put_star = !Dependencies::is_concrete_klass(witness);
  5128. tty->print_cr(" witness = %s%s",
  5129. (put_star? "*": ""),
  5130. witness->external_name());
  5131. }
  5132. }
  5133. void Dependencies::DepStream::log_dependency(Klass* witness) {
  5134. if (_deps == NULL && xtty == NULL) return; // fast cutout for runtime
  5135. ResourceMark rm;
  5136. const int nargs = argument_count();
  5137. GrowableArray<DepArgument>* args = new GrowableArray<DepArgument>(nargs);
  5138. for (int j = 0; j < nargs; j++) {
  5139. if (type() == call_site_target_value) {
  5140. args->push(argument_oop(j));
  5141. } else {
  5142. args->push(argument(j));
  5143. }
  5144. }
  5145. int argslen = args->length();
  5146. if (_deps != NULL && _deps->log() != NULL) {
  5147. Dependencies::write_dependency_to(_deps->log(), type(), args, witness);
  5148. } else {
  5149. Dependencies::write_dependency_to(xtty, type(), args, witness);
  5150. }
  5151. guarantee(argslen == args->length(), "args array cannot grow inside nested ResoureMark scope");
  5152. }
  5153. void Dependencies::DepStream::print_dependency(Klass* witness, bool verbose) {
  5154. ResourceMark rm;
  5155. int nargs = argument_count();
  5156. GrowableArray<DepArgument>* args = new GrowableArray<DepArgument>(nargs);
  5157. for (int j = 0; j < nargs; j++) {
  5158. args->push(argument(j));
  5159. }
  5160. int argslen = args->length();
  5161. Dependencies::print_dependency(type(), args, witness);
  5162. if (verbose) {
  5163. if (_code != NULL) {
  5164. tty->print(" code: ");
  5165. _code->print_value_on(tty);
  5166. tty->cr();
  5167. }
  5168. }
  5169. guarantee(argslen == args->length(), "args array cannot grow inside nested ResoureMark scope");
  5170. }
  5171. #ifdef ASSERT
  5172. void Dependencies::DepStream::initial_asserts(size_t byte_limit) {
  5173. assert(must_be_in_vm(), "raw oops here");
  5174. _byte_limit = byte_limit;
  5175. _type = (DepType)(end_marker-1); // defeat "already at end" assert
  5176. assert((_code!=NULL) + (_deps!=NULL) == 1, "one or t'other");
  5177. }
  5178. #endif //ASSERT
  5179. bool Dependencies::DepStream::next() {
  5180. assert(_type != end_marker, "already at end");
  5181. if (_bytes.position() == 0 && _code != NULL
  5182. && _code->dependencies_size() == 0) {
  5183. return false;
  5184. }
  5185. int code_byte = (_bytes.read_byte() & 0xFF);
  5186. if (code_byte == end_marker) {
  5187. DEBUG_ONLY(_type = end_marker);
  5188. return false;
  5189. } else {
  5190. int ctxk_bit = (code_byte & Dependencies::default_context_type_bit);
  5191. code_byte -= ctxk_bit;
  5192. DepType dept = (DepType)code_byte;
  5193. _type = dept;
  5194. Dependencies::check_valid_dependency_type(dept);
  5195. int stride = _dep_args[dept];
  5196. assert(stride == dep_args(dept), "sanity");
  5197. int skipj = -1;
  5198. if (ctxk_bit != 0) {
  5199. skipj = 0; // currently the only context argument is at zero
  5200. assert(skipj == dep_context_arg(dept), "zero arg always ctxk");
  5201. }
  5202. for (int j = 0; j < stride; j++) {
  5203. _xi[j] = (j == skipj)? 0: _bytes.read_int();
  5204. }
  5205. DEBUG_ONLY(_xi[stride] = -1); // help detect overruns
  5206. return true;
  5207. }
  5208. }
  5209. inline Metadata* Dependencies::DepStream::recorded_metadata_at(int i) {
  5210. Metadata* o = NULL;
  5211. if (_code != NULL) {
  5212. o = _code->metadata_at(i);
  5213. } else {
  5214. o = _deps->oop_recorder()->metadata_at(i);
  5215. }
  5216. return o;
  5217. }
  5218. inline oop Dependencies::DepStream::recorded_oop_at(int i) {
  5219. return (_code != NULL)
  5220. ? _code->oop_at(i)
  5221. : JNIHandles::resolve(_deps->oop_recorder()->oop_at(i));
  5222. }
  5223. Metadata* Dependencies::DepStream::argument(int i) {
  5224. Metadata* result = recorded_metadata_at(argument_index(i));
  5225. if (result == NULL) { // Explicit context argument can be compressed
  5226. int ctxkj = dep_context_arg(type()); // -1 if no explicit context arg
  5227. if (ctxkj >= 0 && i == ctxkj && ctxkj+1 < argument_count()) {
  5228. result = ctxk_encoded_as_null(type(), argument(ctxkj+1));
  5229. }
  5230. }
  5231. assert(result == NULL || result->is_klass() || result->is_method(), "must be");
  5232. return result;
  5233. }
  5234. oop Dependencies::DepStream::argument_oop(int i) {
  5235. oop result = recorded_oop_at(argument_index(i));
  5236. assert(result == NULL || result->is_oop(), "must be");
  5237. return result;
  5238. }
  5239. Klass* Dependencies::DepStream::context_type() {
  5240. assert(must_be_in_vm(), "raw oops here");
  5241. {
  5242. int ctxkj = dep_context_arg(type()); // -1 if no explicit context arg
  5243. if (ctxkj >= 0) {
  5244. Metadata* k = argument(ctxkj);
  5245. assert(k != NULL && k->is_klass(), "type check");
  5246. return (Klass*)k;
  5247. }
  5248. }
  5249. {
  5250. int ctxkj = dep_implicit_context_arg(type());
  5251. if (ctxkj >= 0) {
  5252. Klass* k = argument_oop(ctxkj)->klass();
  5253. assert(k != NULL && k->is_klass(), "type check");
  5254. return (Klass*) k;
  5255. }
  5256. }
  5257. return NULL;
  5258. }
  5259. class ClassHierarchyWalker {
  5260. public:
  5261. enum { PARTICIPANT_LIMIT = 3 };
  5262. private:
  5263. Symbol* _name;
  5264. Symbol* _signature;
  5265. Klass* _participants[PARTICIPANT_LIMIT+1];
  5266. int _num_participants;
  5267. Method* _found_methods[PARTICIPANT_LIMIT+1];
  5268. int _record_witnesses;
  5269. void initialize(Klass* participant) {
  5270. _record_witnesses = 0;
  5271. _participants[0] = participant;
  5272. _found_methods[0] = NULL;
  5273. _num_participants = 0;
  5274. if (participant != NULL) {
  5275. _participants[1] = NULL;
  5276. _found_methods[1] = NULL;
  5277. _num_participants = 1;
  5278. }
  5279. }
  5280. void initialize_from_method(Method* m) {
  5281. assert(m != NULL && m->is_method(), "sanity");
  5282. _name = m->name();
  5283. _signature = m->signature();
  5284. }
  5285. public:
  5286. ClassHierarchyWalker(Klass* participant, Method* m) {
  5287. initialize_from_method(m);
  5288. initialize(participant);
  5289. }
  5290. ClassHierarchyWalker(Method* m) {
  5291. initialize_from_method(m);
  5292. initialize(NULL);
  5293. }
  5294. ClassHierarchyWalker(Klass* participant = NULL) {
  5295. _name = NULL;
  5296. _signature = NULL;
  5297. initialize(participant);
  5298. }
  5299. ClassHierarchyWalker(Klass* participants[], int num_participants) {
  5300. _name = NULL;
  5301. _signature = NULL;
  5302. initialize(NULL);
  5303. for (int i = 0; i < num_participants; ++i) {
  5304. add_participant(participants[i]);
  5305. }
  5306. }
  5307. bool doing_subtype_search() {
  5308. return _name == NULL;
  5309. }
  5310. int num_participants() { return _num_participants; }
  5311. Klass* participant(int n) {
  5312. assert((uint)n <= (uint)_num_participants, "oob");
  5313. return _participants[n];
  5314. }
  5315. Method* found_method(int n) {
  5316. assert((uint)n <= (uint)_num_participants, "oob");
  5317. Method* fm = _found_methods[n];
  5318. assert(n == _num_participants || fm != NULL, "proper usage");
  5319. if (fm != NULL && fm->method_holder() != _participants[n]) {
  5320. assert(fm->is_default_method(), "sanity");
  5321. return NULL;
  5322. }
  5323. return fm;
  5324. }
  5325. #ifdef ASSERT
  5326. bool check_method_context(Klass* ctxk, Method* m) {
  5327. if (m->method_holder() == ctxk)
  5328. return true; // Quick win.
  5329. if (m->is_private())
  5330. return false; // Quick lose. Should not happen.
  5331. if (!(m->is_public() || m->is_protected()))
  5332. return true; // Must punt the assertion to true.
  5333. Klass* k = ctxk;
  5334. Method* lm = k->lookup_method(m->name(), m->signature());
  5335. if (lm == NULL && k->oop_is_instance()) {
  5336. lm = ((InstanceKlass*)k)->lookup_method_in_ordered_interfaces(m->name(),
  5337. m->signature());
  5338. }
  5339. if (lm == m)
  5340. return true;
  5341. if (lm != NULL) {
  5342. if (!(lm->is_public() || lm->is_protected())) {
  5343. return true; // Must punt the assertion to true.
  5344. }
  5345. if (lm->is_static()) {
  5346. return true;
  5347. }
  5348. if ( !Dependencies::is_concrete_method(lm, k)
  5349. && !Dependencies::is_concrete_method(m, ctxk)
  5350. && lm->method_holder()->is_subtype_of(m->method_holder()))
  5351. return true;
  5352. }
  5353. ResourceMark rm;
  5354. tty->print_cr("Dependency method not found in the associated context:");
  5355. tty->print_cr(" context = %s", ctxk->external_name());
  5356. tty->print( " method = "); m->print_short_name(tty); tty->cr();
  5357. if (lm != NULL) {
  5358. tty->print( " found = "); lm->print_short_name(tty); tty->cr();
  5359. }
  5360. return false;
  5361. }
  5362. #endif
  5363. void add_participant(Klass* participant) {
  5364. assert(_num_participants + _record_witnesses < PARTICIPANT_LIMIT, "oob");
  5365. int np = _num_participants++;
  5366. _participants[np] = participant;
  5367. _participants[np+1] = NULL;
  5368. _found_methods[np+1] = NULL;
  5369. }
  5370. void record_witnesses(int add) {
  5371. if (add > PARTICIPANT_LIMIT) add = PARTICIPANT_LIMIT;
  5372. assert(_num_participants + add < PARTICIPANT_LIMIT, "oob");
  5373. _record_witnesses = add;
  5374. }
  5375. bool is_witness(Klass* k) {
  5376. if (doing_subtype_search()) {
  5377. return Dependencies::is_concrete_klass(k);
  5378. } else if (!k->oop_is_instance()) {
  5379. return false; // no methods to find in an array type
  5380. } else {
  5381. Method* m = InstanceKlass::cast(k)->find_instance_method(_name, _signature, Klass::skip_private);
  5382. if (!Dependencies::is_concrete_method(m, k)) {
  5383. if (!k->is_interface() && m != NULL && m->is_abstract()) {
  5384. ClassHierarchyWalker wf(_participants, _num_participants);
  5385. Klass* w = wf.find_witness_subtype(k);
  5386. if (w != NULL) {
  5387. Method* wm = InstanceKlass::cast(w)->find_instance_method(_name, _signature, Klass::skip_private);
  5388. if (!Dependencies::is_concrete_method(wm, w)) {
  5389. _found_methods[_num_participants] = m;
  5390. return true;
  5391. }
  5392. }
  5393. }
  5394. Array<Method*>* default_methods = InstanceKlass::cast(k)->default_methods();
  5395. if (default_methods == NULL)
  5396. return false;
  5397. m = InstanceKlass::cast(k)->find_method(default_methods, _name, _signature);
  5398. if (!Dependencies::is_concrete_method(m, NULL))
  5399. return false;
  5400. }
  5401. _found_methods[_num_participants] = m;
  5402. return true;
  5403. }
  5404. }
  5405. bool is_participant(Klass* k) {
  5406. if (k == _participants[0]) {
  5407. return true;
  5408. } else if (_num_participants <= 1) {
  5409. return false;
  5410. } else {
  5411. return in_list(k, &_participants[1]);
  5412. }
  5413. }
  5414. bool ignore_witness(Klass* witness) {
  5415. if (_record_witnesses == 0) {
  5416. return false;
  5417. } else {
  5418. --_record_witnesses;
  5419. add_participant(witness);
  5420. return true;
  5421. }
  5422. }
  5423. static bool in_list(Klass* x, Klass** list) {
  5424. for (int i = 0; ; i++) {
  5425. Klass* y = list[i];
  5426. if (y == NULL) break;
  5427. if (y == x) return true;
  5428. }
  5429. return false; // not in list
  5430. }
  5431. private:
  5432. Klass* find_witness_anywhere(Klass* context_type,
  5433. bool participants_hide_witnesses,
  5434. bool top_level_call = true);
  5435. Klass* find_witness_in(KlassDepChange& changes,
  5436. Klass* context_type,
  5437. bool participants_hide_witnesses);
  5438. public:
  5439. bool witnessed_reabstraction_in_supers(Klass* k);
  5440. Klass* find_witness_subtype(Klass* context_type, KlassDepChange* changes = NULL) {
  5441. assert(doing_subtype_search(), "must set up a subtype search");
  5442. const bool participants_hide_witnesses = true;
  5443. if (changes != NULL) {
  5444. return find_witness_in(*changes, context_type, participants_hide_witnesses);
  5445. } else {
  5446. return find_witness_anywhere(context_type, participants_hide_witnesses);
  5447. }
  5448. }
  5449. Klass* find_witness_definer(Klass* context_type, KlassDepChange* changes = NULL) {
  5450. assert(!doing_subtype_search(), "must set up a method definer search");
  5451. const bool participants_hide_witnesses = true;
  5452. if (changes != NULL) {
  5453. return find_witness_in(*changes, context_type, !participants_hide_witnesses);
  5454. } else {
  5455. return find_witness_anywhere(context_type, !participants_hide_witnesses);
  5456. }
  5457. }
  5458. };
  5459. #ifndef PRODUCT
  5460. static int deps_find_witness_calls = 0;
  5461. static int deps_find_witness_steps = 0;
  5462. static int deps_find_witness_recursions = 0;
  5463. static int deps_find_witness_singles = 0;
  5464. static int deps_find_witness_print = 0; // set to -1 to force a final print
  5465. static bool count_find_witness_calls() {
  5466. if (TraceDependencies || LogCompilation) {
  5467. int pcount = deps_find_witness_print + 1;
  5468. bool final_stats = (pcount == 0);
  5469. bool initial_call = (pcount == 1);
  5470. bool occasional_print = ((pcount & ((1<<10) - 1)) == 0);
  5471. if (pcount < 0) pcount = 1; // crude overflow protection
  5472. deps_find_witness_print = pcount;
  5473. if (VerifyDependencies && initial_call) {
  5474. tty->print_cr("Warning: TraceDependencies results may be inflated by VerifyDependencies");
  5475. }
  5476. if (occasional_print || final_stats) {
  5477. if (xtty != NULL) {
  5478. ttyLocker ttyl;
  5479. xtty->elem("deps_find_witness calls='%d' steps='%d' recursions='%d' singles='%d'",
  5480. deps_find_witness_calls,
  5481. deps_find_witness_steps,
  5482. deps_find_witness_recursions,
  5483. deps_find_witness_singles);
  5484. }
  5485. if (final_stats || (TraceDependencies && WizardMode)) {
  5486. ttyLocker ttyl;
  5487. tty->print_cr("Dependency check (find_witness) "
  5488. "calls=%d, steps=%d (avg=%.1f), recursions=%d, singles=%d",
  5489. deps_find_witness_calls,
  5490. deps_find_witness_steps,
  5491. (double)deps_find_witness_steps / deps_find_witness_calls,
  5492. deps_find_witness_recursions,
  5493. deps_find_witness_singles);
  5494. }
  5495. }
  5496. return true;
  5497. }
  5498. return false;
  5499. }
  5500. #else
  5501. #define count_find_witness_calls() (0)
  5502. #endif //PRODUCT
  5503. Klass* ClassHierarchyWalker::find_witness_in(KlassDepChange& changes,
  5504. Klass* context_type,
  5505. bool participants_hide_witnesses) {
  5506. assert(changes.involves_context(context_type), "irrelevant dependency");
  5507. Klass* new_type = changes.new_type();
  5508. (void)count_find_witness_calls();
  5509. NOT_PRODUCT(deps_find_witness_singles++);
  5510. assert(must_be_in_vm(), "raw oops here");
  5511. assert_locked_or_safepoint(Compile_lock);
  5512. int nof_impls = InstanceKlass::cast(context_type)->nof_implementors();
  5513. if (nof_impls > 1) {
  5514. return context_type;
  5515. }
  5516. assert(!is_participant(new_type), "only old classes are participants");
  5517. if (participants_hide_witnesses) {
  5518. for (int i = 0; i < num_participants(); i++) {
  5519. Klass* part = participant(i);
  5520. if (part == NULL) continue;
  5521. assert(changes.involves_context(part) == new_type->is_subtype_of(part),
  5522. "correct marking of participants, b/c new_type is unique");
  5523. if (changes.involves_context(part)) {
  5524. return NULL;
  5525. }
  5526. }
  5527. }
  5528. if (is_witness(new_type) && !ignore_witness(new_type)) {
  5529. return new_type;
  5530. }
  5531. return NULL;
  5532. }
  5533. Klass* ClassHierarchyWalker::find_witness_anywhere(Klass* context_type,
  5534. bool participants_hide_witnesses,
  5535. bool top_level_call) {
  5536. assert(must_be_in_vm(), "raw oops here");
  5537. assert_locked_or_safepoint(Compile_lock);
  5538. bool do_counts = count_find_witness_calls();
  5539. if (top_level_call) {
  5540. if (do_counts) {
  5541. NOT_PRODUCT(deps_find_witness_calls++);
  5542. NOT_PRODUCT(deps_find_witness_steps++);
  5543. }
  5544. if (is_participant(context_type)) {
  5545. if (participants_hide_witnesses) return NULL;
  5546. } else if (is_witness(context_type) && !ignore_witness(context_type)) {
  5547. return context_type;
  5548. }
  5549. }
  5550. const int CHAINMAX = 100; // >= 1 + InstanceKlass::implementors_limit
  5551. Klass* chains[CHAINMAX];
  5552. int chaini = 0; // index into worklist
  5553. Klass* chain; // scratch variable
  5554. #define ADD_SUBCLASS_CHAIN(k) { \
  5555. assert(chaini < CHAINMAX, "oob"); \
  5556. chain = k->subklass(); \
  5557. if (chain != NULL) chains[chaini++] = chain; }
  5558. ADD_SUBCLASS_CHAIN(context_type);
  5559. if (top_level_call) {
  5560. int nof_impls = InstanceKlass::cast(context_type)->nof_implementors();
  5561. if (nof_impls > 1) {
  5562. return context_type;
  5563. }
  5564. if (nof_impls > 0) {
  5565. Klass* impl = InstanceKlass::cast(context_type)->implementor();
  5566. assert(impl != NULL, "just checking");
  5567. if (impl == context_type) {
  5568. return context_type; // report an inexact witness to this sad affair
  5569. }
  5570. if (do_counts)
  5571. { NOT_PRODUCT(deps_find_witness_steps++); }
  5572. if (is_participant(impl)) {
  5573. if (!participants_hide_witnesses) {
  5574. ADD_SUBCLASS_CHAIN(impl);
  5575. }
  5576. } else if (is_witness(impl) && !ignore_witness(impl)) {
  5577. return impl;
  5578. } else {
  5579. ADD_SUBCLASS_CHAIN(impl);
  5580. }
  5581. }
  5582. }
  5583. while (chaini > 0) {
  5584. Klass* chain = chains[--chaini];
  5585. for (Klass* sub = chain; sub != NULL; sub = sub->next_sibling()) {
  5586. if (do_counts) { NOT_PRODUCT(deps_find_witness_steps++); }
  5587. if (is_participant(sub)) {
  5588. if (participants_hide_witnesses) continue;
  5589. } else if (is_witness(sub) && !ignore_witness(sub)) {
  5590. return sub;
  5591. }
  5592. if (chaini < (VerifyDependencies? 2: CHAINMAX)) {
  5593. ADD_SUBCLASS_CHAIN(sub);
  5594. } else {
  5595. if (do_counts) { NOT_PRODUCT(deps_find_witness_recursions++); }
  5596. Klass* witness = find_witness_anywhere(sub,
  5597. participants_hide_witnesses,
  5598. if (witness != NULL) return witness;
  5599. }
  5600. }
  5601. }
  5602. return NULL;
  5603. #undef ADD_SUBCLASS_CHAIN
  5604. }
  5605. bool ClassHierarchyWalker::witnessed_reabstraction_in_supers(Klass* k) {
  5606. if (!k->oop_is_instance()) {
  5607. return false; // no methods to find in an array type
  5608. } else {
  5609. if (Dependencies::is_concrete_klass(k) && !k->is_interface()) {
  5610. Method* m = InstanceKlass::cast(k)->find_instance_method(_name, _signature, Klass::skip_private);
  5611. if (m != NULL) {
  5612. return false; // no reabstraction possible: local method found
  5613. }
  5614. for (InstanceKlass* super = InstanceKlass::cast(k)->java_super(); super != NULL; super = super->java_super()) {
  5615. m = super->find_instance_method(_name, _signature, Klass::skip_private);
  5616. if (m != NULL) { // inherited method found
  5617. if (m->is_abstract() || m->is_overpass()) {
  5618. _found_methods[_num_participants] = m;
  5619. return true; // abstract method found
  5620. }
  5621. return false;
  5622. }
  5623. }
  5624. assert(false, "root method not found");
  5625. return true;
  5626. }
  5627. return false;
  5628. }
  5629. }
  5630. bool Dependencies::is_concrete_klass(Klass* k) {
  5631. if (k->is_abstract()) return false;
  5632. return true;
  5633. }
  5634. bool Dependencies::is_concrete_method(Method* m, Klass * k) {
  5635. return ! ( m == NULL || m -> is_static() || m -> is_abstract() ||
  5636. m->is_overpass() && k != NULL && k -> is_abstract() );
  5637. }
  5638. Klass* Dependencies::find_finalizable_subclass(Klass* k) {
  5639. if (k->is_interface()) return NULL;
  5640. if (k->has_finalizer()) return k;
  5641. k = k->subklass();
  5642. while (k != NULL) {
  5643. Klass* result = find_finalizable_subclass(k);
  5644. if (result != NULL) return result;
  5645. k = k->next_sibling();
  5646. }
  5647. return NULL;
  5648. }
  5649. bool Dependencies::is_concrete_klass(ciInstanceKlass* k) {
  5650. if (k->is_abstract()) return false;
  5651. return true;
  5652. }
  5653. bool Dependencies::has_finalizable_subclass(ciInstanceKlass* k) {
  5654. return k->has_finalizable_subclass();
  5655. }
  5656. Klass* Dependencies::check_evol_method(Method* m) {
  5657. assert(must_be_in_vm(), "raw oops here");
  5658. if (m->is_old()
  5659. || m->number_of_breakpoints() > 0) {
  5660. return m->method_holder();
  5661. } else {
  5662. return NULL;
  5663. }
  5664. }
  5665. Klass* Dependencies::check_leaf_type(Klass* ctxk) {
  5666. assert(must_be_in_vm(), "raw oops here");
  5667. assert_locked_or_safepoint(Compile_lock);
  5668. InstanceKlass* ctx = InstanceKlass::cast(ctxk);
  5669. Klass* sub = ctx->subklass();
  5670. if (sub != NULL) {
  5671. return sub;
  5672. } else if (ctx->nof_implementors() != 0) {
  5673. Klass* impl = ctx->implementor();
  5674. assert(impl != NULL, "must be set");
  5675. return impl;
  5676. } else {
  5677. return NULL;
  5678. }
  5679. }
  5680. Klass* Dependencies::check_abstract_with_unique_concrete_subtype(Klass* ctxk,
  5681. Klass* conck,
  5682. KlassDepChange* changes) {
  5683. ClassHierarchyWalker wf(conck);
  5684. return wf.find_witness_subtype(ctxk, changes);
  5685. }
  5686. Klass* Dependencies::check_abstract_with_no_concrete_subtype(Klass* ctxk,
  5687. KlassDepChange* changes) {
  5688. ClassHierarchyWalker wf;
  5689. return wf.find_witness_subtype(ctxk, changes);
  5690. }
  5691. Klass* Dependencies::check_concrete_with_no_concrete_subtype(Klass* ctxk,
  5692. KlassDepChange* changes) {
  5693. ClassHierarchyWalker wf(ctxk);
  5694. return wf.find_witness_subtype(ctxk, changes);
  5695. }

// ---- Extraction artifact: trailing blog-footer text, not part of the source file ----
// 声明:本文内容由网友自发贡献,不代表【wpsshop博客】立场,版权归原作者所有,本站不承担相应法律责任。如您发现有侵权的内容,请联系我们。转载请注明出处:https://www.wpsshop.cn/w/你好赵伟/article/detail/254907
// 推荐阅读 / 相关标签 / 闽ICP备14008679号