diff --git a/src/hotspot/cpu/aarch64/jvmciCodeInstaller_aarch64.cpp b/src/hotspot/cpu/aarch64/jvmciCodeInstaller_aarch64.cpp index fdfab3ab5623b..18095632ac096 100644 --- a/src/hotspot/cpu/aarch64/jvmciCodeInstaller_aarch64.cpp +++ b/src/hotspot/cpu/aarch64/jvmciCodeInstaller_aarch64.cpp @@ -34,6 +34,9 @@ #include "runtime/jniHandles.hpp" #include "runtime/sharedRuntime.hpp" #include "vmreg_aarch64.inline.hpp" +#if INCLUDE_ZGC +#include "gc/z/zBarrierSetAssembler.hpp" +#endif jint CodeInstaller::pd_next_offset(NativeInstruction* inst, jint pc_offset, JVMCI_TRAPS) { if (inst->is_call() || inst->is_jump() || inst->is_blr()) { @@ -164,24 +167,35 @@ void CodeInstaller::pd_relocate_JavaMethod(CodeBuffer &cbuf, methodHandle& metho } } -void CodeInstaller::pd_relocate_poll(address pc, jint mark, JVMCI_TRAPS) { +bool CodeInstaller::pd_relocate(address pc, jint mark) { switch (mark) { case POLL_NEAR: - JVMCI_ERROR("unimplemented"); - break; + // This is unhandled and will be reported by the caller + return false; case POLL_FAR: _instructions->relocate(pc, relocInfo::poll_type); - break; + return true; case POLL_RETURN_NEAR: - JVMCI_ERROR("unimplemented"); - break; + // This is unhandled and will be reported by the caller + return false; case POLL_RETURN_FAR: _instructions->relocate(pc, relocInfo::poll_return_type); - break; - default: - JVMCI_ERROR("invalid mark value"); - break; + return true; + case Z_BARRIER_RELOCATION_FORMAT_LOAD_GOOD_BEFORE_TB_X: + _instructions->relocate(pc, barrier_Relocation::spec(), ZBarrierRelocationFormatLoadGoodBeforeTbX); + return true; + case Z_BARRIER_RELOCATION_FORMAT_MARK_BAD_BEFORE_MOV: + _instructions->relocate(pc, barrier_Relocation::spec(), ZBarrierRelocationFormatMarkBadBeforeMov); + return true; + case Z_BARRIER_RELOCATION_FORMAT_STORE_GOOD_BEFORE_MOV: + _instructions->relocate(pc, barrier_Relocation::spec(), ZBarrierRelocationFormatStoreGoodBeforeMov); + return true; + case Z_BARRIER_RELOCATION_FORMAT_STORE_BAD_BEFORE_MOV: + 
_instructions->relocate(pc, barrier_Relocation::spec(), ZBarrierRelocationFormatStoreBadBeforeMov); + return true; + } + return false; } // convert JVMCI register indices (as used in oop maps) to HotSpot registers diff --git a/src/hotspot/cpu/riscv/jvmciCodeInstaller_riscv.cpp b/src/hotspot/cpu/riscv/jvmciCodeInstaller_riscv.cpp index 35bfbb1df8ead..ba3d9c99aceb1 100644 --- a/src/hotspot/cpu/riscv/jvmciCodeInstaller_riscv.cpp +++ b/src/hotspot/cpu/riscv/jvmciCodeInstaller_riscv.cpp @@ -105,8 +105,9 @@ void CodeInstaller::pd_relocate_JavaMethod(CodeBuffer &cbuf, methodHandle& metho Unimplemented(); } -void CodeInstaller::pd_relocate_poll(address pc, jint mark, JVMCI_TRAPS) { +bool CodeInstaller::pd_relocate(address pc, jint mark) { Unimplemented(); + return false; } // convert JVMCI register indices (as used in oop maps) to HotSpot registers diff --git a/src/hotspot/cpu/x86/jvmciCodeInstaller_x86.cpp b/src/hotspot/cpu/x86/jvmciCodeInstaller_x86.cpp index 09056b374ad29..94708d4224379 100644 --- a/src/hotspot/cpu/x86/jvmciCodeInstaller_x86.cpp +++ b/src/hotspot/cpu/x86/jvmciCodeInstaller_x86.cpp @@ -39,6 +39,9 @@ #include "classfile/vmSymbols.hpp" #include "code/vmreg.hpp" #include "vmreg_x86.inline.hpp" +#if INCLUDE_ZGC +#include "gc/z/zBarrierSetAssembler.hpp" +#endif jint CodeInstaller::pd_next_offset(NativeInstruction* inst, jint pc_offset, JVMCI_TRAPS) { if (inst->is_call() || inst->is_jump()) { @@ -197,7 +200,7 @@ void CodeInstaller::pd_relocate_JavaMethod(CodeBuffer &, methodHandle& method, j } } -void CodeInstaller::pd_relocate_poll(address pc, jint mark, JVMCI_TRAPS) { +bool CodeInstaller::pd_relocate(address pc, jint mark) { switch (mark) { case POLL_NEAR: case POLL_FAR: @@ -206,15 +209,35 @@ void CodeInstaller::pd_relocate_poll(address pc, jint mark, JVMCI_TRAPS) { // so that poll_Relocation::fix_relocation_after_move does the right // thing (i.e. 
ignores this relocation record) _instructions->relocate(pc, relocInfo::poll_type, Assembler::imm_operand); - break; + return true; case POLL_RETURN_NEAR: case POLL_RETURN_FAR: // see comment above for POLL_FAR _instructions->relocate(pc, relocInfo::poll_return_type, Assembler::imm_operand); - break; + return true; + case Z_BARRIER_RELOCATION_FORMAT_LOAD_GOOD_BEFORE_SHL: + _instructions->relocate(pc, barrier_Relocation::spec(), ZBarrierRelocationFormatLoadGoodBeforeShl); + return true; + case Z_BARRIER_RELOCATION_FORMAT_LOAD_BAD_AFTER_TEST: + _instructions->relocate(pc, barrier_Relocation::spec(), ZBarrierRelocationFormatLoadBadAfterTest); + return true; + case Z_BARRIER_RELOCATION_FORMAT_MARK_BAD_AFTER_TEST: + _instructions->relocate(pc, barrier_Relocation::spec(), ZBarrierRelocationFormatMarkBadAfterTest); + return true; + case Z_BARRIER_RELOCATION_FORMAT_STORE_GOOD_AFTER_CMP: + _instructions->relocate(pc, barrier_Relocation::spec(), ZBarrierRelocationFormatStoreGoodAfterCmp); + return true; + case Z_BARRIER_RELOCATION_FORMAT_STORE_BAD_AFTER_TEST: + _instructions->relocate(pc, barrier_Relocation::spec(), ZBarrierRelocationFormatStoreBadAfterTest); + return true; + case Z_BARRIER_RELOCATION_FORMAT_STORE_GOOD_AFTER_OR: + _instructions->relocate(pc, barrier_Relocation::spec(), ZBarrierRelocationFormatStoreGoodAfterOr); + return true; + case Z_BARRIER_RELOCATION_FORMAT_STORE_GOOD_AFTER_MOV: + _instructions->relocate(pc, barrier_Relocation::spec(), ZBarrierRelocationFormatStoreGoodAfterMov); + return true; default: - JVMCI_ERROR("invalid mark value: %d", mark); - break; + return false; } } diff --git a/src/hotspot/share/code/nmethod.cpp b/src/hotspot/share/code/nmethod.cpp index bcfbae49fd992..7f91f69c9e38b 100644 --- a/src/hotspot/share/code/nmethod.cpp +++ b/src/hotspot/share/code/nmethod.cpp @@ -3638,10 +3638,22 @@ const char* nmethod::reloc_string_for(u_char* begin, u_char* end) { case relocInfo::poll_type: return "poll"; case relocInfo::poll_return_type: return 
"poll_return"; case relocInfo::trampoline_stub_type: return "trampoline_stub"; + case relocInfo::entry_guard_type: return "entry_guard"; + case relocInfo::post_call_nop_type: return "post_call_nop"; + case relocInfo::barrier_type: { + barrier_Relocation* const reloc = iter.barrier_reloc(); + stringStream st; + st.print("barrier format=%d", reloc->format()); + return st.as_string(); + } + case relocInfo::type_mask: return "type_bit_mask"; - default: - break; + default: { + stringStream st; + st.print("unknown relocInfo=%d", (int) iter.type()); + return st.as_string(); + } } } return have_one ? "other" : nullptr; diff --git a/src/hotspot/share/gc/z/zBarrier.hpp b/src/hotspot/share/gc/z/zBarrier.hpp index 4a271c1469fc9..fa0fbbcd88f95 100644 --- a/src/hotspot/share/gc/z/zBarrier.hpp +++ b/src/hotspot/share/gc/z/zBarrier.hpp @@ -151,6 +151,8 @@ class ZBarrier : public AllStatic { static zaddress load_barrier_on_oop_field(volatile zpointer* p); static zaddress load_barrier_on_oop_field_preloaded(volatile zpointer* p, zpointer o); + static void load_barrier_on_oop_array(volatile zpointer* p, size_t length); + static zaddress keep_alive_load_barrier_on_oop_field_preloaded(volatile zpointer* p, zpointer o); // Load barriers on non-strong oop refs diff --git a/src/hotspot/share/gc/z/zBarrier.inline.hpp b/src/hotspot/share/gc/z/zBarrier.inline.hpp index 2c81c14865b51..b3191e9ae3f7a 100644 --- a/src/hotspot/share/gc/z/zBarrier.inline.hpp +++ b/src/hotspot/share/gc/z/zBarrier.inline.hpp @@ -475,6 +475,12 @@ inline zaddress ZBarrier::keep_alive_load_barrier_on_oop_field_preloaded(volatil return barrier(is_mark_good_fast_path, keep_alive_slow_path, color_mark_good, p, o); } +inline void ZBarrier::load_barrier_on_oop_array(volatile zpointer* p, size_t length) { + for (volatile const zpointer* const end = p + length; p < end; p++) { + load_barrier_on_oop_field(p); + } +} + // // Load barrier on non-strong oop refs // diff --git a/src/hotspot/share/gc/z/zBarrierSetRuntime.cpp 
b/src/hotspot/share/gc/z/zBarrierSetRuntime.cpp index da7adf7cc3a80..b41fec3d0a552 100644 --- a/src/hotspot/share/gc/z/zBarrierSetRuntime.cpp +++ b/src/hotspot/share/gc/z/zBarrierSetRuntime.cpp @@ -63,6 +63,10 @@ JRT_LEAF(void, ZBarrierSetRuntime::store_barrier_on_native_oop_field_without_hea ZBarrier::store_barrier_on_native_oop_field((zpointer*)p, false /* heal */); JRT_END +JRT_LEAF(void, ZBarrierSetRuntime::load_barrier_on_oop_array(oop* p, size_t length)) + ZBarrier::load_barrier_on_oop_array((zpointer*)p, length); +JRT_END + JRT_LEAF(void, ZBarrierSetRuntime::clone(oopDesc* src, oopDesc* dst, size_t size)) HeapAccess<>::clone(src, dst, size); JRT_END @@ -126,6 +130,10 @@ address ZBarrierSetRuntime::store_barrier_on_native_oop_field_without_healing_ad return reinterpret_cast
<address>(store_barrier_on_native_oop_field_without_healing); } +address ZBarrierSetRuntime::load_barrier_on_oop_array_addr() { + return reinterpret_cast<address>
(load_barrier_on_oop_array); +} + address ZBarrierSetRuntime::clone_addr() { return reinterpret_cast<address>
(clone); } diff --git a/src/hotspot/share/gc/z/zBarrierSetRuntime.hpp b/src/hotspot/share/gc/z/zBarrierSetRuntime.hpp index a569ff3c15818..8a81f162bf1ef 100644 --- a/src/hotspot/share/gc/z/zBarrierSetRuntime.hpp +++ b/src/hotspot/share/gc/z/zBarrierSetRuntime.hpp @@ -41,6 +41,7 @@ class ZBarrierSetRuntime : public AllStatic { static void store_barrier_on_oop_field_with_healing(oop* p); static void store_barrier_on_oop_field_without_healing(oop* p); static void store_barrier_on_native_oop_field_without_healing(oop* p); + static void load_barrier_on_oop_array(oop* p, size_t length); static void clone(oopDesc* src, oopDesc* dst, size_t size); public: @@ -54,6 +55,7 @@ class ZBarrierSetRuntime : public AllStatic { static address store_barrier_on_oop_field_with_healing_addr(); static address store_barrier_on_oop_field_without_healing_addr(); static address store_barrier_on_native_oop_field_without_healing_addr(); + static address load_barrier_on_oop_array_addr(); static address clone_addr(); }; diff --git a/src/hotspot/share/jvmci/jvmciCodeInstaller.cpp b/src/hotspot/share/jvmci/jvmciCodeInstaller.cpp index 52a060427d5d4..c62257bd23b1c 100644 --- a/src/hotspot/share/jvmci/jvmciCodeInstaller.cpp +++ b/src/hotspot/share/jvmci/jvmciCodeInstaller.cpp @@ -1297,6 +1297,10 @@ void CodeInstaller::site_Mark(CodeBuffer& buffer, jint pc_offset, HotSpotCompile u1 id = stream->read_u1("mark:id"); address pc = _instructions->start() + pc_offset; + if (pd_relocate(pc, id)) { + return; + } + switch (id) { case UNVERIFIED_ENTRY: _offsets.set_value(CodeOffsets::Entry, pc_offset); @@ -1330,12 +1334,6 @@ void CodeInstaller::site_Mark(CodeBuffer& buffer, jint pc_offset, HotSpotCompile _next_call_type = (MarkId) id; _invoke_mark_pc = pc; break; - case POLL_NEAR: - case POLL_FAR: - case POLL_RETURN_NEAR: - case POLL_RETURN_FAR: - pd_relocate_poll(pc, id, JVMCI_CHECK); - break; case CARD_TABLE_SHIFT: case CARD_TABLE_ADDRESS: case HEAP_TOP_ADDRESS: @@ -1350,6 +1348,7 @@ void 
CodeInstaller::site_Mark(CodeBuffer& buffer, jint pc_offset, HotSpotCompile case VERIFY_OOP_MASK: case VERIFY_OOP_COUNT_ADDRESS: break; + default: JVMCI_ERROR("invalid mark id: %d%s", id, stream->context()); break; diff --git a/src/hotspot/share/jvmci/jvmciCodeInstaller.hpp b/src/hotspot/share/jvmci/jvmciCodeInstaller.hpp index 98fed480bf1d7..0cb7f28748053 100644 --- a/src/hotspot/share/jvmci/jvmciCodeInstaller.hpp +++ b/src/hotspot/share/jvmci/jvmciCodeInstaller.hpp @@ -176,6 +176,23 @@ class CodeInstaller : public StackObj { VERIFY_OOP_BITS, VERIFY_OOP_MASK, VERIFY_OOP_COUNT_ADDRESS, + +#ifdef X86 + Z_BARRIER_RELOCATION_FORMAT_LOAD_GOOD_BEFORE_SHL, + Z_BARRIER_RELOCATION_FORMAT_LOAD_BAD_AFTER_TEST, + Z_BARRIER_RELOCATION_FORMAT_MARK_BAD_AFTER_TEST, + Z_BARRIER_RELOCATION_FORMAT_STORE_GOOD_AFTER_CMP, + Z_BARRIER_RELOCATION_FORMAT_STORE_BAD_AFTER_TEST, + Z_BARRIER_RELOCATION_FORMAT_STORE_GOOD_AFTER_OR, + Z_BARRIER_RELOCATION_FORMAT_STORE_GOOD_AFTER_MOV, +#endif +#ifdef AARCH64 + Z_BARRIER_RELOCATION_FORMAT_LOAD_GOOD_BEFORE_TB_X, + Z_BARRIER_RELOCATION_FORMAT_MARK_BAD_BEFORE_MOV, + Z_BARRIER_RELOCATION_FORMAT_STORE_GOOD_BEFORE_MOV, + Z_BARRIER_RELOCATION_FORMAT_STORE_BAD_BEFORE_MOV, +#endif + INVOKE_INVALID = -1 }; @@ -312,7 +329,7 @@ class CodeInstaller : public StackObj { void pd_patch_DataSectionReference(int pc_offset, int data_offset, JVMCI_TRAPS); void pd_relocate_ForeignCall(NativeInstruction* inst, jlong foreign_call_destination, JVMCI_TRAPS); void pd_relocate_JavaMethod(CodeBuffer &cbuf, methodHandle& method, jint pc_offset, JVMCI_TRAPS); - void pd_relocate_poll(address pc, jint mark, JVMCI_TRAPS); + bool pd_relocate(address pc, jint mark); public: diff --git a/src/hotspot/share/jvmci/jvmciCompilerToVM.cpp b/src/hotspot/share/jvmci/jvmciCompilerToVM.cpp index fb06abe9174ef..d92d193017362 100644 --- a/src/hotspot/share/jvmci/jvmciCompilerToVM.cpp +++ b/src/hotspot/share/jvmci/jvmciCompilerToVM.cpp @@ -1208,12 +1208,19 @@ C2V_VMENTRY_NULL(jobject, 
executeHotSpotNmethod, (JNIEnv* env, jobject, jobject HandleMark hm(THREAD); JVMCIObject nmethod_mirror = JVMCIENV->wrap(hs_nmethod); - JVMCINMethodHandle nmethod_handle(THREAD); - nmethod* nm = JVMCIENV->get_nmethod(nmethod_mirror, nmethod_handle); - if (nm == nullptr || !nm->is_in_use()) { - JVMCI_THROW_NULL(InvalidInstalledCodeException); + methodHandle mh; + { + // Reduce the scope of JVMCINMethodHandle so that it isn't alive across the Java call. Once the + // nmethod has been validated and the method is fetched from the nmethod it's fine for the + // nmethod to be reclaimed if necessary. + JVMCINMethodHandle nmethod_handle(THREAD); + nmethod* nm = JVMCIENV->get_nmethod(nmethod_mirror, nmethod_handle); + if (nm == nullptr || !nm->is_in_use()) { + JVMCI_THROW_NULL(InvalidInstalledCodeException); + } + methodHandle nmh(THREAD, nm->method()); + mh = nmh; } - methodHandle mh(THREAD, nm->method()); Symbol* signature = mh->signature(); JavaCallArguments jca(mh->size_of_parameters()); diff --git a/src/hotspot/share/jvmci/jvmciCompilerToVM.hpp b/src/hotspot/share/jvmci/jvmciCompilerToVM.hpp index ff159a490c184..b7ae365c1936d 100644 --- a/src/hotspot/share/jvmci/jvmciCompilerToVM.hpp +++ b/src/hotspot/share/jvmci/jvmciCompilerToVM.hpp @@ -68,6 +68,10 @@ class CompilerToVM { static address ZBarrierSetRuntime_load_barrier_on_oop_array; static address ZBarrierSetRuntime_clone; + static address ZPointerVectorLoadBadMask_address; + static address ZPointerVectorStoreBadMask_address; + static address ZPointerVectorStoreGoodMask_address; + static bool continuations_enabled; static size_t ThreadLocalAllocBuffer_alignment_reserve; @@ -100,6 +104,7 @@ class CompilerToVM { static int sizeof_narrowKlass; static int sizeof_arrayOopDesc; static int sizeof_BasicLock; + static int sizeof_ZStoreBarrierEntry; static address dsin; static address dcos; diff --git a/src/hotspot/share/jvmci/jvmciCompilerToVMInit.cpp b/src/hotspot/share/jvmci/jvmciCompilerToVMInit.cpp index 
a2ba0f2b3e578..220667ad2ced0 100644 --- a/src/hotspot/share/jvmci/jvmciCompilerToVMInit.cpp +++ b/src/hotspot/share/jvmci/jvmciCompilerToVMInit.cpp @@ -38,6 +38,8 @@ #if INCLUDE_ZGC #include "gc/x/xBarrierSetRuntime.hpp" #include "gc/x/xThreadLocalData.hpp" +#include "gc/z/zBarrierSetRuntime.hpp" +#include "gc/z/zThreadLocalData.hpp" #endif #include "jvmci/jvmciCompilerToVM.hpp" #include "jvmci/jvmciEnv.hpp" @@ -80,6 +82,10 @@ address CompilerToVM::Data::ZBarrierSetRuntime_weak_load_barrier_on_phantom_oop_ address CompilerToVM::Data::ZBarrierSetRuntime_load_barrier_on_oop_array; address CompilerToVM::Data::ZBarrierSetRuntime_clone; +address CompilerToVM::Data::ZPointerVectorLoadBadMask_address; +address CompilerToVM::Data::ZPointerVectorStoreBadMask_address; +address CompilerToVM::Data::ZPointerVectorStoreGoodMask_address; + bool CompilerToVM::Data::continuations_enabled; #ifdef AARCH64 @@ -117,6 +123,7 @@ int CompilerToVM::Data::sizeof_ConstantPool = sizeof(ConstantPool); int CompilerToVM::Data::sizeof_narrowKlass = sizeof(narrowKlass); int CompilerToVM::Data::sizeof_arrayOopDesc = sizeof(arrayOopDesc); int CompilerToVM::Data::sizeof_BasicLock = sizeof(BasicLock); +int CompilerToVM::Data::sizeof_ZStoreBarrierEntry = sizeof(ZStoreBarrierEntry); address CompilerToVM::Data::dsin; address CompilerToVM::Data::dcos; @@ -157,15 +164,23 @@ void CompilerToVM::Data::initialize(JVMCI_TRAPS) { #if INCLUDE_ZGC if (UseZGC) { - thread_address_bad_mask_offset = in_bytes(XThreadLocalData::address_bad_mask_offset()); - ZBarrierSetRuntime_load_barrier_on_oop_field_preloaded = XBarrierSetRuntime::load_barrier_on_oop_field_preloaded_addr(); - ZBarrierSetRuntime_load_barrier_on_weak_oop_field_preloaded = XBarrierSetRuntime::load_barrier_on_weak_oop_field_preloaded_addr(); - ZBarrierSetRuntime_load_barrier_on_phantom_oop_field_preloaded = XBarrierSetRuntime::load_barrier_on_phantom_oop_field_preloaded_addr(); - ZBarrierSetRuntime_weak_load_barrier_on_oop_field_preloaded = 
XBarrierSetRuntime::weak_load_barrier_on_oop_field_preloaded_addr(); - ZBarrierSetRuntime_weak_load_barrier_on_weak_oop_field_preloaded = XBarrierSetRuntime::weak_load_barrier_on_weak_oop_field_preloaded_addr(); - ZBarrierSetRuntime_weak_load_barrier_on_phantom_oop_field_preloaded = XBarrierSetRuntime::weak_load_barrier_on_phantom_oop_field_preloaded_addr(); - ZBarrierSetRuntime_load_barrier_on_oop_array = XBarrierSetRuntime::load_barrier_on_oop_array_addr(); - ZBarrierSetRuntime_clone = XBarrierSetRuntime::clone_addr(); + if (ZGenerational) { + ZPointerVectorLoadBadMask_address = (address) &ZPointerVectorLoadBadMask; + ZPointerVectorStoreBadMask_address = (address) &ZPointerVectorStoreBadMask; + ZPointerVectorStoreGoodMask_address = (address) &ZPointerVectorStoreGoodMask; + } else { + thread_address_bad_mask_offset = in_bytes(XThreadLocalData::address_bad_mask_offset()); + // Initialize the old names for compatibility. The proper XBarrierSetRuntime names are + // exported as addresses in vmStructs_jvmci.cpp as are the new ZBarrierSetRuntime names. 
+ ZBarrierSetRuntime_load_barrier_on_oop_field_preloaded = XBarrierSetRuntime::load_barrier_on_oop_field_preloaded_addr(); + ZBarrierSetRuntime_load_barrier_on_weak_oop_field_preloaded = XBarrierSetRuntime::load_barrier_on_weak_oop_field_preloaded_addr(); + ZBarrierSetRuntime_load_barrier_on_phantom_oop_field_preloaded = XBarrierSetRuntime::load_barrier_on_phantom_oop_field_preloaded_addr(); + ZBarrierSetRuntime_weak_load_barrier_on_oop_field_preloaded = XBarrierSetRuntime::weak_load_barrier_on_oop_field_preloaded_addr(); + ZBarrierSetRuntime_weak_load_barrier_on_weak_oop_field_preloaded = XBarrierSetRuntime::weak_load_barrier_on_weak_oop_field_preloaded_addr(); + ZBarrierSetRuntime_weak_load_barrier_on_phantom_oop_field_preloaded = XBarrierSetRuntime::weak_load_barrier_on_phantom_oop_field_preloaded_addr(); + ZBarrierSetRuntime_load_barrier_on_oop_array = XBarrierSetRuntime::load_barrier_on_oop_array_addr(); + ZBarrierSetRuntime_clone = XBarrierSetRuntime::clone_addr(); + } } #endif diff --git a/src/hotspot/share/jvmci/jvmciRuntime.cpp b/src/hotspot/share/jvmci/jvmciRuntime.cpp index 504fbfcb1b078..9770e329a8813 100644 --- a/src/hotspot/share/jvmci/jvmciRuntime.cpp +++ b/src/hotspot/share/jvmci/jvmciRuntime.cpp @@ -874,7 +874,7 @@ int JVMCIRuntime::release_and_clear_oop_handles() { for (int i = 0; i < _oop_handles.length(); i++) { oop* oop_ptr = _oop_handles.at(i); guarantee(oop_ptr != nullptr, "release_cleared_oop_handles left null entry in _oop_handles"); - guarantee(*oop_ptr != nullptr, "unexpected cleared handle"); + guarantee(NativeAccess<>::oop_load(oop_ptr) != nullptr, "unexpected cleared handle"); // Satisfy OopHandles::release precondition that all // handles being released are null. 
NativeAccess<>::oop_store(oop_ptr, (oop) nullptr); @@ -889,7 +889,7 @@ int JVMCIRuntime::release_and_clear_oop_handles() { } static bool is_referent_non_null(oop* handle) { - return handle != nullptr && *handle != nullptr; + return handle != nullptr && NativeAccess<>::oop_load(handle) != nullptr; } // Swaps the elements in `array` at index `a` and index `b` diff --git a/src/hotspot/share/jvmci/jvmci_globals.cpp b/src/hotspot/share/jvmci/jvmci_globals.cpp index 8253332e3a919..86d8491b73303 100644 --- a/src/hotspot/share/jvmci/jvmci_globals.cpp +++ b/src/hotspot/share/jvmci/jvmci_globals.cpp @@ -223,16 +223,14 @@ bool JVMCIGlobals::enable_jvmci_product_mode(JVMFlagOrigin origin, bool use_graa } bool JVMCIGlobals::gc_supports_jvmci() { - return UseSerialGC || UseParallelGC || UseG1GC || (UseZGC && !ZGenerational); + return UseSerialGC || UseParallelGC || UseG1GC || UseZGC || UseEpsilonGC; } void JVMCIGlobals::check_jvmci_supported_gc() { if (EnableJVMCI) { // Check if selected GC is supported by JVMCI and Java compiler if (!gc_supports_jvmci()) { - log_warning(gc, jvmci)("Setting EnableJVMCI to false as selected GC does not support JVMCI: %s", GCConfig::hs_err_name()); - FLAG_SET_DEFAULT(EnableJVMCI, false); - FLAG_SET_DEFAULT(UseJVMCICompiler, false); + fatal("JVMCI does not support the selected GC"); } } } diff --git a/src/hotspot/share/jvmci/vmStructs_jvmci.cpp b/src/hotspot/share/jvmci/vmStructs_jvmci.cpp index f0ecc5af7d2cc..73bac7bd0909c 100644 --- a/src/hotspot/share/jvmci/vmStructs_jvmci.cpp +++ b/src/hotspot/share/jvmci/vmStructs_jvmci.cpp @@ -42,10 +42,17 @@ #include "runtime/stubRoutines.hpp" #include "runtime/vm_version.hpp" #if INCLUDE_G1GC +#include "gc/g1/g1BarrierSetRuntime.hpp" #include "gc/g1/g1CardTable.hpp" #include "gc/g1/g1HeapRegion.hpp" #include "gc/g1/g1ThreadLocalData.hpp" #endif +#if INCLUDE_ZGC +#include "gc/x/xBarrierSetRuntime.hpp" +#include "gc/z/zBarrierSetAssembler.hpp" +#include "gc/z/zBarrierSetRuntime.hpp" +#include 
"gc/z/zThreadLocalData.hpp" +#endif #define VM_STRUCTS(nonstatic_field, static_field, unchecked_nonstatic_field, volatile_nonstatic_field) \ static_field(CompilerToVM::Data, Klass_vtable_start_offset, int) \ @@ -66,6 +73,7 @@ static_field(CompilerToVM::Data, thread_disarmed_guard_value_offset, int) \ static_field(CompilerToVM::Data, thread_address_bad_mask_offset, int) \ AARCH64_ONLY(static_field(CompilerToVM::Data, BarrierSetAssembler_nmethod_patching_type, int)) \ + AARCH64_ONLY(static_field(CompilerToVM::Data, BarrierSetAssembler_patching_epoch_addr, address)) \ \ static_field(CompilerToVM::Data, ZBarrierSetRuntime_load_barrier_on_oop_field_preloaded, address) \ static_field(CompilerToVM::Data, ZBarrierSetRuntime_load_barrier_on_weak_oop_field_preloaded, address) \ @@ -76,6 +84,10 @@ static_field(CompilerToVM::Data, ZBarrierSetRuntime_load_barrier_on_oop_array, address) \ static_field(CompilerToVM::Data, ZBarrierSetRuntime_clone, address) \ \ + static_field(CompilerToVM::Data, ZPointerVectorLoadBadMask_address, address) \ + static_field(CompilerToVM::Data, ZPointerVectorStoreBadMask_address, address) \ + static_field(CompilerToVM::Data, ZPointerVectorStoreGoodMask_address, address) \ + \ static_field(CompilerToVM::Data, continuations_enabled, bool) \ \ static_field(CompilerToVM::Data, ThreadLocalAllocBuffer_alignment_reserve, size_t) \ @@ -109,6 +121,7 @@ static_field(CompilerToVM::Data, sizeof_narrowKlass, int) \ static_field(CompilerToVM::Data, sizeof_arrayOopDesc, int) \ static_field(CompilerToVM::Data, sizeof_BasicLock, int) \ + static_field(CompilerToVM::Data, sizeof_ZStoreBarrierEntry, int) \ \ static_field(CompilerToVM::Data, dsin, address) \ static_field(CompilerToVM::Data, dcos, address) \ @@ -784,7 +797,12 @@ declare_constant(markWord::no_hash_in_place) \ declare_constant(markWord::no_lock_in_place) \ -#define VM_ADDRESSES(declare_address, declare_preprocessor_address, declare_function) \ +// Helper macro to support ZGC pattern where the function 
itself isn't exported +#define DECLARE_FUNCTION_FROM_ADDR(declare_function_with_value, name) \ + declare_function_with_value(name, name##_addr()) + + +#define VM_ADDRESSES(declare_address, declare_preprocessor_address, declare_function, declare_function_with_value) \ declare_function(SharedRuntime::register_finalizer) \ declare_function(SharedRuntime::exception_handler_for_return_address) \ declare_function(SharedRuntime::OSR_migration_end) \ @@ -801,6 +819,26 @@ declare_function(os::javaTimeMillis) \ declare_function(os::javaTimeNanos) \ \ + ZGC_ONLY(DECLARE_FUNCTION_FROM_ADDR(declare_function_with_value, XBarrierSetRuntime::load_barrier_on_oop_field_preloaded)) \ + ZGC_ONLY(DECLARE_FUNCTION_FROM_ADDR(declare_function_with_value, XBarrierSetRuntime::load_barrier_on_weak_oop_field_preloaded)) \ + ZGC_ONLY(DECLARE_FUNCTION_FROM_ADDR(declare_function_with_value, XBarrierSetRuntime::load_barrier_on_phantom_oop_field_preloaded)) \ + ZGC_ONLY(DECLARE_FUNCTION_FROM_ADDR(declare_function_with_value, XBarrierSetRuntime::weak_load_barrier_on_oop_field_preloaded)) \ + ZGC_ONLY(DECLARE_FUNCTION_FROM_ADDR(declare_function_with_value, XBarrierSetRuntime::weak_load_barrier_on_weak_oop_field_preloaded)) \ + ZGC_ONLY(DECLARE_FUNCTION_FROM_ADDR(declare_function_with_value, XBarrierSetRuntime::weak_load_barrier_on_phantom_oop_field_preloaded)) \ + ZGC_ONLY(DECLARE_FUNCTION_FROM_ADDR(declare_function_with_value, XBarrierSetRuntime::load_barrier_on_oop_array)) \ + ZGC_ONLY(DECLARE_FUNCTION_FROM_ADDR(declare_function_with_value, XBarrierSetRuntime::clone)) \ + \ + ZGC_ONLY(DECLARE_FUNCTION_FROM_ADDR(declare_function_with_value, ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded)) \ + ZGC_ONLY(DECLARE_FUNCTION_FROM_ADDR(declare_function_with_value, ZBarrierSetRuntime::load_barrier_on_weak_oop_field_preloaded)) \ + ZGC_ONLY(DECLARE_FUNCTION_FROM_ADDR(declare_function_with_value, ZBarrierSetRuntime::load_barrier_on_phantom_oop_field_preloaded)) \ + 
ZGC_ONLY(DECLARE_FUNCTION_FROM_ADDR(declare_function_with_value, ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded_store_good)) \ + ZGC_ONLY(DECLARE_FUNCTION_FROM_ADDR(declare_function_with_value, ZBarrierSetRuntime::no_keepalive_load_barrier_on_weak_oop_field_preloaded)) \ + ZGC_ONLY(DECLARE_FUNCTION_FROM_ADDR(declare_function_with_value, ZBarrierSetRuntime::no_keepalive_load_barrier_on_phantom_oop_field_preloaded)) \ + ZGC_ONLY(DECLARE_FUNCTION_FROM_ADDR(declare_function_with_value, ZBarrierSetRuntime::store_barrier_on_native_oop_field_without_healing)) \ + ZGC_ONLY(DECLARE_FUNCTION_FROM_ADDR(declare_function_with_value, ZBarrierSetRuntime::store_barrier_on_oop_field_with_healing)) \ + ZGC_ONLY(DECLARE_FUNCTION_FROM_ADDR(declare_function_with_value, ZBarrierSetRuntime::store_barrier_on_oop_field_without_healing)) \ + ZGC_ONLY(DECLARE_FUNCTION_FROM_ADDR(declare_function_with_value, ZBarrierSetRuntime::load_barrier_on_oop_array)) \ + \ declare_function(Deoptimization::fetch_unroll_info) \ declare_function(Deoptimization::uncommon_trap) \ declare_function(Deoptimization::unpack_frames) \ @@ -851,9 +889,35 @@ #endif // INCLUDE_G1GC +#if INCLUDE_ZGC + +#define VM_INT_CONSTANTS_JVMCI_ZGC(declare_constant, declare_constant_with_value, declare_preprocessor_constant) \ + declare_constant_with_value("ZThreadLocalData::store_good_mask_offset" , in_bytes(ZThreadLocalData::store_good_mask_offset())) \ + declare_constant_with_value("ZThreadLocalData::store_bad_mask_offset" , in_bytes(ZThreadLocalData::store_bad_mask_offset())) \ + declare_constant_with_value("ZThreadLocalData::store_barrier_buffer_offset" , in_bytes(ZThreadLocalData::store_barrier_buffer_offset())) \ + declare_constant_with_value("ZStoreBarrierBuffer::current_offset" , in_bytes(ZStoreBarrierBuffer::current_offset())) \ + declare_constant_with_value("ZStoreBarrierBuffer::buffer_offset" , in_bytes(ZStoreBarrierBuffer::buffer_offset())) \ + declare_constant_with_value("ZStoreBarrierEntry::p_offset" , 
in_bytes(ZStoreBarrierEntry::p_offset())) \ + declare_constant_with_value("ZStoreBarrierEntry::prev_offset" , in_bytes(ZStoreBarrierEntry::prev_offset())) \ + AMD64_ONLY(declare_constant(CodeInstaller::Z_BARRIER_RELOCATION_FORMAT_LOAD_GOOD_BEFORE_SHL)) \ + AMD64_ONLY(declare_constant(CodeInstaller::Z_BARRIER_RELOCATION_FORMAT_LOAD_BAD_AFTER_TEST)) \ + AMD64_ONLY(declare_constant(CodeInstaller::Z_BARRIER_RELOCATION_FORMAT_MARK_BAD_AFTER_TEST)) \ + AMD64_ONLY(declare_constant(CodeInstaller::Z_BARRIER_RELOCATION_FORMAT_STORE_GOOD_AFTER_CMP)) \ + AMD64_ONLY(declare_constant(CodeInstaller::Z_BARRIER_RELOCATION_FORMAT_STORE_BAD_AFTER_TEST)) \ + AMD64_ONLY(declare_constant(CodeInstaller::Z_BARRIER_RELOCATION_FORMAT_STORE_GOOD_AFTER_OR)) \ + AMD64_ONLY(declare_constant(CodeInstaller::Z_BARRIER_RELOCATION_FORMAT_STORE_GOOD_AFTER_MOV)) \ + AARCH64_ONLY(declare_constant(ZPointerLoadShift)) \ + AARCH64_ONLY(declare_constant(CodeInstaller::Z_BARRIER_RELOCATION_FORMAT_LOAD_GOOD_BEFORE_TB_X)) \ + AARCH64_ONLY(declare_constant(CodeInstaller::Z_BARRIER_RELOCATION_FORMAT_MARK_BAD_BEFORE_MOV)) \ + AARCH64_ONLY(declare_constant(CodeInstaller::Z_BARRIER_RELOCATION_FORMAT_STORE_GOOD_BEFORE_MOV)) \ + AARCH64_ONLY(declare_constant(CodeInstaller::Z_BARRIER_RELOCATION_FORMAT_STORE_BAD_BEFORE_MOV)) + +#endif // INCLUDE_ZGC + + #ifdef LINUX -#define VM_ADDRESSES_OS(declare_address, declare_preprocessor_address, declare_function) \ +#define VM_ADDRESSES_OS(declare_address, declare_preprocessor_address, declare_function, declare_function_with_value) \ declare_preprocessor_address("RTLD_DEFAULT", RTLD_DEFAULT) #endif @@ -861,7 +925,7 @@ #ifdef BSD -#define VM_ADDRESSES_OS(declare_address, declare_preprocessor_address, declare_function) \ +#define VM_ADDRESSES_OS(declare_address, declare_preprocessor_address, declare_function, declare_function_with_value) \ declare_preprocessor_address("RTLD_DEFAULT", RTLD_DEFAULT) #endif @@ -912,13 +976,17 @@ #endif #ifndef VM_ADDRESSES_OS -#define 
VM_ADDRESSES_OS(declare_address, declare_preprocessor_address, declare_function) +#define VM_ADDRESSES_OS(declare_address, declare_preprocessor_address, declare_function, declare_function_with_value) #endif // // Instantiation of VMStructEntries, VMTypeEntries and VMIntConstantEntries // +#define GENERATE_VM_FUNCTION_WITH_VALUE_ENTRY(name, value) \ + { QUOTE(name), CAST_FROM_FN_PTR(void*, value) }, + + // These initializers are allowed to access private fields in classes // as long as class VMStructs is a friend VMStructEntry JVMCIVMStructs::localHotSpotVMStructs[] = { @@ -969,6 +1037,11 @@ VMIntConstantEntry JVMCIVMStructs::localHotSpotVMIntConstants[] = { GENERATE_VM_INT_CONSTANT_WITH_VALUE_ENTRY, GENERATE_PREPROCESSOR_VM_INT_CONSTANT_ENTRY) #endif +#if INCLUDE_ZGC + VM_INT_CONSTANTS_JVMCI_ZGC(GENERATE_VM_INT_CONSTANT_ENTRY, + GENERATE_VM_INT_CONSTANT_WITH_VALUE_ENTRY, + GENERATE_PREPROCESSOR_VM_INT_CONSTANT_ENTRY) +#endif #ifdef VM_INT_CPU_FEATURE_CONSTANTS VM_INT_CPU_FEATURE_CONSTANTS #endif @@ -994,10 +1067,12 @@ VMLongConstantEntry JVMCIVMStructs::localHotSpotVMLongConstants[] = { VMAddressEntry JVMCIVMStructs::localHotSpotVMAddresses[] = { VM_ADDRESSES(GENERATE_VM_ADDRESS_ENTRY, GENERATE_PREPROCESSOR_VM_ADDRESS_ENTRY, - GENERATE_VM_FUNCTION_ENTRY) + GENERATE_VM_FUNCTION_ENTRY, + GENERATE_VM_FUNCTION_WITH_VALUE_ENTRY) VM_ADDRESSES_OS(GENERATE_VM_ADDRESS_ENTRY, GENERATE_PREPROCESSOR_VM_ADDRESS_ENTRY, - GENERATE_VM_FUNCTION_ENTRY) + GENERATE_VM_FUNCTION_ENTRY, + GENERATE_VM_FUNCTION_WITH_VALUE_ENTRY) GENERATE_VM_ADDRESS_LAST_ENTRY() };