diff --git a/src/hotspot/cpu/aarch64/aarch64.ad b/src/hotspot/cpu/aarch64/aarch64.ad index b81fc82c3a4a0..5bfe697d12c0b 100644 --- a/src/hotspot/cpu/aarch64/aarch64.ad +++ b/src/hotspot/cpu/aarch64/aarch64.ad @@ -1742,7 +1742,7 @@ void MachPrologNode::format(PhaseRegAlloc *ra_, outputStream *st) const { st->print("mov rscratch1, #%d\n\t", framesize - 2 * wordSize); st->print("sub sp, sp, rscratch1"); } - if (C->stub_function() == nullptr && BarrierSet::barrier_set()->barrier_set_nmethod() != nullptr) { + if (C->stub_function() == nullptr) { st->print("\n\t"); st->print("ldr rscratch1, [guard]\n\t"); st->print("dmb ishld\n\t"); @@ -1792,25 +1792,23 @@ void MachPrologNode::emit(C2_MacroAssembler *masm, PhaseRegAlloc *ra_) const { if (C->stub_function() == nullptr) { BarrierSetAssembler* bs = BarrierSet::barrier_set()->barrier_set_assembler(); - if (BarrierSet::barrier_set()->barrier_set_nmethod() != nullptr) { - // Dummy labels for just measuring the code size - Label dummy_slow_path; - Label dummy_continuation; - Label dummy_guard; - Label* slow_path = &dummy_slow_path; - Label* continuation = &dummy_continuation; - Label* guard = &dummy_guard; - if (!Compile::current()->output()->in_scratch_emit_size()) { - // Use real labels from actual stub when not emitting code for the purpose of measuring its size - C2EntryBarrierStub* stub = new (Compile::current()->comp_arena()) C2EntryBarrierStub(); - Compile::current()->output()->add_stub(stub); - slow_path = &stub->entry(); - continuation = &stub->continuation(); - guard = &stub->guard(); - } - // In the C2 code, we move the non-hot part of nmethod entry barriers out-of-line to a stub. 
- bs->nmethod_entry_barrier(masm, slow_path, continuation, guard); + // Dummy labels for just measuring the code size + Label dummy_slow_path; + Label dummy_continuation; + Label dummy_guard; + Label* slow_path = &dummy_slow_path; + Label* continuation = &dummy_continuation; + Label* guard = &dummy_guard; + if (!Compile::current()->output()->in_scratch_emit_size()) { + // Use real labels from actual stub when not emitting code for the purpose of measuring its size + C2EntryBarrierStub* stub = new (Compile::current()->comp_arena()) C2EntryBarrierStub(); + Compile::current()->output()->add_stub(stub); + slow_path = &stub->entry(); + continuation = &stub->continuation(); + guard = &stub->guard(); } + // In the C2 code, we move the non-hot part of nmethod entry barriers out-of-line to a stub. + bs->nmethod_entry_barrier(masm, slow_path, continuation, guard); } if (VerifyStackAtCalls) { diff --git a/src/hotspot/cpu/aarch64/gc/shared/barrierSetAssembler_aarch64.cpp b/src/hotspot/cpu/aarch64/gc/shared/barrierSetAssembler_aarch64.cpp index 828033975a065..869e26d335978 100644 --- a/src/hotspot/cpu/aarch64/gc/shared/barrierSetAssembler_aarch64.cpp +++ b/src/hotspot/cpu/aarch64/gc/shared/barrierSetAssembler_aarch64.cpp @@ -285,10 +285,6 @@ void BarrierSetAssembler::clear_patching_epoch() { void BarrierSetAssembler::nmethod_entry_barrier(MacroAssembler* masm, Label* slow_path, Label* continuation, Label* guard) { BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod(); - if (bs_nm == nullptr) { - return; - } - Label local_guard; Label skip_barrier; NMethodPatchingType patching_type = nmethod_patching_type(); @@ -361,11 +357,6 @@ void BarrierSetAssembler::nmethod_entry_barrier(MacroAssembler* masm, Label* slo } void BarrierSetAssembler::c2i_entry_barrier(MacroAssembler* masm) { - BarrierSetNMethod* bs = BarrierSet::barrier_set()->barrier_set_nmethod(); - if (bs == nullptr) { - return; - } - Label bad_call; __ cbz(rmethod, bad_call); diff --git 
a/src/hotspot/cpu/aarch64/stubGenerator_aarch64.cpp b/src/hotspot/cpu/aarch64/stubGenerator_aarch64.cpp index d5b23dc6843b6..7b8f5eca374ee 100644 --- a/src/hotspot/cpu/aarch64/stubGenerator_aarch64.cpp +++ b/src/hotspot/cpu/aarch64/stubGenerator_aarch64.cpp @@ -9900,10 +9900,7 @@ class StubGenerator: public StubCodeGenerator { // arraycopy stubs used by compilers generate_arraycopy_stubs(); - BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod(); - if (bs_nm != nullptr) { - StubRoutines::_method_entry_barrier = generate_method_entry_barrier(); - } + StubRoutines::_method_entry_barrier = generate_method_entry_barrier(); StubRoutines::aarch64::_spin_wait = generate_spin_wait(); diff --git a/src/hotspot/cpu/arm/arm.ad b/src/hotspot/cpu/arm/arm.ad index 617745dee20ab..6a935c26d544a 100644 --- a/src/hotspot/cpu/arm/arm.ad +++ b/src/hotspot/cpu/arm/arm.ad @@ -286,7 +286,7 @@ void MachPrologNode::format( PhaseRegAlloc *ra_, outputStream *st ) const { st->print ("SUB R_SP, R_SP, %zu", framesize); } - if (C->stub_function() == nullptr && BarrierSet::barrier_set()->barrier_set_nmethod() != nullptr) { + if (C->stub_function() == nullptr) { st->print("ldr t0, [guard]\n\t"); st->print("ldr t1, [Rthread, #thread_disarmed_guard_value_offset]\n\t"); st->print("cmp t0, t1\n\t"); diff --git a/src/hotspot/cpu/arm/gc/shared/barrierSetAssembler_arm.cpp b/src/hotspot/cpu/arm/gc/shared/barrierSetAssembler_arm.cpp index 704ca71ce990d..4492c9da33e1f 100644 --- a/src/hotspot/cpu/arm/gc/shared/barrierSetAssembler_arm.cpp +++ b/src/hotspot/cpu/arm/gc/shared/barrierSetAssembler_arm.cpp @@ -169,10 +169,6 @@ void BarrierSetAssembler::nmethod_entry_barrier(MacroAssembler* masm) { Register tmp0 = Rtemp; Register tmp1 = R5; // must be callee-save register - if (bs_nm == nullptr) { - return; - } - // The are no GCs that require memory barrier on arm32 now #ifdef ASSERT NMethodPatchingType patching_type = nmethod_patching_type(); diff --git 
a/src/hotspot/cpu/arm/stubGenerator_arm.cpp b/src/hotspot/cpu/arm/stubGenerator_arm.cpp index aad81e7891d46..fc5bcb4e0e6df 100644 --- a/src/hotspot/cpu/arm/stubGenerator_arm.cpp +++ b/src/hotspot/cpu/arm/stubGenerator_arm.cpp @@ -3176,11 +3176,7 @@ class StubGenerator: public StubCodeGenerator { // arraycopy stubs used by compilers generate_arraycopy_stubs(); - BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod(); - if (bs_nm != nullptr) { - StubRoutines::_method_entry_barrier = generate_method_entry_barrier(); - } - + StubRoutines::_method_entry_barrier = generate_method_entry_barrier(); } void generate_compiler_stubs() { diff --git a/src/hotspot/cpu/ppc/gc/shared/barrierSetAssembler_ppc.cpp b/src/hotspot/cpu/ppc/gc/shared/barrierSetAssembler_ppc.cpp index fe10114a1954f..acf916c8c7222 100644 --- a/src/hotspot/cpu/ppc/gc/shared/barrierSetAssembler_ppc.cpp +++ b/src/hotspot/cpu/ppc/gc/shared/barrierSetAssembler_ppc.cpp @@ -181,10 +181,6 @@ void BarrierSetAssembler::try_resolve_jobject_in_native(MacroAssembler* masm, Re void BarrierSetAssembler::nmethod_entry_barrier(MacroAssembler* masm, Register tmp) { BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod(); - if (bs_nm == nullptr) { - return; - } - assert_different_registers(tmp, R0); __ block_comment("nmethod_entry_barrier (nmethod_entry_barrier) {"); @@ -215,11 +211,6 @@ void BarrierSetAssembler::nmethod_entry_barrier(MacroAssembler* masm, Register t } void BarrierSetAssembler::c2i_entry_barrier(MacroAssembler *masm, Register tmp1, Register tmp2, Register tmp3) { - BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod(); - if (bs_nm == nullptr) { - return; - } - assert_different_registers(tmp1, tmp2, tmp3); __ block_comment("c2i_entry_barrier (c2i_entry_barrier) {"); diff --git a/src/hotspot/cpu/ppc/stubGenerator_ppc.cpp b/src/hotspot/cpu/ppc/stubGenerator_ppc.cpp index 1749447d43bec..fa356ec13ac16 100644 --- 
a/src/hotspot/cpu/ppc/stubGenerator_ppc.cpp +++ b/src/hotspot/cpu/ppc/stubGenerator_ppc.cpp @@ -4881,10 +4881,7 @@ void generate_lookup_secondary_supers_table_stub() { StubRoutines::_verify_oop_subroutine_entry = generate_verify_oop(); // nmethod entry barriers for concurrent class unloading - BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod(); - if (bs_nm != nullptr) { - StubRoutines::_method_entry_barrier = generate_method_entry_barrier(); - } + StubRoutines::_method_entry_barrier = generate_method_entry_barrier(); // arraycopy stubs used by compilers generate_arraycopy_stubs(); diff --git a/src/hotspot/cpu/riscv/gc/shared/barrierSetAssembler_riscv.cpp b/src/hotspot/cpu/riscv/gc/shared/barrierSetAssembler_riscv.cpp index c49c8406befec..d66a86c750a26 100644 --- a/src/hotspot/cpu/riscv/gc/shared/barrierSetAssembler_riscv.cpp +++ b/src/hotspot/cpu/riscv/gc/shared/barrierSetAssembler_riscv.cpp @@ -226,11 +226,6 @@ void BarrierSetAssembler::clear_patching_epoch() { void BarrierSetAssembler::nmethod_entry_barrier(MacroAssembler* masm, Label* slow_path, Label* continuation, Label* guard) { BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod(); - - if (bs_nm == nullptr) { - return; - } - Assembler::IncompressibleRegion ir(masm); // Fixed length: see entry_barrier_offset() Label local_guard; @@ -320,11 +315,6 @@ void BarrierSetAssembler::nmethod_entry_barrier(MacroAssembler* masm, Label* slo } void BarrierSetAssembler::c2i_entry_barrier(MacroAssembler* masm) { - BarrierSetNMethod* bs = BarrierSet::barrier_set()->barrier_set_nmethod(); - if (bs == nullptr) { - return; - } - Label bad_call; __ beqz(xmethod, bad_call); diff --git a/src/hotspot/cpu/riscv/riscv.ad b/src/hotspot/cpu/riscv/riscv.ad index b380e00999c30..6ee1e284f2cc3 100644 --- a/src/hotspot/cpu/riscv/riscv.ad +++ b/src/hotspot/cpu/riscv/riscv.ad @@ -1348,7 +1348,7 @@ void MachPrologNode::format(PhaseRegAlloc *ra_, outputStream *st) const { if (PreserveFramePointer) 
{ st->print("sub fp, sp, #%d\n\t", 2 * wordSize); } st->print("sub sp, sp, #%d\n\t", framesize); - if (C->stub_function() == nullptr && BarrierSet::barrier_set()->barrier_set_nmethod() != nullptr) { + if (C->stub_function() == nullptr) { st->print("ld t0, [guard]\n\t"); st->print("membar LoadLoad\n\t"); st->print("ld t1, [xthread, #thread_disarmed_guard_value_offset]\n\t"); @@ -1398,25 +1398,23 @@ void MachPrologNode::emit(C2_MacroAssembler *masm, PhaseRegAlloc *ra_) const { if (C->stub_function() == nullptr) { BarrierSetAssembler* bs = BarrierSet::barrier_set()->barrier_set_assembler(); - if (BarrierSet::barrier_set()->barrier_set_nmethod() != nullptr) { - // Dummy labels for just measuring the code size - Label dummy_slow_path; - Label dummy_continuation; - Label dummy_guard; - Label* slow_path = &dummy_slow_path; - Label* continuation = &dummy_continuation; - Label* guard = &dummy_guard; - if (!Compile::current()->output()->in_scratch_emit_size()) { - // Use real labels from actual stub when not emitting code for purpose of measuring its size - C2EntryBarrierStub* stub = new (Compile::current()->comp_arena()) C2EntryBarrierStub(); - Compile::current()->output()->add_stub(stub); - slow_path = &stub->entry(); - continuation = &stub->continuation(); - guard = &stub->guard(); - } - // In the C2 code, we move the non-hot part of nmethod entry barriers out-of-line to a stub. 
- bs->nmethod_entry_barrier(masm, slow_path, continuation, guard); + // Dummy labels for just measuring the code size + Label dummy_slow_path; + Label dummy_continuation; + Label dummy_guard; + Label* slow_path = &dummy_slow_path; + Label* continuation = &dummy_continuation; + Label* guard = &dummy_guard; + if (!Compile::current()->output()->in_scratch_emit_size()) { + // Use real labels from actual stub when not emitting code for purpose of measuring its size + C2EntryBarrierStub* stub = new (Compile::current()->comp_arena()) C2EntryBarrierStub(); + Compile::current()->output()->add_stub(stub); + slow_path = &stub->entry(); + continuation = &stub->continuation(); + guard = &stub->guard(); } + // In the C2 code, we move the non-hot part of nmethod entry barriers out-of-line to a stub. + bs->nmethod_entry_barrier(masm, slow_path, continuation, guard); } if (VerifyStackAtCalls) { diff --git a/src/hotspot/cpu/riscv/stubGenerator_riscv.cpp b/src/hotspot/cpu/riscv/stubGenerator_riscv.cpp index 249b871bd00bc..e9130590ae0ec 100644 --- a/src/hotspot/cpu/riscv/stubGenerator_riscv.cpp +++ b/src/hotspot/cpu/riscv/stubGenerator_riscv.cpp @@ -6544,10 +6544,7 @@ static const int64_t right_3_bits = right_n_bits(3); // arraycopy stubs used by compilers generate_arraycopy_stubs(); - BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod(); - if (bs_nm != nullptr) { - StubRoutines::_method_entry_barrier = generate_method_entry_barrier(); - } + StubRoutines::_method_entry_barrier = generate_method_entry_barrier(); #ifdef COMPILER2 if (UseSecondarySupersTable) { diff --git a/src/hotspot/cpu/s390/gc/shared/barrierSetAssembler_s390.cpp b/src/hotspot/cpu/s390/gc/shared/barrierSetAssembler_s390.cpp index d6fe10ac9c232..e78906708afd7 100644 --- a/src/hotspot/cpu/s390/gc/shared/barrierSetAssembler_s390.cpp +++ b/src/hotspot/cpu/s390/gc/shared/barrierSetAssembler_s390.cpp @@ -171,10 +171,6 @@ void BarrierSetAssembler::try_resolve_jobject_in_native(MacroAssembler* masm, Re 
void BarrierSetAssembler::nmethod_entry_barrier(MacroAssembler* masm) { BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod(); - if (bs_nm == nullptr) { - return; - } - __ block_comment("nmethod_entry_barrier (nmethod_entry_barrier) {"); // Load jump addr: diff --git a/src/hotspot/cpu/s390/stubGenerator_s390.cpp b/src/hotspot/cpu/s390/stubGenerator_s390.cpp index f542c125b3639..b46393f543e87 100644 --- a/src/hotspot/cpu/s390/stubGenerator_s390.cpp +++ b/src/hotspot/cpu/s390/stubGenerator_s390.cpp @@ -3217,10 +3217,7 @@ class StubGenerator: public StubCodeGenerator { generate_arraycopy_stubs(); // nmethod entry barriers for concurrent class unloading - BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod(); - if (bs_nm != nullptr) { - StubRoutines::_method_entry_barrier = generate_method_entry_barrier(); - } + StubRoutines::_method_entry_barrier = generate_method_entry_barrier(); #ifdef COMPILER2 if (UseSecondarySupersTable) { diff --git a/src/hotspot/cpu/x86/c2_MacroAssembler_x86.cpp b/src/hotspot/cpu/x86/c2_MacroAssembler_x86.cpp index d63f230b9706a..f78f45f6f0b86 100644 --- a/src/hotspot/cpu/x86/c2_MacroAssembler_x86.cpp +++ b/src/hotspot/cpu/x86/c2_MacroAssembler_x86.cpp @@ -134,21 +134,19 @@ void C2_MacroAssembler::verified_entry(int framesize, int stack_bang_size, bool if (!is_stub) { BarrierSetAssembler* bs = BarrierSet::barrier_set()->barrier_set_assembler(); #ifdef _LP64 - if (BarrierSet::barrier_set()->barrier_set_nmethod() != nullptr) { - // We put the non-hot code of the nmethod entry barrier out-of-line in a stub. 
- Label dummy_slow_path; - Label dummy_continuation; - Label* slow_path = &dummy_slow_path; - Label* continuation = &dummy_continuation; - if (!Compile::current()->output()->in_scratch_emit_size()) { - // Use real labels from actual stub when not emitting code for the purpose of measuring its size - C2EntryBarrierStub* stub = new (Compile::current()->comp_arena()) C2EntryBarrierStub(); - Compile::current()->output()->add_stub(stub); - slow_path = &stub->entry(); - continuation = &stub->continuation(); - } - bs->nmethod_entry_barrier(this, slow_path, continuation); + // We put the non-hot code of the nmethod entry barrier out-of-line in a stub. + Label dummy_slow_path; + Label dummy_continuation; + Label* slow_path = &dummy_slow_path; + Label* continuation = &dummy_continuation; + if (!Compile::current()->output()->in_scratch_emit_size()) { + // Use real labels from actual stub when not emitting code for the purpose of measuring its size + C2EntryBarrierStub* stub = new (Compile::current()->comp_arena()) C2EntryBarrierStub(); + Compile::current()->output()->add_stub(stub); + slow_path = &stub->entry(); + continuation = &stub->continuation(); } + bs->nmethod_entry_barrier(this, slow_path, continuation); #else // Don't bother with out-of-line nmethod entry barrier stub for x86_32. 
bs->nmethod_entry_barrier(this, nullptr /* slow_path */, nullptr /* continuation */); diff --git a/src/hotspot/cpu/x86/gc/shared/barrierSetAssembler_x86.cpp b/src/hotspot/cpu/x86/gc/shared/barrierSetAssembler_x86.cpp index 50dea42d5a300..5962609d08ede 100644 --- a/src/hotspot/cpu/x86/gc/shared/barrierSetAssembler_x86.cpp +++ b/src/hotspot/cpu/x86/gc/shared/barrierSetAssembler_x86.cpp @@ -354,9 +354,6 @@ void BarrierSetAssembler::tlab_allocate(MacroAssembler* masm, #ifdef _LP64 void BarrierSetAssembler::nmethod_entry_barrier(MacroAssembler* masm, Label* slow_path, Label* continuation) { BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod(); - if (bs_nm == nullptr) { - return; - } Register thread = r15_thread; Address disarmed_addr(thread, in_bytes(bs_nm->thread_disarmed_guard_value_offset())); // The immediate is the last 4 bytes, so if we align the start of the cmp @@ -381,10 +378,6 @@ void BarrierSetAssembler::nmethod_entry_barrier(MacroAssembler* masm, Label* slo #else void BarrierSetAssembler::nmethod_entry_barrier(MacroAssembler* masm, Label*, Label*) { BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod(); - if (bs_nm == nullptr) { - return; - } - Label continuation; Register tmp = rdi; @@ -401,11 +394,6 @@ void BarrierSetAssembler::nmethod_entry_barrier(MacroAssembler* masm, Label*, La #endif void BarrierSetAssembler::c2i_entry_barrier(MacroAssembler* masm) { - BarrierSetNMethod* bs = BarrierSet::barrier_set()->barrier_set_nmethod(); - if (bs == nullptr) { - return; - } - Label bad_call; __ cmpptr(rbx, 0); // rbx contains the incoming method for c2i adapters. 
__ jcc(Assembler::equal, bad_call); diff --git a/src/hotspot/cpu/x86/stubGenerator_x86_32.cpp b/src/hotspot/cpu/x86/stubGenerator_x86_32.cpp index 27dc804a73d28..9ec556777b07d 100644 --- a/src/hotspot/cpu/x86/stubGenerator_x86_32.cpp +++ b/src/hotspot/cpu/x86/stubGenerator_x86_32.cpp @@ -4211,10 +4211,7 @@ class StubGenerator: public StubCodeGenerator { // arraycopy stubs used by compilers generate_arraycopy_stubs(); - BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod(); - if (bs_nm != nullptr) { - StubRoutines::_method_entry_barrier = generate_method_entry_barrier(); - } + StubRoutines::_method_entry_barrier = generate_method_entry_barrier(); } void generate_compiler_stubs() { diff --git a/src/hotspot/cpu/x86/stubGenerator_x86_64.cpp b/src/hotspot/cpu/x86/stubGenerator_x86_64.cpp index d2a0c81b2c98a..a27748ac4725c 100644 --- a/src/hotspot/cpu/x86/stubGenerator_x86_64.cpp +++ b/src/hotspot/cpu/x86/stubGenerator_x86_64.cpp @@ -4136,10 +4136,7 @@ void StubGenerator::generate_final_stubs() { // arraycopy stubs used by compilers generate_arraycopy_stubs(); - BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod(); - if (bs_nm != nullptr) { - StubRoutines::_method_entry_barrier = generate_method_entry_barrier(); - } + StubRoutines::_method_entry_barrier = generate_method_entry_barrier(); #ifdef COMPILER2 if (UseSecondarySupersTable) { diff --git a/src/hotspot/cpu/x86/x86_64.ad b/src/hotspot/cpu/x86/x86_64.ad index 8cc4a970bfd9f..1116908eff165 100644 --- a/src/hotspot/cpu/x86/x86_64.ad +++ b/src/hotspot/cpu/x86/x86_64.ad @@ -818,7 +818,7 @@ void MachPrologNode::format(PhaseRegAlloc* ra_, outputStream* st) const { st->print("# stack alignment check"); #endif } - if (C->stub_function() != nullptr && BarrierSet::barrier_set()->barrier_set_nmethod() != nullptr) { + if (C->stub_function() == nullptr) { st->print("\n\t"); st->print("cmpl [r15_thread + #disarmed_guard_value_offset], #disarmed_guard_value\t"); st->print("\n\t"); diff
--git a/src/hotspot/share/code/codeCache.cpp b/src/hotspot/share/code/codeCache.cpp index 3b237e9643e6a..e59417466d6d9 100644 --- a/src/hotspot/share/code/codeCache.cpp +++ b/src/hotspot/share/code/codeCache.cpp @@ -868,10 +868,7 @@ void CodeCache::on_gc_marking_cycle_finish() { } void CodeCache::arm_all_nmethods() { - BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod(); - if (bs_nm != nullptr) { - bs_nm->arm_all_nmethods(); - } + BarrierSet::barrier_set()->barrier_set_nmethod()->arm_all_nmethods(); } // Mark nmethods for unloading if they contain otherwise unreachable oops. diff --git a/src/hotspot/share/code/nmethod.cpp b/src/hotspot/share/code/nmethod.cpp index 23a2dee4d3577..9a7a16d1ee3c4 100644 --- a/src/hotspot/share/code/nmethod.cpp +++ b/src/hotspot/share/code/nmethod.cpp @@ -688,10 +688,6 @@ address nmethod::oops_reloc_begin() const { return code_begin() + frame_complete_offset(); } - // It is not safe to read oops concurrently using entry barriers, if their - // location depend on whether the nmethod is entrant or not. 
- // assert(BarrierSet::barrier_set()->barrier_set_nmethod() == nullptr, "Not safe oop scan"); - address low_boundary = verified_entry_point(); if (!is_in_use()) { low_boundary += NativeJump::instruction_size; diff --git a/src/hotspot/share/gc/g1/g1NMethodClosure.cpp b/src/hotspot/share/gc/g1/g1NMethodClosure.cpp index 8dca2d7cccf7b..8dc817f145224 100644 --- a/src/hotspot/share/gc/g1/g1NMethodClosure.cpp +++ b/src/hotspot/share/gc/g1/g1NMethodClosure.cpp @@ -84,9 +84,7 @@ void G1NMethodClosure::do_evacuation_and_fixup(nmethod* nm) { nm->mark_as_maybe_on_stack(); BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod(); - if (bs_nm != nullptr) { - bs_nm->disarm(nm); - } + bs_nm->disarm(nm); } nm->fix_oop_relocations(); @@ -100,9 +98,7 @@ void G1NMethodClosure::do_marking(nmethod* nm) { nm->mark_as_maybe_on_stack(); BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod(); - if (bs_nm != nullptr) { - bs_nm->disarm(nm); - } + bs_nm->disarm(nm); // The oops were only marked, no need to update oop relocations. 
} diff --git a/src/hotspot/share/gc/shared/barrierSet.cpp b/src/hotspot/share/gc/shared/barrierSet.cpp index 65ad476adc426..a30b23ce2d996 100644 --- a/src/hotspot/share/gc/shared/barrierSet.cpp +++ b/src/hotspot/share/gc/shared/barrierSet.cpp @@ -86,9 +86,7 @@ BarrierSet::BarrierSet(BarrierSetAssembler* barrier_set_assembler, void BarrierSet::on_thread_attach(Thread* thread) { BarrierSetNMethod* bs_nm = barrier_set_nmethod(); - if (bs_nm != nullptr) { - thread->set_nmethod_disarmed_guard_value(bs_nm->disarmed_guard_value()); - } + thread->set_nmethod_disarmed_guard_value(bs_nm->disarmed_guard_value()); } // Called from init.cpp diff --git a/src/hotspot/share/gc/shenandoah/shenandoahBarrierSet.cpp b/src/hotspot/share/gc/shenandoah/shenandoahBarrierSet.cpp index 7241869de5680..446e19144074e 100644 --- a/src/hotspot/share/gc/shenandoah/shenandoahBarrierSet.cpp +++ b/src/hotspot/share/gc/shenandoah/shenandoahBarrierSet.cpp @@ -125,9 +125,7 @@ void ShenandoahBarrierSet::on_thread_attach(Thread *thread) { ShenandoahThreadLocalData::initialize_gclab(thread); BarrierSetNMethod* bs_nm = barrier_set_nmethod(); - if (bs_nm != nullptr) { - thread->set_nmethod_disarmed_guard_value(bs_nm->disarmed_guard_value()); - } + thread->set_nmethod_disarmed_guard_value(bs_nm->disarmed_guard_value()); if (ShenandoahStackWatermarkBarrier) { JavaThread* const jt = JavaThread::cast(thread); diff --git a/src/hotspot/share/interpreter/interpreterRuntime.cpp b/src/hotspot/share/interpreter/interpreterRuntime.cpp index 2377c74868499..52eab4f796e6b 100644 --- a/src/hotspot/share/interpreter/interpreterRuntime.cpp +++ b/src/hotspot/share/interpreter/interpreterRuntime.cpp @@ -1033,7 +1033,7 @@ nmethod* InterpreterRuntime::frequency_counter_overflow(JavaThread* current, add int bci = method->bci_from(last_frame.bcp()); nm = method->lookup_osr_nmethod_for(bci, CompLevel_none, false); BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod(); - if (nm != nullptr && bs_nm != nullptr) 
{ + if (nm != nullptr) { // in case the transition passed a safepoint we need to barrier this again if (!bs_nm->nmethod_osr_entry_barrier(nm)) { nm = nullptr; @@ -1074,7 +1074,7 @@ JRT_ENTRY(nmethod*, nmethod* osr_nm = CompilationPolicy::event(method, method, branch_bci, bci, CompLevel_none, nullptr, CHECK_NULL); BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod(); - if (osr_nm != nullptr && bs_nm != nullptr) { + if (osr_nm != nullptr) { if (!bs_nm->nmethod_osr_entry_barrier(osr_nm)) { osr_nm = nullptr; } diff --git a/src/hotspot/share/jvmci/jvmciCompilerToVMInit.cpp b/src/hotspot/share/jvmci/jvmciCompilerToVMInit.cpp index a2f6f7f6d5213..a729e34a2796b 100644 --- a/src/hotspot/share/jvmci/jvmciCompilerToVMInit.cpp +++ b/src/hotspot/share/jvmci/jvmciCompilerToVMInit.cpp @@ -165,13 +165,11 @@ void CompilerToVM::Data::initialize(JVMCI_TRAPS) { SharedRuntime_throw_delayed_StackOverflowError_entry = SharedRuntime::throw_delayed_StackOverflowError_entry(); BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod(); - if (bs_nm != nullptr) { - thread_disarmed_guard_value_offset = in_bytes(bs_nm->thread_disarmed_guard_value_offset()); - nmethod_entry_barrier = StubRoutines::method_entry_barrier(); - BarrierSetAssembler* bs_asm = BarrierSet::barrier_set()->barrier_set_assembler(); - AARCH64_ONLY(BarrierSetAssembler_nmethod_patching_type = (int) bs_asm->nmethod_patching_type()); - AARCH64_ONLY(BarrierSetAssembler_patching_epoch_addr = bs_asm->patching_epoch_addr()); - } + thread_disarmed_guard_value_offset = in_bytes(bs_nm->thread_disarmed_guard_value_offset()); + nmethod_entry_barrier = StubRoutines::method_entry_barrier(); + BarrierSetAssembler* bs_asm = BarrierSet::barrier_set()->barrier_set_assembler(); + AARCH64_ONLY(BarrierSetAssembler_nmethod_patching_type = (int) bs_asm->nmethod_patching_type()); + AARCH64_ONLY(BarrierSetAssembler_patching_epoch_addr = bs_asm->patching_epoch_addr()); #if INCLUDE_ZGC if (UseZGC) { diff --git 
a/src/hotspot/share/jvmci/jvmciEnv.cpp b/src/hotspot/share/jvmci/jvmciEnv.cpp index 78f5bca476957..e628a1b2e512f 100644 --- a/src/hotspot/share/jvmci/jvmciEnv.cpp +++ b/src/hotspot/share/jvmci/jvmciEnv.cpp @@ -1864,9 +1864,7 @@ CodeBlob* JVMCIEnv::get_code_blob(JVMCIObject obj) { void JVMCINMethodHandle::set_nmethod(nmethod* nm) { BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod(); - if (bs_nm != nullptr) { - bs_nm->nmethod_entry_barrier(nm); - } + bs_nm->nmethod_entry_barrier(nm); _thread->set_live_nmethod(nm); } diff --git a/src/hotspot/share/memory/iterator.cpp b/src/hotspot/share/memory/iterator.cpp index 9569822e03f6f..961130c2b3f0a 100644 --- a/src/hotspot/share/memory/iterator.cpp +++ b/src/hotspot/share/memory/iterator.cpp @@ -58,9 +58,7 @@ void MarkingNMethodClosure::do_nmethod(nmethod* nm) { nm->mark_as_maybe_on_stack(); BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod(); - if (bs_nm != nullptr) { - bs_nm->disarm(nm); - } + bs_nm->disarm(nm); } if (_fix_relocations) {