Skip to content

Commit e78cfbe

Browse files
committed
8369946: Bytecode rewriting causes Java heap corruption on PPC
Backport-of: f5b155bda2088c269bef221cbf4a7d1909d1b2ef
1 parent 74b7d69 commit e78cfbe

File tree

3 files changed

+19
-7
lines changed

3 files changed

+19
-7
lines changed

src/hotspot/cpu/ppc/interp_masm_ppc.hpp

Lines changed: 2 additions & 1 deletion
```diff
@@ -126,7 +126,8 @@ class InterpreterMacroAssembler: public MacroAssembler {

   void get_cache_index_at_bcp(Register Rdst, int bcp_offset, size_t index_size);

-  void get_cache_and_index_at_bcp(Register cache, int bcp_offset, size_t index_size = sizeof(u2));
+  void get_cache_and_index_at_bcp(Register cache, int bcp_offset, size_t index_size = sizeof(u2),
+                                  bool for_fast_bytecode = false);

   void get_u4(Register Rdst, Register Rsrc, int offset, signedOrNot is_signed);
```

src/hotspot/cpu/ppc/interp_masm_ppc_64.cpp

Lines changed: 9 additions & 1 deletion
```diff
@@ -450,10 +450,18 @@ void InterpreterMacroAssembler::get_cache_index_at_bcp(Register Rdst, int bcp_of
 }

 void InterpreterMacroAssembler::get_cache_and_index_at_bcp(Register cache, int bcp_offset,
-                                                           size_t index_size) {
+                                                           size_t index_size, bool for_fast_bytecode) {
   get_cache_index_at_bcp(cache, bcp_offset, index_size);
   sldi(cache, cache, exact_log2(in_words(ConstantPoolCacheEntry::size()) * BytesPerWord));
   add(cache, R27_constPoolCache, cache);
+
+  if (for_fast_bytecode) {
+    // Prevent speculative loading from ConstantPoolCacheEntry as it can miss the info written by another thread.
+    // TemplateTable::patch_bytecode uses release-store.
+    // We reached here via control dependency (Bytecode dispatch has used the rewritten Bytecode).
+    // So, we can use control-isync based ordering.
+    isync();
+  }
 }

 // Load 4-byte signed or unsigned integer in Java format (that is, big-endian format)
```

src/hotspot/cpu/ppc/templateTable_ppc_64.cpp

Lines changed: 8 additions & 5 deletions
```diff
@@ -146,7 +146,9 @@ void TemplateTable::patch_bytecode(Bytecodes::Code new_bc, Register Rnew_bc, Reg
     __ bind(L_fast_patch);
   }

-  // Patch bytecode.
+  // Patch bytecode with release store to coordinate with ConstantPoolCacheEntry
+  // loads in fast bytecode codelets.
+  __ release();
   __ stb(Rnew_bc, 0, R14_bcp);

   __ bind(L_patch_done);
@@ -310,6 +312,7 @@ void TemplateTable::fast_aldc(bool wide) {
   // We are resolved if the resolved reference cache entry contains a
   // non-null object (CallSite, etc.)
   __ get_cache_index_at_bcp(R31, 1, index_size);  // Load index.
+  // Only rewritten during link time. So, no need for memory barriers for accessing resolved info.
   __ load_resolved_reference_at_index(R17_tos, R31, R11_scratch1, R12_scratch2, &is_null);

   // Convert null sentinel to NULL
@@ -2309,7 +2312,7 @@ void TemplateTable::load_invoke_cp_cache_entry(int byte_no,
   if (is_invokevfinal) {
     assert(Ritable_index == noreg, "register not used");
     // Already resolved.
-    __ get_cache_and_index_at_bcp(Rcache, 1);
+    __ get_cache_and_index_at_bcp(Rcache, 1, sizeof(u2), /* for_fast_bytecode */ true);
   } else {
     resolve_cache_and_index(byte_no, Rcache, /* temp */ Rmethod, is_invokedynamic ? sizeof(u4) : sizeof(u2));
   }
@@ -3016,7 +3019,7 @@ void TemplateTable::fast_storefield(TosState state) {
   const ConditionRegister CR_is_vol = CCR2; // Non-volatile condition register (survives runtime call in do_oop_store).

   // Constant pool already resolved => Load flags and offset of field.
-  __ get_cache_and_index_at_bcp(Rcache, 1);
+  __ get_cache_and_index_at_bcp(Rcache, 1, sizeof(u2), /* for_fast_bytecode */ true);
   jvmti_post_field_mod(Rcache, Rscratch, false /* not static */);
   load_field_cp_cache_entry(noreg, Rcache, noreg, Roffset, Rflags, false); // Uses R11, R12

@@ -3097,7 +3100,7 @@ void TemplateTable::fast_accessfield(TosState state) {
   // R12_scratch2 used by load_field_cp_cache_entry

   // Constant pool already resolved. Get the field offset.
-  __ get_cache_and_index_at_bcp(Rcache, 1);
+  __ get_cache_and_index_at_bcp(Rcache, 1, sizeof(u2), /* for_fast_bytecode */ true);
   load_field_cp_cache_entry(noreg, Rcache, noreg, Roffset, Rflags, false); // Uses R11, R12

   // JVMTI support
@@ -3236,7 +3239,7 @@ void TemplateTable::fast_xaccess(TosState state) {
   __ ld(Rclass_or_obj, 0, R18_locals);

   // Constant pool already resolved. Get the field offset.
-  __ get_cache_and_index_at_bcp(Rcache, 2);
+  __ get_cache_and_index_at_bcp(Rcache, 2, sizeof(u2), /* for_fast_bytecode */ true);
   load_field_cp_cache_entry(noreg, Rcache, noreg, Roffset, Rflags, false); // Uses R11, R12

   // JVMTI support not needed, since we switch back to single bytecode as soon as debugger attaches.
```

0 commit comments

Comments
 (0)