comparison src/share/vm/c1/c1_LIRGenerator.cpp @ 3249:e1162778c1c8

7009266: G1: assert(obj->is_oop_or_null(true )) failed: Error
Summary: A referent object that is only weakly reachable at the start of concurrent marking but is re-attached to the strongly reachable object graph during marking may not be marked as live. This can cause the reference object to be processed prematurely and leave dangling pointers to the referent object. Implement a read barrier for the java.lang.ref.Reference::referent field by intrinsifying the Reference.get() method, and intercepting accesses through JNI, reflection, and Unsafe, so that when a non-null referent object is read it is also logged in an SATB buffer.
Reviewed-by: kvn, iveresov, never, tonyp, dholmes
author johnc
date Thu, 07 Apr 2011 09:53:20 -0700
parents 8033953d67ff
children 59766fd005ff
comparison: 3248:e6beb62de02d → 3249:e1162778c1c8
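
The core idea of the changeset, stated independently of the LIR details below: when concurrent marking is active and a referent is read through Reference.get() (or through Unsafe, JNI, or reflection), the value that is returned must also be enqueued in the thread's SATB log buffer so the marker treats it as live. The following is a minimal standalone C++ sketch of that invariant, not HotSpot code; the names object, reference, marking_active, and satb_buffer are illustrative stand-ins for HotSpot's per-thread SATB machinery.

    #include <cstddef>
    #include <vector>

    // Illustrative stand-ins, not HotSpot types.
    struct object {};                         // a heap object
    static bool marking_active = false;       // models the per-thread "SATB active" flag
    static std::vector<object*> satb_buffer;  // models the per-thread SATB log buffer

    struct reference {
      object* referent;                       // java.lang.ref.Reference::referent
    };

    // Conceptual equivalent of the intrinsified Reference.get(): load the
    // referent and, if marking is in progress and the value is non-null,
    // record it in the SATB buffer (the pre-barrier applied to the loaded
    // value) before handing it back to the caller.
    object* reference_get(reference* ref) {
      object* result = ref->referent;         // the LIR load in do_Reference_get
      if (marking_active && result != NULL) { // the flag test done by the pre-barrier
        satb_buffer.push_back(result);        // models the G1 pre-barrier slow path
      }
      return result;
    }

    int main() {
      object o;
      reference r = { &o };
      marking_active = true;
      object* obj = reference_get(&r);        // obj is returned and also logged for marking
      return obj == &o ? 0 : 1;
    }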
@@ -1102 +1102 @@
     __ return_op(result.result());
   }
   set_no_result(x);
 }
 
+// Example: ref.get()
+// Combination of LoadField and g1 pre-write barrier
+void LIRGenerator::do_Reference_get(Intrinsic* x) {
+
+  const int referent_offset = java_lang_ref_Reference::referent_offset;
+  guarantee(referent_offset > 0, "referent offset not initialized");
+
+  assert(x->number_of_arguments() == 1, "wrong type");
+
+  LIRItem reference(x->argument_at(0), this);
+  reference.load_item();
+
+  // need to perform the null check on the reference object
+  CodeEmitInfo* info = NULL;
+  if (x->needs_null_check()) {
+    info = state_for(x);
+  }
+
+  LIR_Address* referent_field_adr =
+    new LIR_Address(reference.result(), referent_offset, T_OBJECT);
+
+  LIR_Opr result = rlock_result(x);
+
+  __ load(referent_field_adr, result, info);
+
+  // Register the value in the referent field with the pre-barrier
+  pre_barrier(LIR_OprFact::illegalOpr /* addr_opr */,
+              result /* pre_val */,
+              false /* do_load */,
+              false /* patch */,
+              NULL /* info */);
+}
 
 // Example: object.getClass ()
 void LIRGenerator::do_getClass(Intrinsic* x) {
   assert(x->number_of_arguments() == 1, "wrong type");
 
@@ -1244 +1276 @@
   return result;
 }
 
 // Various barriers
 
-void LIRGenerator::pre_barrier(LIR_Opr addr_opr, bool patch, CodeEmitInfo* info) {
+void LIRGenerator::pre_barrier(LIR_Opr addr_opr, LIR_Opr pre_val,
+                               bool do_load, bool patch, CodeEmitInfo* info) {
   // Do the pre-write barrier, if any.
   switch (_bs->kind()) {
 #ifndef SERIALGC
     case BarrierSet::G1SATBCT:
     case BarrierSet::G1SATBCTLogging:
-      G1SATBCardTableModRef_pre_barrier(addr_opr, patch, info);
+      G1SATBCardTableModRef_pre_barrier(addr_opr, pre_val, do_load, patch, info);
       break;
 #endif // SERIALGC
     case BarrierSet::CardTableModRef:
     case BarrierSet::CardTableExtension:
       // No pre barriers
@@ -1291 +1324 @@
 }
 
 ////////////////////////////////////////////////////////////////////////
 #ifndef SERIALGC
 
-void LIRGenerator::G1SATBCardTableModRef_pre_barrier(LIR_Opr addr_opr, bool patch, CodeEmitInfo* info) {
-  if (G1DisablePreBarrier) return;
-
+void LIRGenerator::G1SATBCardTableModRef_pre_barrier(LIR_Opr addr_opr, LIR_Opr pre_val,
+                                                     bool do_load, bool patch, CodeEmitInfo* info) {
   // First we test whether marking is in progress.
   BasicType flag_type;
   if (in_bytes(PtrQueue::byte_width_of_active()) == 4) {
     flag_type = T_INT;
   } else {
@@ -1312 +1344 @@
                              PtrQueue::byte_offset_of_active()),
                     flag_type);
   // Read the marking-in-progress flag.
   LIR_Opr flag_val = new_register(T_INT);
   __ load(mark_active_flag_addr, flag_val);
-
-  LIR_PatchCode pre_val_patch_code =
-    patch ? lir_patch_normal : lir_patch_none;
-
-  LIR_Opr pre_val = new_register(T_OBJECT);
-
   __ cmp(lir_cond_notEqual, flag_val, LIR_OprFact::intConst(0));
-  if (!addr_opr->is_address()) {
-    assert(addr_opr->is_register(), "must be");
-    addr_opr = LIR_OprFact::address(new LIR_Address(addr_opr, T_OBJECT));
-  }
-  CodeStub* slow = new G1PreBarrierStub(addr_opr, pre_val, pre_val_patch_code,
-                                        info);
+
+  LIR_PatchCode pre_val_patch_code = lir_patch_none;
+
+  CodeStub* slow;
+
+  if (do_load) {
+    assert(pre_val == LIR_OprFact::illegalOpr, "sanity");
+    assert(addr_opr != LIR_OprFact::illegalOpr, "sanity");
+
+    if (patch)
+      pre_val_patch_code = lir_patch_normal;
+
+    pre_val = new_register(T_OBJECT);
+
+    if (!addr_opr->is_address()) {
+      assert(addr_opr->is_register(), "must be");
+      addr_opr = LIR_OprFact::address(new LIR_Address(addr_opr, T_OBJECT));
+    }
+    slow = new G1PreBarrierStub(addr_opr, pre_val, pre_val_patch_code, info);
+  } else {
+    assert(addr_opr == LIR_OprFact::illegalOpr, "sanity");
+    assert(pre_val->is_register(), "must be");
+    assert(pre_val->type() == T_OBJECT, "must be an object");
+    assert(info == NULL, "sanity");
+
+    slow = new G1PreBarrierStub(pre_val);
+  }
+
   __ branch(lir_cond_notEqual, T_INT, slow);
   __ branch_destination(slow->continuation());
 }
 
 void LIRGenerator::G1SATBCardTableModRef_post_barrier(LIR_OprDesc* addr, LIR_OprDesc* new_val) {
-  if (G1DisablePostBarrier) return;
-
   // If the "new_val" is a constant NULL, no barrier is necessary.
   if (new_val->is_constant() &&
       new_val->as_constant_ptr()->as_jobject() == NULL) return;
 
   if (!new_val->is_register()) {
@@ -1553 +1599 @@
   }
 
   if (is_oop) {
     // Do the pre-write barrier, if any.
     pre_barrier(LIR_OprFact::address(address),
+                LIR_OprFact::illegalOpr /* pre_val */,
+                true /* do_load*/,
                 needs_patching,
                 (info ? new CodeEmitInfo(info) : NULL));
   }
 
   if (is_volatile && !needs_patching) {
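
After this changeset, pre_barrier is called in two shapes, both visible above: a store-site form where the barrier is responsible for the old value (do_load == true, an address supplied, pre_val illegal) and a Reference.get()/Unsafe form where the caller has already loaded the value (do_load == false, pre_val supplied, address illegal). A small standalone model of the two modes follows; it is not HotSpot code, the names field_slot and log_value are invented, and the marking-active test of the real barrier is omitted for brevity.

    #include <cassert>
    #include <cstddef>
    #include <vector>

    struct object {};
    static std::vector<object*> satb_buffer;   // models the SATB log buffer

    static void log_value(object* v) {         // models what the slow path records
      if (v != NULL) satb_buffer.push_back(v);
    }

    // Model of LIRGenerator::pre_barrier(addr_opr, pre_val, do_load, ...):
    // exactly one of 'field_slot' (the address) or 'pre_val' is meaningful.
    static void pre_barrier(object** field_slot, object* pre_val, bool do_load) {
      if (do_load) {
        // Store-site form: the barrier obtains the value about to be
        // overwritten from the field itself (cf. the do_StoreField-style
        // call just above).
        assert(pre_val == NULL && field_slot != NULL);
        log_value(*field_slot);
      } else {
        // Reference.get()/Unsafe form: the caller already holds the value
        // (cf. the do_Reference_get call earlier in this changeset).
        assert(field_slot == NULL && pre_val != NULL);
        log_value(pre_val);
      }
    }

    int main() {
      object a, b;
      object* field = &a;
      pre_barrier(&field, NULL, true);   // before storing a new value into 'field'
      field = &b;
      pre_barrier(NULL, &b, false);      // after loading 'b' via Reference.get()
      return (int)satb_buffer.size() - 2;
    }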
@@ -1982 +2030 @@
   LIRItem off(x->offset(), this);
 
   off.load_item();
   src.load_item();
 
-  LIR_Opr reg = reg = rlock_result(x, x->basic_type());
+  LIR_Opr reg = rlock_result(x, x->basic_type());
 
   get_Object_unsafe(reg, src.result(), off.result(), type, x->is_volatile());
+
+#ifndef SERIALGC
+  // We might be reading the value of the referent field of a
+  // Reference object in order to attach it back to the live
+  // object graph. If G1 is enabled then we need to record
+  // the value that is being returned in an SATB log buffer.
+  //
+  // We need to generate code similar to the following...
+  //
+  // if (offset == java_lang_ref_Reference::referent_offset) {
+  //   if (src != NULL) {
+  //     if (klass(src)->reference_type() != REF_NONE) {
+  //       pre_barrier(..., reg, ...);
+  //     }
+  //   }
+  // }
+  //
+  // The first non-constant check of either the offset or
+  // the src operand will be done here; the remainder
+  // will take place in the generated code stub.
+
+  if (UseG1GC && type == T_OBJECT) {
+    bool gen_code_stub = true;       // Assume we need to generate the slow code stub.
+    bool gen_offset_check = true;    // Assume the code stub has to generate the offset guard.
+    bool gen_source_check = true;    // Assume the code stub has to check the src object for null.
+
+    if (off.is_constant()) {
+      jint off_con = off.get_jint_constant();
+
+      if (off_con != java_lang_ref_Reference::referent_offset) {
+        // The constant offset is something other than referent_offset.
+        // We can skip generating/checking the remaining guards and
+        // skip generation of the code stub.
+        gen_code_stub = false;
+      } else {
+        // The constant offset is the same as referent_offset -
+        // we do not need to generate a runtime offset check.
+        gen_offset_check = false;
+      }
+    }
+
+    // We don't need to generate a stub if the source object is an array.
+    if (gen_code_stub && src.type()->is_array()) {
+      gen_code_stub = false;
+    }
+
+    if (gen_code_stub) {
+      // We still need to continue with the checks.
+      if (src.is_constant()) {
+        ciObject* src_con = src.get_jobject_constant();
+
+        if (src_con->is_null_object()) {
+          // The constant src object is null - we can skip
+          // generating the code stub.
+          gen_code_stub = false;
+        } else {
+          // Non-null constant source object. We still have to generate
+          // the slow stub - but we don't need to generate the runtime
+          // null object check.
+          gen_source_check = false;
+        }
+      }
+    }
+
+    if (gen_code_stub) {
+      // Temporaries.
+      LIR_Opr src_klass = new_register(T_OBJECT);
+
+      // Get the thread pointer for the pre-barrier.
+      LIR_Opr thread = getThreadPointer();
+
+      CodeStub* stub;
+
+      // We can generate at most one runtime check here. Let's start with
+      // the offset check.
+      if (gen_offset_check) {
+        // if (offset == referent_offset) -> slow code stub
+        __ cmp(lir_cond_equal, off.result(),
+               LIR_OprFact::intConst(java_lang_ref_Reference::referent_offset));
+
+        // Optionally generate "src == null" check.
+        stub = new G1UnsafeGetObjSATBBarrierStub(reg, src.result(),
+                                                 src_klass, thread,
+                                                 gen_source_check);
+
+        __ branch(lir_cond_equal, T_INT, stub);
+      } else {
+        if (gen_source_check) {
+          // offset is a constant and equals referent_offset
+          // if (source != null) -> slow code stub
+          __ cmp(lir_cond_notEqual, src.result(), LIR_OprFact::oopConst(NULL));
+
+          // Since we are generating the "src == null" check here,
+          // there is no need for the code stub to check it again.
+          stub = new G1UnsafeGetObjSATBBarrierStub(reg, src.result(),
+                                                   src_klass, thread,
+                                                   false);
+
+          __ branch(lir_cond_notEqual, T_OBJECT, stub);
+        } else {
+          // We have statically determined that offset == referent_offset
+          // && src != null so we unconditionally branch to the code stub
+          // to perform the guards and record reg in the SATB log buffer.
+
+          stub = new G1UnsafeGetObjSATBBarrierStub(reg, src.result(),
+                                                   src_klass, thread,
+                                                   false);
+
+          __ branch(lir_cond_always, T_ILLEGAL, stub);
+        }
+      }
+
+      // Continuation point
+      __ branch_destination(stub->continuation());
+    }
+  }
+#endif // SERIALGC
+
   if (x->is_volatile() && os::is_MP()) __ membar_acquire();
 }
 
 
 void LIRGenerator::do_UnsafePutObject(UnsafePutObject* x) {
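
The guard-elimination logic above can be summarized in a standalone way: the compiler statically discharges whichever of the conditions (offset == referent_offset, src != NULL) it can prove from constants, emits at most one runtime check inline, and leaves the remaining guards, including the reference_type() test, to the code stub. The following sketch is not HotSpot code; is_ref_type, slow_stub, and the placeholder referent_offset value are invented names standing in for the stub and the real field offset, and the array filtering done above is omitted.

    #include <cstddef>
    #include <vector>

    struct object { bool is_reference; };     // invented: "klass has a REF_* reference type"
    static std::vector<object*> satb_buffer;

    static const long referent_offset = 16;   // invented placeholder value

    // Model of the slow stub: perform any remaining guards and, if they all
    // pass, record the loaded value in the SATB buffer.
    static void slow_stub(object* value, object* src, bool check_src_null) {
      if (check_src_null && src == NULL) return;
      if (!src->is_reference) return;         // klass(src)->reference_type() != REF_NONE
      if (value != NULL) satb_buffer.push_back(value);
    }

    // Model of the fast-path decisions in do_UnsafeGetObject: constant-fold
    // the offset and src checks where possible, emit at most one runtime
    // check, and defer the rest to the stub.
    static void unsafe_get_barrier(object* value, object* src, long offset,
                                   bool offset_is_constant, bool src_is_constant) {
      if (offset_is_constant && offset != referent_offset) return;  // no stub at all
      if (src_is_constant && src == NULL) return;                   // no stub at all
      bool gen_offset_check = !offset_is_constant;
      bool gen_source_check = !(src_is_constant && src != NULL);

      if (gen_offset_check) {
        if (offset == referent_offset)                 // the single runtime check
          slow_stub(value, src, gen_source_check);     // stub may still check src
      } else if (gen_source_check) {
        if (src != NULL)                               // the single runtime check
          slow_stub(value, src, false);
      } else {
        slow_stub(value, src, false);                  // unconditional branch to the stub
      }
    }

    int main() {
      object ref = { true };
      object val = { false };
      unsafe_get_barrier(&val, &ref, referent_offset, false, false);
      return (int)satb_buffer.size() - 1;   // expect exactly one logged value
    }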
@@ -2650 +2816 @@
   // sun.misc.AtomicLongCSImpl.attemptUpdate
   case vmIntrinsics::_attemptUpdate:
     do_AttemptUpdate(x);
     break;
 
+  case vmIntrinsics::_Reference_get:
+    do_Reference_get(x);
+    break;
+
   default: ShouldNotReachHere(); break;
   }
 }
 
 void LIRGenerator::do_ProfileCall(ProfileCall* x) {