comparison src/share/vm/c1/c1_LIRGenerator.cpp @ 3464:be4ca325525a

Merge.
author Thomas Wuerthinger <thomas@wuerthinger.net>
date Wed, 27 Jul 2011 17:32:44 -0700
parents 0654ee04b214 3275a6560cf7
children 04b9a2566eec
comparison
equal deleted inserted replaced
3239:7c4b4daac19b 3464:be4ca325525a
1207 __ return_op(result.result()); 1207 __ return_op(result.result());
1208 } 1208 }
1209 set_no_result(x); 1209 set_no_result(x);
1210 } 1210 }
1211 1211
1212 // Example: ref.get()
1213 // Combination of LoadField and G1 pre-write barrier.
1214 void LIRGenerator::do_Reference_get(Intrinsic* x) {
1215
1216 const int referent_offset = java_lang_ref_Reference::referent_offset;
1217 guarantee(referent_offset > 0, "referent offset not initialized");
1218
1219 assert(x->number_of_arguments() == 1, "wrong type");
1220
1221 LIRItem reference(x->argument_at(0), this);
1222 reference.load_item();
1223
1224 // We need to perform the null check on the reference object.
1225 CodeEmitInfo* info = NULL;
1226 if (x->needs_null_check()) {
1227 info = state_for(x);
1228 }
1229
1230 LIR_Address* referent_field_adr =
1231 new LIR_Address(reference.result(), referent_offset, T_OBJECT);
1232
1233 LIR_Opr result = rlock_result(x);
1234
1235 __ load(referent_field_adr, result, info);
1236
1237 // Register the value in the referent field with the pre-barrier;
1238 // the value was already loaded above, so no load is done by the barrier.
1238 pre_barrier(LIR_OprFact::illegalOpr /* addr_opr */,
1239 result /* pre_val */,
1240 false /* do_load */,
1241 false /* patch */,
1242 NULL /* info */);
1243 }
1212 1244
1213 // Example: object.getClass () 1245 // Example: object.getClass ()
1214 void LIRGenerator::do_getClass(Intrinsic* x) { 1246 void LIRGenerator::do_getClass(Intrinsic* x) {
1215 assert(x->number_of_arguments() == 1, "wrong type"); 1247 assert(x->number_of_arguments() == 1, "wrong type");
1216 1248
1349 return result; 1381 return result;
1350 } 1382 }
1351 1383
1352 // Various barriers 1384 // Various barriers
1353 1385
1354 void LIRGenerator::pre_barrier(LIR_Opr addr_opr, bool patch, CodeEmitInfo* info) { 1386 void LIRGenerator::pre_barrier(LIR_Opr addr_opr, LIR_Opr pre_val,
1387 bool do_load, bool patch, CodeEmitInfo* info) {
1355 // Do the pre-write barrier, if any. 1388 // Do the pre-write barrier, if any.
1356 switch (_bs->kind()) { 1389 switch (_bs->kind()) {
1357 #ifndef SERIALGC 1390 #ifndef SERIALGC
1358 case BarrierSet::G1SATBCT: 1391 case BarrierSet::G1SATBCT:
1359 case BarrierSet::G1SATBCTLogging: 1392 case BarrierSet::G1SATBCTLogging:
1360 G1SATBCardTableModRef_pre_barrier(addr_opr, patch, info); 1393 G1SATBCardTableModRef_pre_barrier(addr_opr, pre_val, do_load, patch, info);
1361 break; 1394 break;
1362 #endif // SERIALGC 1395 #endif // SERIALGC
1363 case BarrierSet::CardTableModRef: 1396 case BarrierSet::CardTableModRef:
1364 case BarrierSet::CardTableExtension: 1397 case BarrierSet::CardTableExtension:
1365 // No pre barriers 1398 // No pre barriers
1396 } 1429 }
1397 1430
1398 //////////////////////////////////////////////////////////////////////// 1431 ////////////////////////////////////////////////////////////////////////
1399 #ifndef SERIALGC 1432 #ifndef SERIALGC
1400 1433
1401 void LIRGenerator::G1SATBCardTableModRef_pre_barrier(LIR_Opr addr_opr, bool patch, CodeEmitInfo* info) { 1434 void LIRGenerator::G1SATBCardTableModRef_pre_barrier(LIR_Opr addr_opr, LIR_Opr pre_val,
1402 if (G1DisablePreBarrier) return; 1435 bool do_load, bool patch, CodeEmitInfo* info) {
1403
1404 // First we test whether marking is in progress. 1436 // First we test whether marking is in progress.
1405 BasicType flag_type; 1437 BasicType flag_type;
1406 if (in_bytes(PtrQueue::byte_width_of_active()) == 4) { 1438 if (in_bytes(PtrQueue::byte_width_of_active()) == 4) {
1407 flag_type = T_INT; 1439 flag_type = T_INT;
1408 } else { 1440 } else {
1417 PtrQueue::byte_offset_of_active()), 1449 PtrQueue::byte_offset_of_active()),
1418 flag_type); 1450 flag_type);
1419 // Read the marking-in-progress flag. 1451 // Read the marking-in-progress flag.
1420 LIR_Opr flag_val = new_register(T_INT); 1452 LIR_Opr flag_val = new_register(T_INT);
1421 __ load(mark_active_flag_addr, flag_val); 1453 __ load(mark_active_flag_addr, flag_val);
1422
1423 LIR_PatchCode pre_val_patch_code =
1424 patch ? lir_patch_normal : lir_patch_none;
1425
1426 LIR_Opr pre_val = new_register(T_OBJECT);
1427
1428 __ cmp(lir_cond_notEqual, flag_val, LIR_OprFact::intConst(0)); 1454 __ cmp(lir_cond_notEqual, flag_val, LIR_OprFact::intConst(0));
1429 if (!addr_opr->is_address()) { 1455
1430 assert(addr_opr->is_register(), "must be"); 1456 LIR_PatchCode pre_val_patch_code = lir_patch_none;
1431 addr_opr = LIR_OprFact::address(new LIR_Address(addr_opr, T_OBJECT)); 1457
1432 } 1458 CodeStub* slow;
1433 CodeStub* slow = new G1PreBarrierStub(addr_opr, pre_val, pre_val_patch_code, 1459
1434 info); 1460 if (do_load) {
1461 assert(pre_val == LIR_OprFact::illegalOpr, "sanity");
1462 assert(addr_opr != LIR_OprFact::illegalOpr, "sanity");
1463
1464 if (patch)
1465 pre_val_patch_code = lir_patch_normal;
1466
1467 pre_val = new_register(T_OBJECT);
1468
1469 if (!addr_opr->is_address()) {
1470 assert(addr_opr->is_register(), "must be");
1471 addr_opr = LIR_OprFact::address(new LIR_Address(addr_opr, T_OBJECT));
1472 }
1473 slow = new G1PreBarrierStub(addr_opr, pre_val, pre_val_patch_code, info);
1474 } else {
1475 assert(addr_opr == LIR_OprFact::illegalOpr, "sanity");
1476 assert(pre_val->is_register(), "must be");
1477 assert(pre_val->type() == T_OBJECT, "must be an object");
1478 assert(info == NULL, "sanity");
1479
1480 slow = new G1PreBarrierStub(pre_val);
1481 }
1482
1435 __ branch(lir_cond_notEqual, T_INT, slow); 1483 __ branch(lir_cond_notEqual, T_INT, slow);
1436 __ branch_destination(slow->continuation()); 1484 __ branch_destination(slow->continuation());
1437 } 1485 }
1438 1486
1439 void LIRGenerator::G1SATBCardTableModRef_post_barrier(LIR_OprDesc* addr, LIR_OprDesc* new_val) { 1487 void LIRGenerator::G1SATBCardTableModRef_post_barrier(LIR_OprDesc* addr, LIR_OprDesc* new_val) {
1440 if (G1DisablePostBarrier) return;
1441
1442 // If the "new_val" is a constant NULL, no barrier is necessary. 1488 // If the "new_val" is a constant NULL, no barrier is necessary.
1443 if (new_val->is_constant() && 1489 if (new_val->is_constant() &&
1444 new_val->as_constant_ptr()->as_jobject() == NULL) return; 1490 new_val->as_constant_ptr()->as_jobject() == NULL) return;
1445 1491
1446 if (!new_val->is_register()) { 1492 if (!new_val->is_register()) {
1660 } 1706 }
1661 1707
1662 if (is_oop) { 1708 if (is_oop) {
1663 // Do the pre-write barrier, if any. 1709 // Do the pre-write barrier, if any.
1664 pre_barrier(LIR_OprFact::address(address), 1710 pre_barrier(LIR_OprFact::address(address),
1711 LIR_OprFact::illegalOpr /* pre_val */,
1712 true /* do_load*/,
1665 needs_patching, 1713 needs_patching,
1666 (info ? new CodeEmitInfo(info) : NULL)); 1714 (info ? new CodeEmitInfo(info) : NULL));
1667 } 1715 }
1668 1716
1669 if (is_volatile && !needs_patching) { 1717 if (is_volatile && !needs_patching) {
2089 LIRItem off(x->offset(), this); 2137 LIRItem off(x->offset(), this);
2090 2138
2091 off.load_item(); 2139 off.load_item();
2092 src.load_item(); 2140 src.load_item();
2093 2141
2094 LIR_Opr reg = reg = rlock_result(x, x->basic_type()); 2142 LIR_Opr reg = rlock_result(x, x->basic_type());
2095 2143
2096 get_Object_unsafe(reg, src.result(), off.result(), type, x->is_volatile()); 2144 get_Object_unsafe(reg, src.result(), off.result(), type, x->is_volatile());
2145
2146 #ifndef SERIALGC
2147 // We might be reading the value of the referent field of a
2148 // Reference object in order to attach it back to the live
2149 // object graph. If G1 is enabled then we need to record
2150 // the value that is being returned in an SATB log buffer.
2151 //
2152 // We need to generate code similar to the following...
2153 //
2154 // if (offset == java_lang_ref_Reference::referent_offset) {
2155 // if (src != NULL) {
2156 // if (klass(src)->reference_type() != REF_NONE) {
2157 // pre_barrier(..., reg, ...);
2158 // }
2159 // }
2160 // }
2161 //
2162 // The first non-constant check of either the offset or
2163 // the src operand will be done here; the remainder
2164 // will take place in the generated code stub.
2165
2166 if (UseG1GC && type == T_OBJECT) {
2167 bool gen_code_stub = true; // Assume we need to generate the slow code stub.
2168 bool gen_offset_check = true; // Assume the code stub has to generate the offset guard.
2169 bool gen_source_check = true; // Assume the code stub has to check the src object for null.
2170
2171 if (off.is_constant()) {
2172 jlong off_con = (off.type()->is_int() ?
2173 (jlong) off.get_jint_constant() :
2174 off.get_jlong_constant());
2175
2176
2177 if (off_con != (jlong) java_lang_ref_Reference::referent_offset) {
2178 // The constant offset is something other than referent_offset.
2179 // We can skip generating/checking the remaining guards and
2180 // skip generation of the code stub.
2181 gen_code_stub = false;
2182 } else {
2183 // The constant offset is the same as referent_offset -
2184 // we do not need to generate a runtime offset check.
2185 gen_offset_check = false;
2186 }
2187 }
2188
2189 // We don't need to generate stub if the source object is an array
2190 if (gen_code_stub && src.type()->is_array()) {
2191 gen_code_stub = false;
2192 }
2193
2194 if (gen_code_stub) {
2195 // We still need to continue with the checks.
2196 if (src.is_constant()) {
2197 ciObject* src_con = src.get_jobject_constant();
2198
2199 if (src_con->is_null_object()) {
2200 // The constant src object is null - We can skip
2201 // generating the code stub.
2202 gen_code_stub = false;
2203 } else {
2204 // Non-null constant source object. We still have to generate
2205 // the slow stub - but we don't need to generate the runtime
2206 // null object check.
2207 gen_source_check = false;
2208 }
2209 }
2210 }
2211
2212 if (gen_code_stub) {
2213 // Temporaries.
2214 LIR_Opr src_klass = new_register(T_OBJECT);
2215
2216 // Get the thread pointer for the pre-barrier
2217 LIR_Opr thread = getThreadPointer();
2218
2219 CodeStub* stub;
2220
2221 // We can generate one runtime check here. Let's start with
2222 // the offset check.
2223 if (gen_offset_check) {
2224 // if (offset == referent_offset) -> slow code stub
2225 // If offset is an int then we can do the comparison with the
2226 // referent_offset constant; otherwise we need to move
2227 // referent_offset into a temporary register and generate
2228 // a reg-reg compare.
2229
2230 LIR_Opr referent_off;
2231
2232 if (off.type()->is_int()) {
2233 referent_off = LIR_OprFact::intConst(java_lang_ref_Reference::referent_offset);
2234 } else {
2235 assert(off.type()->is_long(), "what else?");
2236 referent_off = new_register(T_LONG);
2237 __ move(LIR_OprFact::longConst(java_lang_ref_Reference::referent_offset), referent_off);
2238 }
2239
2240 __ cmp(lir_cond_equal, off.result(), referent_off);
2241
2242 // Optionally generate "src == null" check.
2243 stub = new G1UnsafeGetObjSATBBarrierStub(reg, src.result(),
2244 src_klass, thread,
2245 gen_source_check);
2246
2247 __ branch(lir_cond_equal, as_BasicType(off.type()), stub);
2248 } else {
2249 if (gen_source_check) {
2250 // offset is a const and equals referent offset
2251 // if (source != null) -> slow code stub
2252 __ cmp(lir_cond_notEqual, src.result(), LIR_OprFact::oopConst(NULL));
2253
2254 // Since we are generating the "if src == null" guard here,
2255 // there is no need to generate the "src == null" check again.
2256 stub = new G1UnsafeGetObjSATBBarrierStub(reg, src.result(),
2257 src_klass, thread,
2258 false);
2259
2260 __ branch(lir_cond_notEqual, T_OBJECT, stub);
2261 } else {
2262 // We have statically determined that offset == referent_offset
2263 // && src != null so we unconditionally branch to code stub
2264 // to perform the guards and record reg in the SATB log buffer.
2265
2266 stub = new G1UnsafeGetObjSATBBarrierStub(reg, src.result(),
2267 src_klass, thread,
2268 false);
2269
2270 __ branch(lir_cond_always, T_ILLEGAL, stub);
2271 }
2272 }
2273
2274 // Continuation point
2275 __ branch_destination(stub->continuation());
2276 }
2277 }
2278 #endif // SERIALGC
2279
2097 if (x->is_volatile() && os::is_MP()) __ membar_acquire(); 2280 if (x->is_volatile() && os::is_MP()) __ membar_acquire();
2098 } 2281 }
2099 2282
2100 2283
2101 void LIRGenerator::do_UnsafePutObject(UnsafePutObject* x) { 2284 void LIRGenerator::do_UnsafePutObject(UnsafePutObject* x) {
2614 receiver = LIR_Assembler::receiverOpr(); 2797 receiver = LIR_Assembler::receiverOpr();
2615 LIR_Opr tmp = new_register(objectType); 2798 LIR_Opr tmp = new_register(objectType);
2616 2799
2617 // Load CallSite object from constant pool cache. 2800 // Load CallSite object from constant pool cache.
2618 __ oop2reg(cpcache->constant_encoding(), tmp); 2801 __ oop2reg(cpcache->constant_encoding(), tmp);
2619 __ load(new LIR_Address(tmp, (int)call_site_offset, T_OBJECT), tmp); 2802 __ move_wide(new LIR_Address(tmp, (int)call_site_offset, T_OBJECT), tmp);
2620 2803
2621 // Load target MethodHandle from CallSite object. 2804 // Load target MethodHandle from CallSite object.
2622 __ load(new LIR_Address(tmp, java_lang_invoke_CallSite::target_offset_in_bytes(), T_OBJECT), receiver); 2805 __ load(new LIR_Address(tmp, java_lang_invoke_CallSite::target_offset_in_bytes(), T_OBJECT), receiver);
2623 2806
2624 __ call_dynamic(target, receiver, result_register, 2807 __ call_dynamic(target, receiver, result_register,
2755 break; 2938 break;
2756 2939
2757 // sun.misc.AtomicLongCSImpl.attemptUpdate 2940 // sun.misc.AtomicLongCSImpl.attemptUpdate
2758 case vmIntrinsics::_attemptUpdate: 2941 case vmIntrinsics::_attemptUpdate:
2759 do_AttemptUpdate(x); 2942 do_AttemptUpdate(x);
2943 break;
2944
2945 case vmIntrinsics::_Reference_get:
2946 do_Reference_get(x);
2760 break; 2947 break;
2761 2948
2762 default: ShouldNotReachHere(); break; 2949 default: ShouldNotReachHere(); break;
2763 } 2950 }
2764 } 2951 }