Mercurial > hg > truffle
comparison src/share/vm/c1x/c1x_CodeInstaller.cpp @ 1433:efba53f86c4f
various fixes and enhancements
* correct refmap->oopmap conversion (register numbering, stack slot numbering)
* fixes for inlining (correct scoping in exception handler lookup, NPE in scope conversion)
* support for "jump to runtime stub" (patching code needs to be aware of jmp instruction)
* provide more information about methods (to allow inlining: has_balanced_monitors, etc.)
* fixes to signature type lookup
* isSubTypeOf: correct handling of array classes
* RiType: componentType/arrayOf
* prologue: inline cache check, icmiss stub
* klass state check (resolved but not initialized) in newinstance
* card table write barriers
* c1x classes are optional (to allow running c1 without them)
* correct for stored frame pointer in calling conventions (methods with arguments on stack)
* getType(Class<?>) for some basic types, used for optimizations and folding
* RiMethod/RiType: throw exception instead of silent failure on unsupported operations
* RiType: resolved/unresolved array type support
* refactoring: new on-demand template generation mechanism
* optimizations: template specialization for no_null_check, given length, etc.
author | Lukas Stadler <lukas.stadler@oracle.com> |
---|---|
date | Thu, 16 Sep 2010 19:42:20 -0700 |
parents | b61a43cd1255 |
children | 72cfb36c6bb2 |
comparison
equal
deleted
inserted
replaced
1432:b61a43cd1255 | 1433:efba53f86c4f |
---|---|
23 */ | 23 */ |
24 | 24 |
25 # include "incls/_precompiled.incl" | 25 # include "incls/_precompiled.incl" |
26 # include "incls/_c1x_CodeInstaller.cpp.incl" | 26 # include "incls/_c1x_CodeInstaller.cpp.incl" |
27 | 27 |
28 #define C1X_REGISTER_COUNT 32 | 28 // TODO this should be handled in a more robust way - not hard coded... |
29 Register CPU_REGS[] = { rax, rbx, rcx, rdx, rsi, rdi, r11, r12, r13, r14 }; | |
30 const static int NUM_CPU_REGS = 10; | |
31 XMMRegister XMM_REGS[] = { xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7, xmm8, xmm9, xmm10, xmm11, xmm12, xmm13, xmm14, xmm15 }; | |
32 const static int NUM_XMM_REGS = 16; | |
33 const static int NUM_REGS = NUM_CPU_REGS + NUM_XMM_REGS; | |
29 | 34 |
30 // convert c1x register indices (as used in oop maps) to hotspot registers | 35 // convert c1x register indices (as used in oop maps) to hotspot registers |
31 VMReg get_hotspot_reg(jint c1x_reg) { | 36 VMReg get_hotspot_reg(jint c1x_reg) { |
32 Register cpu_registers[] = { rax, rcx, rdx, rbx, rsp, rbp, rsi, rdi, r8, r9, r10, r11, r12, r13, r14, r15 }; | 37 |
33 XMMRegister xmm_registers[] = { xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7, xmm8, xmm9, xmm10, xmm11, xmm12, xmm13, xmm14, xmm15 }; | 38 assert(c1x_reg >= 0 && c1x_reg < NUM_REGS, "invalid register number"); |
34 | 39 if (c1x_reg < NUM_CPU_REGS) { |
35 if (c1x_reg < 16) { | 40 return CPU_REGS[c1x_reg]->as_VMReg(); |
36 return cpu_registers[c1x_reg]->as_VMReg(); | |
37 } else { | 41 } else { |
38 assert(c1x_reg < C1X_REGISTER_COUNT, "invalid register number"); | 42 return XMM_REGS[c1x_reg - NUM_CPU_REGS]->as_VMReg(); |
39 return xmm_registers[c1x_reg - 16]->as_VMReg(); | |
40 } | 43 } |
41 } | 44 } |
42 | 45 |
43 // creates a hotspot oop map out of the byte arrays provided by CiDebugInfo | 46 // creates a hotspot oop map out of the byte arrays provided by CiDebugInfo |
44 static OopMap* create_oop_map(jint frame_size, jint parameter_count, oop debug_info) { | 47 static OopMap* create_oop_map(jint frame_size, jint parameter_count, oop debug_info) { |
45 OopMap* map = new OopMap(frame_size, parameter_count); | 48 OopMap* map = new OopMap(frame_size, parameter_count); |
46 arrayOop register_map = (arrayOop) CiDebugInfo::registerRefMap(debug_info); | 49 arrayOop register_map = (arrayOop) CiDebugInfo::registerRefMap(debug_info); |
47 arrayOop frame_map = (arrayOop) CiDebugInfo::frameRefMap(debug_info); | 50 arrayOop frame_map = (arrayOop) CiDebugInfo::frameRefMap(debug_info); |
48 | 51 |
49 for (jint i = 0; i < C1X_REGISTER_COUNT; i++) { | 52 assert(register_map->length() == (NUM_REGS + 7) / 8, "unexpected register_map length"); |
53 | |
54 for (jint i = 0; i < NUM_REGS; i++) { | |
50 unsigned char byte = ((unsigned char*) register_map->base(T_BYTE))[i / 8]; | 55 unsigned char byte = ((unsigned char*) register_map->base(T_BYTE))[i / 8]; |
51 bool is_oop = (byte & (1 << (i % 8))) != 0; | 56 bool is_oop = (byte & (1 << (i % 8))) != 0; |
52 VMReg reg = get_hotspot_reg(i); | 57 VMReg reg = get_hotspot_reg(i); |
53 if (is_oop) { | 58 if (is_oop) { |
54 map->set_oop(reg); | 59 map->set_oop(reg); |
55 } else { | 60 } else { |
56 map->set_value(reg); | 61 map->set_value(reg); |
57 } | 62 } |
58 } | 63 } |
59 | 64 |
60 for (jint i = 0; i < frame_size; i++) { | 65 if (frame_size > 0) { |
61 unsigned char byte = ((unsigned char*) frame_map->base(T_BYTE))[i / 8]; | 66 assert(frame_map->length() == ((frame_size / HeapWordSize) + 7) / 8, "unexpected frame_map length"); |
62 bool is_oop = (byte & (1 << (i % 8))) != 0; | 67 |
63 VMReg reg = VMRegImpl::stack2reg(i); | 68 for (jint i = 0; i < frame_size / HeapWordSize; i++) { |
64 if (is_oop) { | 69 unsigned char byte = ((unsigned char*) frame_map->base(T_BYTE))[i / 8]; |
65 map->set_oop(reg); | 70 bool is_oop = (byte & (1 << (i % 8))) != 0; |
66 } else { | 71 // hotspot stack slots are 4 bytes |
67 map->set_value(reg); | 72 VMReg reg = VMRegImpl::stack2reg(i * 2); |
68 } | 73 if (is_oop) { |
69 } | 74 map->set_oop(reg); |
70 | 75 } else { |
71 // TODO parameters? | 76 map->set_value(reg); |
77 } | |
78 } | |
79 } else { | |
80 assert(frame_map == NULL || frame_map->length() == 0, "cannot have frame_map for frames with size 0"); | |
81 } | |
82 | |
72 return map; | 83 return map; |
73 } | 84 } |
74 | 85 |
75 // TODO: finish this - c1x doesn't provide any scope values at the moment | 86 // TODO: finish this - c1x doesn't provide any scope values at the moment |
76 static ScopeValue* get_hotspot_value(oop value) { | 87 static ScopeValue* get_hotspot_value(oop value) { |
258 } | 269 } |
259 | 270 |
260 void CodeInstaller::record_scope(jint pc_offset, oop code_pos, oop frame) { | 271 void CodeInstaller::record_scope(jint pc_offset, oop code_pos, oop frame) { |
261 oop caller_pos = CiCodePos::caller(code_pos); | 272 oop caller_pos = CiCodePos::caller(code_pos); |
262 if (caller_pos != NULL) { | 273 if (caller_pos != NULL) { |
263 oop caller_frame = CiDebugInfo_Frame::caller(frame); | 274 oop caller_frame = frame == NULL ? NULL : CiDebugInfo_Frame::caller(frame); |
264 record_scope(pc_offset, caller_pos, caller_frame); | 275 record_scope(pc_offset, caller_pos, caller_frame); |
265 } else { | 276 } else { |
266 assert(frame == NULL || CiDebugInfo_Frame::caller(frame) == NULL, "unexpected layout - mismatching nesting of Frame and CiCodePos"); | 277 assert(frame == NULL || CiDebugInfo_Frame::caller(frame) == NULL, "unexpected layout - mismatching nesting of Frame and CiCodePos"); |
267 } | 278 } |
268 | 279 |
334 arrayOop stack_map = (arrayOop) CiTargetMethod_Call::stackMap(site); | 345 arrayOop stack_map = (arrayOop) CiTargetMethod_Call::stackMap(site); |
335 arrayOop register_map = (arrayOop) CiTargetMethod_Call::registerMap(site); | 346 arrayOop register_map = (arrayOop) CiTargetMethod_Call::registerMap(site); |
336 | 347 |
337 assert((runtime_call ? 1 : 0) + (hotspot_method ? 1 : 0) + (symbol ? 1 : 0) + (global_stub ? 1 : 0) == 1, "Call site needs exactly one type"); | 348 assert((runtime_call ? 1 : 0) + (hotspot_method ? 1 : 0) + (symbol ? 1 : 0) + (global_stub ? 1 : 0) == 1, "Call site needs exactly one type"); |
338 | 349 |
339 NativeCall* call = nativeCall_at(_instructions->start() + pc_offset); | 350 assert(NativeCall::instruction_size == (int)NativeJump::instruction_size, "unexpected size"); |
340 jint next_pc_offset = pc_offset + NativeCall::instruction_size; | 351 jint next_pc_offset = pc_offset + NativeCall::instruction_size; |
341 | 352 |
342 if (debug_info != NULL) { | 353 if (debug_info != NULL) { |
343 _debug_recorder->add_safepoint(next_pc_offset, create_oop_map(_frame_size, _parameter_count, debug_info)); | 354 _debug_recorder->add_safepoint(next_pc_offset, create_oop_map(_frame_size, _parameter_count, debug_info)); |
344 oop code_pos = CiDebugInfo::codePos(debug_info); | 355 oop code_pos = CiDebugInfo::codePos(debug_info); |
345 oop frame = CiDebugInfo::frame(debug_info); | 356 oop frame = CiDebugInfo::frame(debug_info); |
346 record_scope(next_pc_offset, code_pos, frame); | 357 record_scope(next_pc_offset, code_pos, frame); |
347 } | 358 } |
348 | 359 |
349 if (runtime_call != NULL) { | 360 if (runtime_call != NULL) { |
361 NativeCall* call = nativeCall_at(_instructions->start() + pc_offset); | |
350 if (runtime_call == CiRuntimeCall::Debug()) { | 362 if (runtime_call == CiRuntimeCall::Debug()) { |
351 TRACE_C1X_3("CiRuntimeCall::Debug()"); | 363 TRACE_C1X_3("CiRuntimeCall::Debug()"); |
352 } else if (runtime_call == CiRuntimeCall::UnwindException()) { | 364 } else if (runtime_call == CiRuntimeCall::UnwindException()) { |
353 call->set_destination(Runtime1::entry_for(Runtime1::c1x_unwind_exception_call_id)); | 365 call->set_destination(Runtime1::entry_for(Runtime1::c1x_unwind_exception_call_id)); |
354 _instructions->relocate(call->instruction_address(), runtime_call_Relocation::spec(), Assembler::call32_operand); | 366 _instructions->relocate(call->instruction_address(), runtime_call_Relocation::spec(), Assembler::call32_operand); |
355 TRACE_C1X_3("CiRuntimeCall::UnwindException()"); | 367 TRACE_C1X_3("CiRuntimeCall::UnwindException()"); |
356 } else if (runtime_call == CiRuntimeCall::HandleException()) { | 368 } else if (runtime_call == CiRuntimeCall::HandleException()) { |
357 call->set_destination(Runtime1::entry_for(Runtime1::c1x_handle_exception_id)); | 369 call->set_destination(Runtime1::entry_for(Runtime1::c1x_handle_exception_id)); |
358 _instructions->relocate(call->instruction_address(), runtime_call_Relocation::spec(), Assembler::call32_operand); | 370 _instructions->relocate(call->instruction_address(), runtime_call_Relocation::spec(), Assembler::call32_operand); |
359 TRACE_C1X_3("CiRuntimeCall::HandleException()"); | 371 TRACE_C1X_3("CiRuntimeCall::HandleException()"); |
372 } else if (runtime_call == CiRuntimeCall::JavaTimeMillis()) { | |
373 call->set_destination((address)os::javaTimeMillis); | |
374 _instructions->relocate(call->instruction_address(), runtime_call_Relocation::spec(), Assembler::call32_operand); | |
375 TRACE_C1X_3("CiRuntimeCall::JavaTimeMillis()"); | |
376 } else if (runtime_call == CiRuntimeCall::JavaTimeNanos()) { | |
377 call->set_destination((address)os::javaTimeNanos); | |
378 _instructions->relocate(call->instruction_address(), runtime_call_Relocation::spec(), Assembler::call32_operand); | |
379 TRACE_C1X_3("CiRuntimeCall::JavaTimeNanos()"); | |
360 } else { | 380 } else { |
361 TRACE_C1X_1("runtime_call not implemented: "); | 381 TRACE_C1X_1("runtime_call not implemented: "); |
362 IF_TRACE_C1X_1 runtime_call->print(); | 382 IF_TRACE_C1X_1 runtime_call->print(); |
363 } | 383 } |
364 } else if (global_stub != NULL) { | 384 } else if (global_stub != NULL) { |
385 NativeInstruction* inst = nativeInstruction_at(_instructions->start() + pc_offset); | |
365 assert(java_lang_boxing_object::is_instance(global_stub, T_LONG), "global_stub needs to be of type Long"); | 386 assert(java_lang_boxing_object::is_instance(global_stub, T_LONG), "global_stub needs to be of type Long"); |
366 | 387 |
367 call->set_destination(VmIds::getStub(global_stub)); | 388 if (inst->is_call()) { |
368 _instructions->relocate(call->instruction_address(), runtime_call_Relocation::spec(), Assembler::call32_operand); | 389 nativeCall_at((address)inst)->set_destination(VmIds::getStub(global_stub)); |
369 TRACE_C1X_3("relocating (stub) at %016x", call->instruction_address()); | 390 } else { |
391 nativeJump_at((address)inst)->set_jump_destination(VmIds::getStub(global_stub)); | |
392 } | |
393 _instructions->relocate((address)inst, runtime_call_Relocation::spec(), Assembler::call32_operand); | |
394 TRACE_C1X_3("relocating (stub) at %016x", inst); | |
370 } else if (symbol != NULL) { | 395 } else if (symbol != NULL) { |
371 fatal("symbol"); | 396 fatal("symbol"); |
372 } else { // method != NULL | 397 } else { // method != NULL |
398 NativeCall* call = nativeCall_at(_instructions->start() + pc_offset); | |
373 assert(hotspot_method != NULL, "unexpected RiMethod"); | 399 assert(hotspot_method != NULL, "unexpected RiMethod"); |
374 assert(debug_info != NULL, "debug info expected"); | 400 assert(debug_info != NULL, "debug info expected"); |
375 | 401 |
376 methodOop method = NULL; | 402 methodOop method = NULL; |
377 if (hotspot_method->is_a(HotSpotMethodResolved::klass())) method = VmIds::get<methodOop>(HotSpotMethodResolved::vmId(hotspot_method)); | 403 if (hotspot_method->is_a(HotSpotMethodResolved::klass())) method = VmIds::get<methodOop>(HotSpotMethodResolved::vmId(hotspot_method)); |
521 _invoke_mark_pc = instruction; | 547 _invoke_mark_pc = instruction; |
522 break; | 548 break; |
523 case MARK_IMPLICIT_NULL: | 549 case MARK_IMPLICIT_NULL: |
524 _implicit_exception_table.append(pc_offset, pc_offset); | 550 _implicit_exception_table.append(pc_offset, pc_offset); |
525 break; | 551 break; |
526 case MARK_KLASS_PATCHING: { | 552 case MARK_KLASS_PATCHING: |
553 case MARK_ACCESS_FIELD_PATCHING: { | |
527 unsigned char* byte_count = (unsigned char*) (instruction - 1); | 554 unsigned char* byte_count = (unsigned char*) (instruction - 1); |
528 unsigned char* byte_skip = (unsigned char*) (instruction - 2); | 555 unsigned char* byte_skip = (unsigned char*) (instruction - 2); |
529 unsigned char* being_initialized_entry_offset = (unsigned char*) (instruction - 3); | 556 unsigned char* being_initialized_entry_offset = (unsigned char*) (instruction - 3); |
530 | 557 |
531 assert(*byte_skip == 5, "unexpected byte_skip"); | 558 assert(*byte_skip == 5, "unexpected byte_skip"); |
532 | 559 |
533 assert(references->length() == 2, "MARK_KLASS_PATCHING needs 2 references"); | 560 assert(references->length() == 2, "MARK_KLASS_PATCHING/MARK_ACCESS_FIELD_PATCHING needs 2 references"); |
534 oop ref1 = ((oop*) references->base(T_OBJECT))[0]; | 561 oop ref1 = ((oop*) references->base(T_OBJECT))[0]; |
535 oop ref2 = ((oop*) references->base(T_OBJECT))[1]; | 562 oop ref2 = ((oop*) references->base(T_OBJECT))[1]; |
536 int i_byte_count = CiTargetMethod_Site::pcOffset(ref2) - CiTargetMethod_Site::pcOffset(ref1); | 563 int i_byte_count = CiTargetMethod_Site::pcOffset(ref2) - CiTargetMethod_Site::pcOffset(ref1); |
537 assert(i_byte_count == (unsigned char)i_byte_count, "invalid offset"); | 564 assert(i_byte_count == (unsigned char)i_byte_count, "invalid offset"); |
538 *byte_count = i_byte_count; | 565 *byte_count = i_byte_count; |
539 *being_initialized_entry_offset = *byte_count + *byte_skip; | 566 *being_initialized_entry_offset = *byte_count + *byte_skip; |
540 | 567 |
568 // we need to correct the offset of a field access - it's created with MAX_INT to ensure the correct size, and hotspot expects 0 | |
569 if (id == MARK_ACCESS_FIELD_PATCHING) { | |
570 NativeMovRegMem* inst = nativeMovRegMem_at(_instructions->start() + CiTargetMethod_Site::pcOffset(ref1)); | |
571 assert(inst->offset() == max_jint, "unexpected offset value"); | |
572 inst->set_offset(0); | |
573 } | |
541 break; | 574 break; |
542 } | 575 } |
543 case MARK_DUMMY_OOP_RELOCATION: { | 576 case MARK_DUMMY_OOP_RELOCATION: { |
544 _instructions->relocate(instruction, oop_Relocation::spec_for_immediate(), Assembler::imm_operand); | 577 _instructions->relocate(instruction, oop_Relocation::spec_for_immediate(), Assembler::imm_operand); |
545 | 578 |