comparison src/share/vm/c1/c1_LIRGenerator.cpp @ 6275:957c266d8bc5

Merge with http://hg.openjdk.java.net/hsx/hsx24/hotspot/
author Doug Simon <doug.simon@oracle.com>
date Tue, 21 Aug 2012 10:39:19 +0200
parents 33df1aeaebbf 1d7922586cf6
children c38f13903fdf
comparing 5891:fd8832ae511d (base) with 6275:957c266d8bc5
1 /* 1 /*
2 * Copyright (c) 2005, 2011, Oracle and/or its affiliates. All rights reserved. 2 * Copyright (c) 2005, 2012, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 * 4 *
5 * This code is free software; you can redistribute it and/or modify it 5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as 6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation. 7 * published by the Free Software Foundation.
1238 pre_barrier(LIR_OprFact::illegalOpr /* addr_opr */, 1238 pre_barrier(LIR_OprFact::illegalOpr /* addr_opr */,
1239 result /* pre_val */, 1239 result /* pre_val */,
1240 false /* do_load */, 1240 false /* do_load */,
1241 false /* patch */, 1241 false /* patch */,
1242 NULL /* info */); 1242 NULL /* info */);
1243 }
1244
1245 // Example: clazz.isInstance(object)
1246 void LIRGenerator::do_isInstance(Intrinsic* x) {
1247 assert(x->number_of_arguments() == 2, "wrong type");
1248
1249 // TODO could try to substitute this node with an equivalent InstanceOf
1250 // if clazz is known to be a constant Class. This will pick up newly found
1251 // constants after HIR construction. I'll leave this to a future change.
1252
1253 // as a first cut, make a simple leaf call to runtime to stay platform independent.
1254 // could follow the aastore example in a future change.
1255
1256 LIRItem clazz(x->argument_at(0), this);
1257 LIRItem object(x->argument_at(1), this);
1258 clazz.load_item();
1259 object.load_item();
1260 LIR_Opr result = rlock_result(x);
1261
1262 // need to perform null check on clazz
1263 if (x->needs_null_check()) {
1264 CodeEmitInfo* info = state_for(x);
1265 __ null_check(clazz.result(), info);
1266 }
1267
1268 LIR_Opr call_result = call_runtime(clazz.value(), object.value(),
1269 CAST_FROM_FN_PTR(address, Runtime1::is_instance_of),
1270 x->type(),
1271 NULL); // NULL CodeEmitInfo results in a leaf call
1272 __ move(call_result, result);
1243 } 1273 }
1244 1274
1245 // Example: object.getClass () 1275 // Example: object.getClass ()
1246 void LIRGenerator::do_getClass(Intrinsic* x) { 1276 void LIRGenerator::do_getClass(Intrinsic* x) {
1247 assert(x->number_of_arguments() == 1, "wrong type"); 1277 assert(x->number_of_arguments() == 1, "wrong type");
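The do_isInstance intrinsic added above deliberately stays platform independent: instead of expanding a type check inline, it emits a leaf call to Runtime1::is_instance_of and moves the returned int into the result register. That runtime entry lives outside this file and is not part of this hunk; purely as orientation, a minimal sketch of what such a leaf entry could look like follows. The JRT_LEAF wrapper and the klassOop/mirror accessors are assumptions based on the usual Runtime1 conventions of this vintage, not code taken from this changeset.

    // Hypothetical sketch only -- not part of this changeset.
    // A leaf call performs no safepoint and triggers no GC, so raw oops are safe here.
    JRT_LEAF(int, Runtime1::is_instance_of(oopDesc* mirror, oopDesc* obj))
      // do_isInstance already null-checks the Class mirror on the compiled side.
      assert(mirror != NULL, "mirror is null-checked by the caller");
      klassOop k = java_lang_Class::as_klassOop(mirror);
      // Class.isInstance(null) is false; primitive mirrors carry no klass.
      // Returning int (not bool) keeps the C and Java calling conventions in
      // sync for the value the generator moves into the result register.
      return (k != NULL && obj != NULL && obj->is_a(k)) ? 1 : 0;
    JRT_LEAF_END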
1908 __ null_check(value.result(), info); 1938 __ null_check(value.result(), info);
1909 } 1939 }
1910 } 1940 }
1911 1941
1912 1942
1943 void LIRGenerator::do_TypeCast(TypeCast* x) {
1944 LIRItem value(x->obj(), this);
1945 value.load_item();
1946 // the result is the same as from the node we are casting
1947 set_result(x, value.result());
1948 }
1949
1950
1913 void LIRGenerator::do_Throw(Throw* x) { 1951 void LIRGenerator::do_Throw(Throw* x) {
1914 LIRItem exception(x->exception(), this); 1952 LIRItem exception(x->exception(), this);
1915 exception.load_item(); 1953 exception.load_item();
1916 set_no_result(x); 1954 set_no_result(x);
1917 LIR_Opr exception_opr = exception.result(); 1955 LIR_Opr exception_opr = exception.result();
2735 assert(receiver->is_illegal() || receiver->is_equal(LIR_Assembler::receiverOpr()), "must match"); 2773 assert(receiver->is_illegal() || receiver->is_equal(LIR_Assembler::receiverOpr()), "must match");
2736 2774
2737 // JSR 292 2775 // JSR 292
2738 // Preserve the SP over MethodHandle call sites. 2776 // Preserve the SP over MethodHandle call sites.
2739 ciMethod* target = x->target(); 2777 ciMethod* target = x->target();
2740 if (target->is_method_handle_invoke()) { 2778 bool is_method_handle_invoke = (// %%% FIXME: Are both of these relevant?
2779 target->is_method_handle_intrinsic() ||
2780 target->is_compiled_lambda_form());
2781 if (is_method_handle_invoke) {
2741 info->set_is_method_handle_invoke(true); 2782 info->set_is_method_handle_invoke(true);
2742 __ move(FrameMap::stack_pointer(), FrameMap::method_handle_invoke_SP_save_opr()); 2783 __ move(FrameMap::stack_pointer(), FrameMap::method_handle_invoke_SP_save_opr());
2743 } 2784 }
2744 2785
2745 switch (x->code()) { 2786 switch (x->code()) {
2775 2816
2776 // Get CallSite offset from constant pool cache pointer. 2817 // Get CallSite offset from constant pool cache pointer.
2777 int index = bcs.get_method_index(); 2818 int index = bcs.get_method_index();
2778 size_t call_site_offset = cpcache->get_f1_offset(index); 2819 size_t call_site_offset = cpcache->get_f1_offset(index);
2779 2820
2821 // Load CallSite object from constant pool cache.
2822 LIR_Opr call_site = new_register(objectType);
2823 __ oop2reg(cpcache->constant_encoding(), call_site);
2824 __ move_wide(new LIR_Address(call_site, call_site_offset, T_OBJECT), call_site);
2825
2780 // If this invokedynamic call site hasn't been executed yet in 2826 // If this invokedynamic call site hasn't been executed yet in
2781 // the interpreter, the CallSite object in the constant pool 2827 // the interpreter, the CallSite object in the constant pool
2782 // cache is still null and we need to deoptimize. 2828 // cache is still null and we need to deoptimize.
2783 if (cpcache->is_f1_null_at(index)) { 2829 if (cpcache->is_f1_null_at(index)) {
2784 // Cannot re-use same xhandlers for multiple CodeEmitInfos, so 2830 // Only deoptimize if the CallSite object is still null; we don't
2785 // clone all handlers. This is handled transparently in other 2831 // recompile methods in C1 after deoptimization so this call site
2786 // places by the CodeEmitInfo cloning logic but is handled 2832 // might be resolved the next time we execute it after OSR.
2787 // specially here because a stub isn't being used.
2788 x->set_exception_handlers(new XHandlers(x->exception_handlers()));
2789
2790 DeoptimizeStub* deopt_stub = new DeoptimizeStub(deopt_info); 2833 DeoptimizeStub* deopt_stub = new DeoptimizeStub(deopt_info);
2791 __ jump(deopt_stub); 2834 __ cmp(lir_cond_equal, call_site, LIR_OprFact::oopConst(NULL));
2835 __ branch(lir_cond_equal, T_OBJECT, deopt_stub);
2792 } 2836 }
2793 2837
2794 // Use the receiver register for the synthetic MethodHandle 2838 // Use the receiver register for the synthetic MethodHandle
2795 // argument. 2839 // argument.
2796 receiver = LIR_Assembler::receiverOpr(); 2840 receiver = LIR_Assembler::receiverOpr();
2797 LIR_Opr tmp = new_register(objectType);
2798
2799 // Load CallSite object from constant pool cache.
2800 __ oop2reg(cpcache->constant_encoding(), tmp);
2801 __ move_wide(new LIR_Address(tmp, (int)call_site_offset, T_OBJECT), tmp);
2802 2841
2803 // Load target MethodHandle from CallSite object. 2842 // Load target MethodHandle from CallSite object.
2804 __ load(new LIR_Address(tmp, java_lang_invoke_CallSite::target_offset_in_bytes(), T_OBJECT), receiver); 2843 __ load(new LIR_Address(call_site, java_lang_invoke_CallSite::target_offset_in_bytes(), T_OBJECT), receiver);
2805 2844
2806 __ call_dynamic(target, receiver, result_register, 2845 __ call_dynamic(target, receiver, result_register,
2807 SharedRuntime::get_resolve_opt_virtual_call_stub(), 2846 SharedRuntime::get_resolve_opt_virtual_call_stub(),
2808 arg_list, info); 2847 arg_list, info);
2809 break; 2848 break;
2810 } 2849 }
2811 default: 2850 default:
2812 ShouldNotReachHere(); 2851 fatal(err_msg("unexpected bytecode: %s", Bytecodes::name(x->code())));
2813 break; 2852 break;
2814 } 2853 }
2815 2854
2816 // JSR 292 2855 // JSR 292
2817 // Restore the SP after MethodHandle call sites. 2856 // Restore the SP after MethodHandle call sites.
2818 if (target->is_method_handle_invoke()) { 2857 if (is_method_handle_invoke) {
2819 __ move(FrameMap::method_handle_invoke_SP_save_opr(), FrameMap::stack_pointer()); 2858 __ move(FrameMap::method_handle_invoke_SP_save_opr(), FrameMap::stack_pointer());
2820 } 2859 }
2821 2860
2822 if (x->type()->is_float() || x->type()->is_double()) { 2861 if (x->type()->is_float() || x->type()->is_double()) {
2823 // Force rounding of results from non-strictfp when in strictfp 2862 // Force rounding of results from non-strictfp when in strictfp
2877 2916
2878 __ cmp(lir_cond(x->cond()), left.result(), right.result()); 2917 __ cmp(lir_cond(x->cond()), left.result(), right.result());
2879 __ cmove(lir_cond(x->cond()), t_val.result(), f_val.result(), reg, as_BasicType(x->x()->type())); 2918 __ cmove(lir_cond(x->cond()), t_val.result(), f_val.result(), reg, as_BasicType(x->x()->type()));
2880 } 2919 }
2881 2920
2921 void LIRGenerator::do_RuntimeCall(address routine, int expected_arguments, Intrinsic* x) {
2922 assert(x->number_of_arguments() == expected_arguments, "wrong type");
2923 LIR_Opr reg = result_register_for(x->type());
2924 __ call_runtime_leaf(routine, getThreadTemp(),
2925 reg, new LIR_OprList());
2926 LIR_Opr result = rlock_result(x);
2927 __ move(reg, result);
2928 }
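The new do_RuntimeCall helper above factors out the call_runtime_leaf / rlock_result / move pattern that was previously spelled out by hand for each timing intrinsic. Note that it only asserts the expected argument count and always passes an empty LIR_OprList, so in its current form it suits argument-less leaf routines; the rewritten do_Intrinsic switch further down uses it exactly that way, for example:

    // Usage as it appears in the rewritten do_Intrinsic switch below.
    case vmIntrinsics::_currentTimeMillis:
      do_RuntimeCall(CAST_FROM_FN_PTR(address, os::javaTimeMillis), 0, x);
      break;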
2929
2930 #ifdef TRACE_HAVE_INTRINSICS
2931 void LIRGenerator::do_ThreadIDIntrinsic(Intrinsic* x) {
2932 LIR_Opr thread = getThreadPointer();
2933 LIR_Opr osthread = new_pointer_register();
2934 __ move(new LIR_Address(thread, in_bytes(JavaThread::osthread_offset()), osthread->type()), osthread);
2935 size_t thread_id_size = OSThread::thread_id_size();
2936 if (thread_id_size == (size_t) BytesPerLong) {
2937 LIR_Opr id = new_register(T_LONG);
2938 __ move(new LIR_Address(osthread, in_bytes(OSThread::thread_id_offset()), T_LONG), id);
2939 __ convert(Bytecodes::_l2i, id, rlock_result(x));
2940 } else if (thread_id_size == (size_t) BytesPerInt) {
2941 __ move(new LIR_Address(osthread, in_bytes(OSThread::thread_id_offset()), T_INT), rlock_result(x));
2942 } else {
2943 ShouldNotReachHere();
2944 }
2945 }
2946
2947 void LIRGenerator::do_ClassIDIntrinsic(Intrinsic* x) {
2948 CodeEmitInfo* info = state_for(x);
2949 CodeEmitInfo* info2 = new CodeEmitInfo(info); // Clone for the second null check
2950 assert(info != NULL, "must have info");
2951 LIRItem arg(x->argument_at(1), this);
2952 arg.load_item();
2953 LIR_Opr klass = new_register(T_OBJECT);
2954 __ move(new LIR_Address(arg.result(), java_lang_Class::klass_offset_in_bytes(), T_OBJECT), klass, info);
2955 LIR_Opr id = new_register(T_LONG);
2956 ByteSize offset = TRACE_ID_OFFSET;
2957 LIR_Address* trace_id_addr = new LIR_Address(klass, in_bytes(offset), T_LONG);
2958 __ move(trace_id_addr, id);
2959 __ logical_or(id, LIR_OprFact::longConst(0x01l), id);
2960 __ store(id, trace_id_addr);
2961 __ logical_and(id, LIR_OprFact::longConst(~0x3l), id);
2962 __ move(id, rlock_result(x));
2963 }
2964 #endif
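The bit manipulation in do_ClassIDIntrinsic is easy to misread: the value written back to the klass's TRACE_ID slot has its lowest bit set (presumably tagging the id as in use), while the value handed back to Java has the low two tag bits cleared. A small, self-contained illustration of that arithmetic, using an invented starting value of 0x1234:

    #include <cstdint>
    #include <cstdio>

    int main() {
      uint64_t stored   = 0x1234;                    // invented TRACE_ID value read from the klass
      uint64_t tagged   = stored | 0x1;              // what do_ClassIDIntrinsic stores back
      uint64_t returned = tagged & ~(uint64_t)0x3;   // what the intrinsic returns to Java
      std::printf("stored=%#llx tagged=%#llx returned=%#llx\n",
                  (unsigned long long) stored,
                  (unsigned long long) tagged,
                  (unsigned long long) returned);
      return 0; // prints stored=0x1234 tagged=0x1235 returned=0x1234
    }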
2882 2965
2883 void LIRGenerator::do_Intrinsic(Intrinsic* x) { 2966 void LIRGenerator::do_Intrinsic(Intrinsic* x) {
2884 switch (x->id()) { 2967 switch (x->id()) {
2885 case vmIntrinsics::_intBitsToFloat : 2968 case vmIntrinsics::_intBitsToFloat :
2886 case vmIntrinsics::_doubleToRawLongBits : 2969 case vmIntrinsics::_doubleToRawLongBits :
2888 case vmIntrinsics::_floatToRawIntBits : { 2971 case vmIntrinsics::_floatToRawIntBits : {
2889 do_FPIntrinsics(x); 2972 do_FPIntrinsics(x);
2890 break; 2973 break;
2891 } 2974 }
2892 2975
2893 case vmIntrinsics::_currentTimeMillis: { 2976 #ifdef TRACE_HAVE_INTRINSICS
2894 assert(x->number_of_arguments() == 0, "wrong type"); 2977 case vmIntrinsics::_threadID: do_ThreadIDIntrinsic(x); break;
2895 LIR_Opr reg = result_register_for(x->type()); 2978 case vmIntrinsics::_classID: do_ClassIDIntrinsic(x); break;
2896 __ call_runtime_leaf(CAST_FROM_FN_PTR(address, os::javaTimeMillis), getThreadTemp(), 2979 case vmIntrinsics::_counterTime:
2897 reg, new LIR_OprList()); 2980 do_RuntimeCall(CAST_FROM_FN_PTR(address, TRACE_TIME_METHOD), 0, x);
2898 LIR_Opr result = rlock_result(x);
2899 __ move(reg, result);
2900 break; 2981 break;
2901 } 2982 #endif
2902 2983
2903 case vmIntrinsics::_nanoTime: { 2984 case vmIntrinsics::_currentTimeMillis:
2904 assert(x->number_of_arguments() == 0, "wrong type"); 2985 do_RuntimeCall(CAST_FROM_FN_PTR(address, os::javaTimeMillis), 0, x);
2905 LIR_Opr reg = result_register_for(x->type());
2906 __ call_runtime_leaf(CAST_FROM_FN_PTR(address, os::javaTimeNanos), getThreadTemp(),
2907 reg, new LIR_OprList());
2908 LIR_Opr result = rlock_result(x);
2909 __ move(reg, result);
2910 break; 2986 break;
2911 } 2987
2988 case vmIntrinsics::_nanoTime:
2989 do_RuntimeCall(CAST_FROM_FN_PTR(address, os::javaTimeNanos), 0, x);
2990 break;
2912 2991
2913 case vmIntrinsics::_Object_init: do_RegisterFinalizer(x); break; 2992 case vmIntrinsics::_Object_init: do_RegisterFinalizer(x); break;
2993 case vmIntrinsics::_isInstance: do_isInstance(x); break;
2914 case vmIntrinsics::_getClass: do_getClass(x); break; 2994 case vmIntrinsics::_getClass: do_getClass(x); break;
2915 case vmIntrinsics::_currentThread: do_currentThread(x); break; 2995 case vmIntrinsics::_currentThread: do_currentThread(x); break;
2916 2996
2917 case vmIntrinsics::_dlog: // fall through 2997 case vmIntrinsics::_dlog: // fall through
2918 case vmIntrinsics::_dlog10: // fall through 2998 case vmIntrinsics::_dlog10: // fall through
2919 case vmIntrinsics::_dabs: // fall through 2999 case vmIntrinsics::_dabs: // fall through
2920 case vmIntrinsics::_dsqrt: // fall through 3000 case vmIntrinsics::_dsqrt: // fall through
2921 case vmIntrinsics::_dtan: // fall through 3001 case vmIntrinsics::_dtan: // fall through
2922 case vmIntrinsics::_dsin : // fall through 3002 case vmIntrinsics::_dsin : // fall through
2923 case vmIntrinsics::_dcos : do_MathIntrinsic(x); break; 3003 case vmIntrinsics::_dcos : // fall through
3004 case vmIntrinsics::_dexp : // fall through
3005 case vmIntrinsics::_dpow : do_MathIntrinsic(x); break;
2924 case vmIntrinsics::_arraycopy: do_ArrayCopy(x); break; 3006 case vmIntrinsics::_arraycopy: do_ArrayCopy(x); break;
2925 3007
2926 // java.nio.Buffer.checkIndex 3008 // java.nio.Buffer.checkIndex
2927 case vmIntrinsics::_checkIndex: do_NIOCheckIndex(x); break; 3009 case vmIntrinsics::_checkIndex: do_NIOCheckIndex(x); break;
2928 3010
2932 case vmIntrinsics::_compareAndSwapInt: 3014 case vmIntrinsics::_compareAndSwapInt:
2933 do_CompareAndSwap(x, intType); 3015 do_CompareAndSwap(x, intType);
2934 break; 3016 break;
2935 case vmIntrinsics::_compareAndSwapLong: 3017 case vmIntrinsics::_compareAndSwapLong:
2936 do_CompareAndSwap(x, longType); 3018 do_CompareAndSwap(x, longType);
2937 break;
2938
2939 // sun.misc.AtomicLongCSImpl.attemptUpdate
2940 case vmIntrinsics::_attemptUpdate:
2941 do_AttemptUpdate(x);
2942 break; 3019 break;
2943 3020
2944 case vmIntrinsics::_Reference_get: 3021 case vmIntrinsics::_Reference_get:
2945 do_Reference_get(x); 3022 do_Reference_get(x);
2946 break; 3023 break;
2959 LIRItem value(x->recv(), this); 3036 LIRItem value(x->recv(), this);
2960 value.load_item(); 3037 value.load_item();
2961 recv = new_register(T_OBJECT); 3038 recv = new_register(T_OBJECT);
2962 __ move(value.result(), recv); 3039 __ move(value.result(), recv);
2963 } 3040 }
2964 __ profile_call(x->method(), x->bci_of_invoke(), mdo, recv, tmp, x->known_holder()); 3041 __ profile_call(x->method(), x->bci_of_invoke(), x->callee(), mdo, recv, tmp, x->known_holder());
2965 } 3042 }
2966 3043
2967 void LIRGenerator::do_ProfileInvoke(ProfileInvoke* x) { 3044 void LIRGenerator::do_ProfileInvoke(ProfileInvoke* x) {
2968 // We can safely ignore accessors here, since c2 will inline them anyway, 3045 // We can safely ignore accessors here, since c2 will inline them anyway,
2969 // accessors are also always mature. 3046 // accessors are also always mature.
3179 case lir_membar_storeload : __ membar_storeload(); break; 3256 case lir_membar_storeload : __ membar_storeload(); break;
3180 default : ShouldNotReachHere(); break; 3257 default : ShouldNotReachHere(); break;
3181 } 3258 }
3182 } 3259 }
3183 } 3260 }
3184