diff src/cpu/x86/vm/stubGenerator_x86_64.cpp @ 14909:4ca6dc0799b6

Backout jdk9 merge
author Gilles Duboscq <duboscq@ssw.jku.at>
date Tue, 01 Apr 2014 13:57:07 +0200
parents d8041d695d19
children 4062efea018b
--- a/src/cpu/x86/vm/stubGenerator_x86_64.cpp	Tue Apr 01 14:09:03 2014 +0200
+++ b/src/cpu/x86/vm/stubGenerator_x86_64.cpp	Tue Apr 01 13:57:07 2014 +0200
@@ -3217,9 +3217,6 @@
   //   c_rarg3   - r vector byte array address
   //   c_rarg4   - input length
   //
-  // Output:
-  //   rax       - input length
-  //
   address generate_cipherBlockChaining_encryptAESCrypt() {
     assert(UseAES, "need AES instructions and misaligned SSE support");
     __ align(CodeEntryAlignment);
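The hunk above drops the stub's documented output: on the jdk9 side of this diff rax carries the input length back to the caller, while the backed-out code (see the epilogue hunk further down) forces rax to 0. A hedged, hypothetical C-level restatement of the two contracts, using invented typedef names and making no claim to match HotSpot's actual declarations:

    // Parameter names follow the register comments above (from/to/key/rvec/len).
    typedef int  (*cbc_encrypt_with_len_t)(char* from, char* to, char* key,
                                           char* rvec, int len);  // jdk9 side: returns len in rax
    typedef void (*cbc_encrypt_t)         (char* from, char* to, char* key,
                                           char* rvec, int len);  // after the backout: rax is set to 0
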
@@ -3235,7 +3232,7 @@
 #ifndef _WIN64
     const Register len_reg     = c_rarg4;  // src len (must be multiple of blocksize 16)
 #else
-    const Address  len_mem(rbp, 6 * wordSize);  // length is on stack on Win64
+    const Address  len_mem(rsp, 6 * wordSize);  // length is on stack on Win64
     const Register len_reg     = r10;      // pick the first volatile windows register
 #endif
     const Register pos         = rax;
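Both sides agree that on Win64 the fifth argument (the length) arrives on the stack rather than in a register; the hunk only changes whether that slot is addressed off rbp or rsp. A sketch of the expected layout right after the stub's enter() (push rbp; mov rbp, rsp), assuming the Win64 calling convention and wordSize == 8; the trailing load is a reconstruction of how the slot is consumed, not a line from this hunk:

    //   [rbp + 0*wordSize]   saved rbp (pushed by enter())
    //   [rbp + 1*wordSize]   return address
    //   [rbp + 2*wordSize]   shadow (home) slot for c_rarg0
    //   [rbp + 3*wordSize]   shadow slot for c_rarg1
    //   [rbp + 4*wordSize]   shadow slot for c_rarg2
    //   [rbp + 5*wordSize]   shadow slot for c_rarg3
    //   [rbp + 6*wordSize]   fifth argument: input length
    //
    // Immediately after enter(), rsp == rbp, so Address(rsp, 6 * wordSize) and
    // Address(rbp, 6 * wordSize) name the same slot; once rsp moves, only the
    // rbp-relative form keeps pointing at it.
    __ movl(len_reg, len_mem);   // pull the length into r10 while the offset is valid
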
@@ -3262,8 +3259,6 @@
     for (int i = 6; i <= XMM_REG_NUM_KEY_LAST; i++) {
       __ movdqu(xmm_save(i), as_XMMRegister(i));
     }
-#else
-    __ push(len_reg); // Save
 #endif
 
     const XMMRegister xmm_key_shuf_mask = xmm_temp;  // used temporarily to swap key bytes up front
@@ -3306,10 +3301,8 @@
     for (int i = 6; i <= XMM_REG_NUM_KEY_LAST; i++) {
       __ movdqu(as_XMMRegister(i), xmm_save(i));
     }
-    __ movl(rax, len_mem);
-#else
-    __ pop(rax); // return length
 #endif
+    __ movl(rax, 0); // return 0 (why?)
     __ leave(); // required for proper stackwalking of RuntimeStub frame
     __ ret(0);
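Taken together with the push(len_reg) hunk above, the lines removed here implement the length return promised by the deleted "Output" comment. Reassembled as a sketch (not a verbatim excerpt), the jdk9-side pattern is:

    #ifdef _WIN64
        __ movl(rax, len_mem);   // length stayed in its caller-owned stack slot; reload it
    #else
        __ push(len_reg);        // prologue: save len so it can be returned later
        // ... CBC encrypt loop ...
        __ pop(rax);             // epilogue: return the processed byte count
    #endif

The backout replaces both paths with the restored __ movl(rax, 0), so the register a caller would read the length from now always holds zero, hence the "(why?)" in the restored comment.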
 
@@ -3416,9 +3409,6 @@
   //   c_rarg3   - r vector byte array address
   //   c_rarg4   - input length
   //
-  // Output:
-  //   rax       - input length
-  //
 
   address generate_cipherBlockChaining_decryptAESCrypt_Parallel() {
     assert(UseAES, "need AES instructions and misaligned SSE support");
@@ -3437,7 +3427,7 @@
 #ifndef _WIN64
     const Register len_reg     = c_rarg4;  // src len (must be multiple of blocksize 16)
 #else
-    const Address  len_mem(rbp, 6 * wordSize);  // length is on stack on Win64
+    const Address  len_mem(rsp, 6 * wordSize);  // length is on stack on Win64
     const Register len_reg     = r10;      // pick the first volatile windows register
 #endif
     const Register pos         = rax;
@@ -3458,10 +3448,7 @@
     for (int i = 6; i <= XMM_REG_NUM_KEY_LAST; i++) {
       __ movdqu(xmm_save(i), as_XMMRegister(i));
     }
-#else
-    __ push(len_reg); // Save
 #endif
-
     // the java expanded key ordering is rotated one position from what we want
     // so we start from 0x10 here and hit 0x00 last
     const XMMRegister xmm_key_shuf_mask = xmm1;  // used temporarily to swap key bytes up front
@@ -3567,10 +3554,8 @@
     for (int i = 6; i <= XMM_REG_NUM_KEY_LAST; i++) {
       __ movdqu(as_XMMRegister(i), xmm_save(i));
     }
-    __ movl(rax, len_mem);
-#else
-    __ pop(rax); // return length
 #endif
+    __ movl(rax, 0); // return 0 (why?)
     __ leave(); // required for proper stackwalking of RuntimeStub frame
     __ ret(0);
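The decrypt stub receives the same treatment: the rbp-relative length slot and the length return are backed out in favour of the rsp-relative address and a constant 0 in rax. As a hedged usage sketch with an invented function-pointer type and wrapper (purely illustrative, not how HotSpot dispatches these stubs), the only difference observable at a call site is the value left in rax:

    typedef int (*cbc_aes_stub_t)(char* from, char* to, char* key,
                                  char* rvec, int len);

    int run_cbc_stub(cbc_aes_stub_t stub, char* from, char* to,
                     char* key, char* rvec, int len) {
      int processed = stub(from, to, key, rvec, len);
      // jdk9 side of this diff: processed == len (bytes handled).
      // After this backout: processed == 0, so callers must track the
      // length themselves instead of reading it back from the stub.
      return processed;
    }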