Mercurial > hg > truffle
comparison src/cpu/x86/vm/macroAssembler_x86.cpp @ 7427:2c7f594145dc
8004835: Improve AES intrinsics on x86
Summary: Enable AES intrinsics on non-AVX cpus, group together aes instructions in crypto stubs.
Reviewed-by: roland, twisti
| author | kvn |
|---|---|
| date | Wed, 19 Dec 2012 15:40:35 -0800 |
| parents | f0c2369fda5a |
| children | d02120b7a34f |
comparison legend: equal | deleted | inserted | replaced
7426:65c8342f726a | 7427:2c7f594145dc |
---|---|
3083 } | 3083 } |
3084 } | 3084 } |
3085 | 3085 |
3086 void MacroAssembler::pshufb(XMMRegister dst, AddressLiteral src) { | 3086 void MacroAssembler::pshufb(XMMRegister dst, AddressLiteral src) { |
3087 // Used in sign-bit flipping with aligned address. | 3087 // Used in sign-bit flipping with aligned address. |
3088 assert((UseAVX > 0) || (((intptr_t)src.target() & 15) == 0), "SSE mode requires address alignment 16 bytes"); | 3088 bool aligned_adr = (((intptr_t)src.target() & 15) == 0); |
3089 assert((UseAVX > 0) || aligned_adr, "SSE mode requires address alignment 16 bytes"); | |
3089 if (reachable(src)) { | 3090 if (reachable(src)) { |
3090 Assembler::pshufb(dst, as_Address(src)); | 3091 Assembler::pshufb(dst, as_Address(src)); |
3091 } else { | 3092 } else { |
3092 lea(rscratch1, src); | 3093 lea(rscratch1, src); |
3093 Assembler::pshufb(dst, Address(rscratch1, 0)); | 3094 Assembler::pshufb(dst, Address(rscratch1, 0)); |