comparison src/share/vm/classfile/classFileParser.cpp @ 7587:4a916f2ce331

8003985: Support @Contended Annotation - JEP 142
Summary: HotSpot changes to support @Contended annotation.
Reviewed-by: coleenp, kvn, jrose
Contributed-by: Aleksey Shipilev <aleksey.shipilev@oracle.com>
author jwilhelm
date Mon, 14 Jan 2013 15:17:47 +0100
parents adc176e95bf2
children 5b6a231e5a86
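
For orientation, here is a minimal sketch of the Java-level usage this parser change recognizes. It assumes the sun.misc.Contended annotation shape from JEP 142 (an optional String value naming a contention group); the class and field names below are hypothetical and for illustration only.

    import sun.misc.Contended;

    @Contended                  // class level: the whole field block is padded front and back
    class PaddedCounters {
        long hits;
        long misses;
    }

    class Cursors {
        @Contended              // default (anonymous) group: the field is padded on both sides
        volatile long cursor;

        @Contended("producer")  // named group: fields of the same group are laid out together
        volatile long head;     // and padded as a unit against other fields
        @Contended("producer")
        volatile long headCache;

        @Contended("consumer")
        volatile long tail;
    }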
--- src/share/vm/classfile/classFileParser.cpp (7586:90a92d5bca17)
+++ src/share/vm/classfile/classFileParser.cpp (7587:4a916f2ce331)
@@ -968,10 +968,16 @@
       generic_signature_index = cfs->get_u2(CHECK);
     } else if (attribute_name == vmSymbols::tag_runtime_visible_annotations()) {
       runtime_visible_annotations_length = attribute_length;
       runtime_visible_annotations = cfs->get_u1_buffer();
       assert(runtime_visible_annotations != NULL, "null visible annotations");
+      parse_annotations(loader_data,
+                        runtime_visible_annotations,
+                        runtime_visible_annotations_length,
+                        cp,
+                        parsed_annotations,
+                        CHECK);
       cfs->skip_u1(runtime_visible_annotations_length, CHECK);
     } else if (PreserveAllAnnotations && attribute_name == vmSymbols::tag_runtime_invisible_annotations()) {
       runtime_invisible_annotations_length = attribute_length;
       runtime_invisible_annotations = cfs->get_u1_buffer();
       assert(runtime_invisible_annotations != NULL, "null invisible annotations");
@@ -1214,23 +1220,20 @@
 
     FieldInfo* field = FieldInfo::from_field_array(fa, n);
     field->initialize(access_flags.as_short(),
                       name_index,
                       signature_index,
-                      constantvalue_index,
-                      0);
+                      constantvalue_index);
+    BasicType type = cp->basic_type_for_signature_at(signature_index);
+
+    // Remember how many oops we encountered and compute allocation type
+    FieldAllocationType atype = fac->update(is_static, type);
+    field->set_allocation_type(atype);
+
+    // After field is initialized with type, we can augment it with aux info
     if (parsed_annotations.has_any_annotations())
       parsed_annotations.apply_to(field);
-
-    BasicType type = cp->basic_type_for_signature_at(signature_index);
-
-    // Remember how many oops we encountered and compute allocation type
-    FieldAllocationType atype = fac->update(is_static, type);
-
-    // The correct offset is computed later (all oop fields will be located together)
-    // We temporarily store the allocation type in the offset field
-    field->set_offset(atype);
   }
 
   int index = length;
   if (num_injected != 0) {
     for (int n = 0; n < num_injected; n++) {
@@ -1257,21 +1260,17 @@
       // Injected field
       FieldInfo* field = FieldInfo::from_field_array(fa, index);
       field->initialize(JVM_ACC_FIELD_INTERNAL,
                         injected[n].name_index,
                         injected[n].signature_index,
-                        0,
                         0);
 
       BasicType type = FieldType::basic_type(injected[n].signature());
 
       // Remember how many oops we encountered and compute allocation type
       FieldAllocationType atype = fac->update(false, type);
-
-      // The correct offset is computed later (all oop fields will be located together)
-      // We temporarily store the allocation type in the offset field
-      field->set_offset(atype);
+      field->set_allocation_type(atype);
       index++;
     }
   }
 
   // Now copy the fields' data from the temporary resource array.
@@ -1733,11 +1732,12 @@
   }
   return index;
 }
 
 // Sift through annotations, looking for those significant to the VM:
-void ClassFileParser::parse_annotations(u1* buffer, int limit,
+void ClassFileParser::parse_annotations(ClassLoaderData* loader_data,
+                                        u1* buffer, int limit,
                                         constantPoolHandle cp,
                                         ClassFileParser::AnnotationCollector* coll,
                                         TRAPS) {
   // annotations := do(nann:u2) {annotation}
   int index = 0;
@@ -1750,13 +1750,16 @@
     tag_off = 6,    // u1 such as 'c' (type) or 'e' (enum)
     e_tag_val = 'e',
     e_type_off = 7, // utf8 such as 'Ljava/lang/annotation/RetentionPolicy;'
     e_con_off = 9,  // utf8 payload, such as 'SOURCE', 'CLASS', 'RUNTIME'
     e_size = 11,    // end of 'e' annotation
-    c_tag_val = 'c',
-    c_con_off = 7,  // utf8 payload, such as 'I' or 'Ljava/lang/String;'
+    c_tag_val = 'c',  // payload is type
+    c_con_off = 7,  // utf8 payload, such as 'I'
     c_size = 9,     // end of 'c' annotation
+    s_tag_val = 's',  // payload is String
+    s_con_off = 7,  // utf8 payload, such as 'Ljava/lang/String;'
+    s_size = 9,
     min_size = 6    // smallest possible size (zero members)
   };
   while ((--nann) >= 0 && (index-2 + min_size <= limit)) {
     int index0 = index;
     index = skip_annotation(buffer, limit, index);
@@ -1771,61 +1774,69 @@
       member = check_symbol_at(cp, member_index);
       if (member == NULL) break; // invalid member name
     }
 
     // Here is where parsing particular annotations will take place.
-    AnnotationCollector::ID id = coll->annotation_index(aname);
+    AnnotationCollector::ID id = coll->annotation_index(loader_data, aname);
     if (id == AnnotationCollector::_unknown) continue;
     coll->set_annotation(id);
-    // If there are no values, just set the bit and move on:
-    if (count == 0) continue;
-
-    // For the record, here is how annotation payloads can be collected.
-    // Suppose we want to capture @Retention.value. Here is how:
-    //if (id == AnnotationCollector::_class_Retention) {
-    //  Symbol* payload = NULL;
-    //  if (count == 1
-    //      && e_size == (index0 - index) // match size
-    //      && e_tag_val == *(abase + tag_off)
-    //      && (check_symbol_at(cp, Bytes::get_Java_u2(abase + e_type_off))
-    //          == vmSymbols::RetentionPolicy_signature())
-    //      && member == vmSymbols::value_name()) {
-    //    payload = check_symbol_at(cp, Bytes::get_Java_u2(abase + e_con_off));
-    //  }
-    //  check_property(payload != NULL,
-    //                 "Invalid @Retention annotation at offset %u in class file %s",
-    //                 index0, CHECK);
-    //  if (payload != NULL) {
-    //    payload->increment_refcount();
-    //    coll->_class_RetentionPolicy = payload;
-    //  }
-    //}
+
+    if (id == AnnotationCollector::_sun_misc_Contended) {
+      if (count == 1
+          && s_size == (index - index0) // match size
+          && s_tag_val == *(abase + tag_off)
+          && member == vmSymbols::value_name()) {
+        u2 group_index = Bytes::get_Java_u2(abase + s_con_off);
+        coll->set_contended_group(group_index);
+      } else {
+        coll->set_contended_group(0); // default contended group
+      }
+      coll->set_contended(true);
+    } else {
+      coll->set_contended(false);
+    }
   }
 }
 
-ClassFileParser::AnnotationCollector::ID ClassFileParser::AnnotationCollector::annotation_index(Symbol* name) {
+ClassFileParser::AnnotationCollector::ID
+ClassFileParser::AnnotationCollector::annotation_index(ClassLoaderData* loader_data,
+                                                       Symbol* name) {
   vmSymbols::SID sid = vmSymbols::find_sid(name);
+  bool privileged = false;
+  if (loader_data->is_the_null_class_loader_data()) {
+    // Privileged code can use all annotations. Other code silently drops some.
+    privileged = true;
+  }
   switch (sid) {
   case vmSymbols::VM_SYMBOL_ENUM_NAME(java_lang_invoke_ForceInline_signature):
     if (_location != _in_method) break; // only allow for methods
+    if (!privileged) break; // only allow in privileged code
     return _method_ForceInline;
   case vmSymbols::VM_SYMBOL_ENUM_NAME(java_lang_invoke_DontInline_signature):
    if (_location != _in_method) break; // only allow for methods
+    if (!privileged) break; // only allow in privileged code
     return _method_DontInline;
   case vmSymbols::VM_SYMBOL_ENUM_NAME(java_lang_invoke_LambdaForm_Compiled_signature):
     if (_location != _in_method) break; // only allow for methods
+    if (!privileged) break; // only allow in privileged code
     return _method_LambdaForm_Compiled;
   case vmSymbols::VM_SYMBOL_ENUM_NAME(java_lang_invoke_LambdaForm_Hidden_signature):
     if (_location != _in_method) break; // only allow for methods
+    if (!privileged) break; // only allow in privileged code
     return _method_LambdaForm_Hidden;
+  case vmSymbols::VM_SYMBOL_ENUM_NAME(sun_misc_Contended_signature):
+    if (_location != _in_field && _location != _in_class) break; // only allow for fields and classes
+    if (!EnableContended || (RestrictContended && !privileged)) break; // honor privileges
+    return _sun_misc_Contended;
   default: break;
   }
   return AnnotationCollector::_unknown;
 }
 
 void ClassFileParser::FieldAnnotationCollector::apply_to(FieldInfo* f) {
-  fatal("no field annotations yet");
+  if (is_contended())
+    f->set_contended_group(contended_group());
 }
 
 void ClassFileParser::MethodAnnotationCollector::apply_to(methodHandle m) {
   if (has_annotation(_method_ForceInline))
     m->set_force_inline(true);
@@ -1836,11 +1847,11 @@
   if (has_annotation(_method_LambdaForm_Hidden))
     m->set_hidden(true);
 }
 
 void ClassFileParser::ClassAnnotationCollector::apply_to(instanceKlassHandle k) {
-  fatal("no class annotations yet");
+  k->set_is_contended(is_contended());
 }
 
 
 #define MAX_ARGS_SIZE 255
 #define MAX_CODE_SIZE 65535
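
Note the privilege gating added to annotation_index above: like the java.lang.invoke annotations, @Contended is only honored for classes defined by the null (boot) class loader unless RestrictContended is turned off, and it can be disabled globally via EnableContended. The flag defaults live outside this file, so treating RestrictContended as on by default is an assumption here. A hypothetical application class illustrates the effect:

    // Loaded by an ordinary (non-boot) class loader, e.g. the system class loader.
    class AppSide {
        @sun.misc.Contended       // with RestrictContended in effect this is silently dropped;
        volatile long hotField;   // running the VM with -XX:-RestrictContended lets it take effect
    }                             // (and -XX:-EnableContended turns the feature off entirely)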
@@ -2179,11 +2190,12 @@
       generic_signature_index = cfs->get_u2_fast();
     } else if (method_attribute_name == vmSymbols::tag_runtime_visible_annotations()) {
       runtime_visible_annotations_length = method_attribute_length;
       runtime_visible_annotations = cfs->get_u1_buffer();
       assert(runtime_visible_annotations != NULL, "null visible annotations");
-      parse_annotations(runtime_visible_annotations,
+      parse_annotations(loader_data,
+          runtime_visible_annotations,
           runtime_visible_annotations_length, cp, &parsed_annotations,
           CHECK_(nullHandle));
       cfs->skip_u1(runtime_visible_annotations_length, CHECK_(nullHandle));
     } else if (PreserveAllAnnotations && method_attribute_name == vmSymbols::tag_runtime_invisible_annotations()) {
       runtime_invisible_annotations_length = method_attribute_length;
@@ -2884,11 +2896,12 @@
       parse_classfile_signature_attribute(cp, CHECK);
     } else if (tag == vmSymbols::tag_runtime_visible_annotations()) {
       runtime_visible_annotations_length = attribute_length;
       runtime_visible_annotations = cfs->get_u1_buffer();
       assert(runtime_visible_annotations != NULL, "null visible annotations");
-      parse_annotations(runtime_visible_annotations,
+      parse_annotations(loader_data,
+                        runtime_visible_annotations,
                         runtime_visible_annotations_length,
                         cp,
                         parsed_annotations,
                         CHECK);
       cfs->skip_u1(runtime_visible_annotations_length, CHECK);
@@ -3403,62 +3416,94 @@
                                       CHECK_(nullHandle));
 
   // Size of Java itable (in words)
   itable_size = access_flags.is_interface() ? 0 : klassItable::compute_itable_size(transitive_interfaces);
 
+  // get the padding width from the option
+  // TODO: Ask VM about specific CPU we are running on
+  int pad_size = ContendedPaddingWidth;
+
   // Field size and offset computation
   int nonstatic_field_size = super_klass() == NULL ? 0 : super_klass->nonstatic_field_size();
 #ifndef PRODUCT
   int orig_nonstatic_field_size = 0;
 #endif
-  int static_field_size = 0;
   int next_static_oop_offset;
   int next_static_double_offset;
   int next_static_word_offset;
   int next_static_short_offset;
   int next_static_byte_offset;
-  int next_static_type_offset;
+  int next_static_padded_offset;
   int next_nonstatic_oop_offset;
   int next_nonstatic_double_offset;
   int next_nonstatic_word_offset;
   int next_nonstatic_short_offset;
   int next_nonstatic_byte_offset;
   int next_nonstatic_type_offset;
   int first_nonstatic_oop_offset;
   int first_nonstatic_field_offset;
   int next_nonstatic_field_offset;
+  int next_nonstatic_padded_offset;
+
+  // Count the contended fields by type.
+  int static_contended_count = 0;
+  int nonstatic_contended_count = 0;
+  FieldAllocationCount fac_contended;
+  for (AllFieldStream fs(fields, cp); !fs.done(); fs.next()) {
+    FieldAllocationType atype = (FieldAllocationType) fs.allocation_type();
+    if (fs.is_contended()) {
+      fac_contended.count[atype]++;
+      if (fs.access_flags().is_static()) {
+        static_contended_count++;
+      } else {
+        nonstatic_contended_count++;
+      }
+    }
+  }
+  int contended_count = static_contended_count + nonstatic_contended_count;
+
 
   // Calculate the starting byte offsets
   next_static_oop_offset = InstanceMirrorKlass::offset_of_static_fields();
+
+  // class is contended, pad before all the fields
+  if (parsed_annotations.is_contended()) {
+    next_static_oop_offset += pad_size;
+  }
+
   next_static_double_offset = next_static_oop_offset +
-                              (fac.count[STATIC_OOP] * heapOopSize);
+                              ((fac.count[STATIC_OOP] - fac_contended.count[STATIC_OOP]) * heapOopSize);
   if ( fac.count[STATIC_DOUBLE] &&
        (Universe::field_type_should_be_aligned(T_DOUBLE) ||
        Universe::field_type_should_be_aligned(T_LONG)) ) {
     next_static_double_offset = align_size_up(next_static_double_offset, BytesPerLong);
   }
 
   next_static_word_offset = next_static_double_offset +
-                            (fac.count[STATIC_DOUBLE] * BytesPerLong);
+                            ((fac.count[STATIC_DOUBLE] - fac_contended.count[STATIC_DOUBLE]) * BytesPerLong);
   next_static_short_offset = next_static_word_offset +
-                             (fac.count[STATIC_WORD] * BytesPerInt);
+                             ((fac.count[STATIC_WORD] - fac_contended.count[STATIC_WORD]) * BytesPerInt);
   next_static_byte_offset = next_static_short_offset +
-                            (fac.count[STATIC_SHORT] * BytesPerShort);
-  next_static_type_offset = align_size_up((next_static_byte_offset +
-                            fac.count[STATIC_BYTE] ), wordSize );
-  static_field_size = (next_static_type_offset -
-                       next_static_oop_offset) / wordSize;
+                            ((fac.count[STATIC_SHORT] - fac_contended.count[STATIC_SHORT]) * BytesPerShort);
+  next_static_padded_offset = next_static_byte_offset +
+                              ((fac.count[STATIC_BYTE] - fac_contended.count[STATIC_BYTE]) * 1);
 
   first_nonstatic_field_offset = instanceOopDesc::base_offset_in_bytes() +
                                  nonstatic_field_size * heapOopSize;
+
+  // class is contended, pad before all the fields
+  if (parsed_annotations.is_contended()) {
+    first_nonstatic_field_offset += pad_size;
+  }
+
   next_nonstatic_field_offset = first_nonstatic_field_offset;
 
-  unsigned int nonstatic_double_count = fac.count[NONSTATIC_DOUBLE];
-  unsigned int nonstatic_word_count = fac.count[NONSTATIC_WORD];
-  unsigned int nonstatic_short_count = fac.count[NONSTATIC_SHORT];
-  unsigned int nonstatic_byte_count = fac.count[NONSTATIC_BYTE];
-  unsigned int nonstatic_oop_count = fac.count[NONSTATIC_OOP];
+  unsigned int nonstatic_double_count = fac.count[NONSTATIC_DOUBLE] - fac_contended.count[NONSTATIC_DOUBLE];
+  unsigned int nonstatic_word_count = fac.count[NONSTATIC_WORD] - fac_contended.count[NONSTATIC_WORD];
+  unsigned int nonstatic_short_count = fac.count[NONSTATIC_SHORT] - fac_contended.count[NONSTATIC_SHORT];
+  unsigned int nonstatic_byte_count = fac.count[NONSTATIC_BYTE] - fac_contended.count[NONSTATIC_BYTE];
+  unsigned int nonstatic_oop_count = fac.count[NONSTATIC_OOP] - fac_contended.count[NONSTATIC_OOP];
 
   bool super_has_nonstatic_fields =
     (super_klass() != NULL && super_klass->has_nonstatic_fields());
   bool has_nonstatic_fields = super_has_nonstatic_fields ||
     ((nonstatic_double_count + nonstatic_word_count +
@@ -3527,16 +3572,16 @@
     allocation_style = 0; // Allocate oops first
     compact_fields = false; // Don't compact fields
   }
 
   if( allocation_style == 0 ) {
-    // Fields order: oops, longs/doubles, ints, shorts/chars, bytes
+    // Fields order: oops, longs/doubles, ints, shorts/chars, bytes, padded fields
     next_nonstatic_oop_offset = next_nonstatic_field_offset;
     next_nonstatic_double_offset = next_nonstatic_oop_offset +
                                    (nonstatic_oop_count * heapOopSize);
   } else if( allocation_style == 1 ) {
-    // Fields order: longs/doubles, ints, shorts/chars, bytes, oops
+    // Fields order: longs/doubles, ints, shorts/chars, bytes, oops, padded fields
     next_nonstatic_double_offset = next_nonstatic_field_offset;
   } else if( allocation_style == 2 ) {
     // Fields allocation: oops fields in super and sub classes are together.
     if( nonstatic_field_size > 0 && super_klass() != NULL &&
         super_klass->nonstatic_oop_map_size() > 0 ) {
@@ -3611,31 +3656,37 @@
                                 (nonstatic_double_count * BytesPerLong);
   next_nonstatic_short_offset = next_nonstatic_word_offset +
                                 (nonstatic_word_count * BytesPerInt);
   next_nonstatic_byte_offset = next_nonstatic_short_offset +
                                (nonstatic_short_count * BytesPerShort);
-
-  int notaligned_offset;
-  if( allocation_style == 0 ) {
-    notaligned_offset = next_nonstatic_byte_offset + nonstatic_byte_count;
-  } else { // allocation_style == 1
-    next_nonstatic_oop_offset = next_nonstatic_byte_offset + nonstatic_byte_count;
+  next_nonstatic_padded_offset = next_nonstatic_byte_offset +
+                                 nonstatic_byte_count;
+
+  // let oops jump before padding with this allocation style
+  if( allocation_style == 1 ) {
+    next_nonstatic_oop_offset = next_nonstatic_padded_offset;
     if( nonstatic_oop_count > 0 ) {
       next_nonstatic_oop_offset = align_size_up(next_nonstatic_oop_offset, heapOopSize);
     }
-    notaligned_offset = next_nonstatic_oop_offset + (nonstatic_oop_count * heapOopSize);
+    next_nonstatic_padded_offset = next_nonstatic_oop_offset + (nonstatic_oop_count * heapOopSize);
   }
-  next_nonstatic_type_offset = align_size_up(notaligned_offset, heapOopSize );
-  nonstatic_field_size = nonstatic_field_size + ((next_nonstatic_type_offset
-                                                  - first_nonstatic_field_offset)/heapOopSize);
 
   // Iterate over fields again and compute correct offsets.
   // The field allocation type was temporarily stored in the offset slot.
   // oop fields are located before non-oop fields (static and non-static).
   for (AllFieldStream fs(fields, cp); !fs.done(); fs.next()) {
+
+    // skip already laid out fields
+    if (fs.is_offset_set()) continue;
+
+    // contended fields are handled below
+    if (fs.is_contended()) continue;
+
     int real_offset;
-    FieldAllocationType atype = (FieldAllocationType) fs.offset();
+    FieldAllocationType atype = (FieldAllocationType) fs.allocation_type();
+
+    // pack the rest of the fields
     switch (atype) {
       case STATIC_OOP:
         real_offset = next_static_oop_offset;
        next_static_oop_offset += heapOopSize;
        break;
@@ -3720,17 +3771,229 @@
         ShouldNotReachHere();
     }
     fs.set_offset(real_offset);
   }
 
+
+  // Handle the contended cases.
+  //
+  // Each contended field should not intersect the cache line with another contended field.
+  // In the absence of alignment information, we end up with pessimistically separating
+  // the fields with full-width padding.
+  //
+  // Additionally, this should not break alignment for the fields, so we round the alignment up
+  // for each field.
+  if (contended_count > 0) {
+
+    // if there is at least one contended field, we need to have pre-padding for them
+    if (nonstatic_contended_count > 0) {
+      next_nonstatic_padded_offset += pad_size;
+    }
+
+    // collect all contended groups
+    BitMap bm(cp->size());
+    for (AllFieldStream fs(fields, cp); !fs.done(); fs.next()) {
+      // skip already laid out fields
+      if (fs.is_offset_set()) continue;
+
+      if (fs.is_contended()) {
+        bm.set_bit(fs.contended_group());
+      }
+    }
+
+    int current_group = -1;
+    while ((current_group = (int)bm.get_next_one_offset(current_group + 1)) != (int)bm.size()) {
+
+      for (AllFieldStream fs(fields, cp); !fs.done(); fs.next()) {
+
+        // skip already laid out fields
+        if (fs.is_offset_set()) continue;
+
+        // skip non-contended fields and fields from different group
+        if (!fs.is_contended() || (fs.contended_group() != current_group)) continue;
+
+        // handle statics below
+        if (fs.access_flags().is_static()) continue;
+
+        int real_offset;
+        FieldAllocationType atype = (FieldAllocationType) fs.allocation_type();
+
+        switch (atype) {
+          case NONSTATIC_BYTE:
+            next_nonstatic_padded_offset = align_size_up(next_nonstatic_padded_offset, 1);
+            real_offset = next_nonstatic_padded_offset;
+            next_nonstatic_padded_offset += 1;
+            break;
+
+          case NONSTATIC_SHORT:
+            next_nonstatic_padded_offset = align_size_up(next_nonstatic_padded_offset, BytesPerShort);
+            real_offset = next_nonstatic_padded_offset;
+            next_nonstatic_padded_offset += BytesPerShort;
+            break;
+
+          case NONSTATIC_WORD:
+            next_nonstatic_padded_offset = align_size_up(next_nonstatic_padded_offset, BytesPerInt);
+            real_offset = next_nonstatic_padded_offset;
+            next_nonstatic_padded_offset += BytesPerInt;
+            break;
+
+          case NONSTATIC_DOUBLE:
+            next_nonstatic_padded_offset = align_size_up(next_nonstatic_padded_offset, BytesPerLong);
+            real_offset = next_nonstatic_padded_offset;
+            next_nonstatic_padded_offset += BytesPerLong;
+            break;
+
+          case NONSTATIC_OOP:
+            next_nonstatic_padded_offset = align_size_up(next_nonstatic_padded_offset, heapOopSize);
+            real_offset = next_nonstatic_padded_offset;
+            next_nonstatic_padded_offset += heapOopSize;
+
+            // Create new oop map
+            nonstatic_oop_offsets[nonstatic_oop_map_count] = real_offset;
+            nonstatic_oop_counts [nonstatic_oop_map_count] = 1;
+            nonstatic_oop_map_count += 1;
+            if( first_nonstatic_oop_offset == 0 ) { // Undefined
+              first_nonstatic_oop_offset = real_offset;
+            }
+            break;
+
+          default:
+            ShouldNotReachHere();
+        }
+
+        if (fs.contended_group() == 0) {
+          // Contended group defines the equivalence class over the fields:
+          // the fields within the same contended group are not inter-padded.
+          // The only exception is default group, which does not incur the
+          // equivalence, and so requires intra-padding.
+          next_nonstatic_padded_offset += pad_size;
+        }
+
+        fs.set_offset(real_offset);
+      } // for
+
+      // Start laying out the next group.
+      // Note that this will effectively pad the last group in the back;
+      // this is expected to alleviate memory contention effects for
+      // subclass fields and/or adjacent object.
+      // If this was the default group, the padding is already in place.
+      if (current_group != 0) {
+        next_nonstatic_padded_offset += pad_size;
+      }
+    }
+
+    // handle static fields
+
+    // if there is at least one contended field, we need to have pre-padding for them
+    if (static_contended_count > 0) {
+      next_static_padded_offset += pad_size;
+    }
+
+    current_group = -1;
+    while ((current_group = (int)bm.get_next_one_offset(current_group + 1)) != (int)bm.size()) {
+
+      for (AllFieldStream fs(fields, cp); !fs.done(); fs.next()) {
+
+        // skip already laid out fields
+        if (fs.is_offset_set()) continue;
+
+        // skip non-contended fields and fields from different group
+        if (!fs.is_contended() || (fs.contended_group() != current_group)) continue;
+
+        // non-statics already handled above
+        if (!fs.access_flags().is_static()) continue;
+
+        int real_offset;
+        FieldAllocationType atype = (FieldAllocationType) fs.allocation_type();
+
+        switch (atype) {
+
+          case STATIC_BYTE:
+            next_static_padded_offset = align_size_up(next_static_padded_offset, 1);
+            real_offset = next_static_padded_offset;
+            next_static_padded_offset += 1;
+            break;
+
+          case STATIC_SHORT:
+            next_static_padded_offset = align_size_up(next_static_padded_offset, BytesPerShort);
+            real_offset = next_static_padded_offset;
+            next_static_padded_offset += BytesPerShort;
+            break;
+
+          case STATIC_WORD:
+            next_static_padded_offset = align_size_up(next_static_padded_offset, BytesPerInt);
+            real_offset = next_static_padded_offset;
+            next_static_padded_offset += BytesPerInt;
+            break;
+
+          case STATIC_DOUBLE:
+            next_static_padded_offset = align_size_up(next_static_padded_offset, BytesPerLong);
+            real_offset = next_static_padded_offset;
+            next_static_padded_offset += BytesPerLong;
+            break;
+
+          case STATIC_OOP:
+            next_static_padded_offset = align_size_up(next_static_padded_offset, heapOopSize);
+            real_offset = next_static_padded_offset;
+            next_static_padded_offset += heapOopSize;
+            break;
+
+          default:
+            ShouldNotReachHere();
+        }
+
+        if (fs.contended_group() == 0) {
+          // Contended group defines the equivalence class over the fields:
+          // the fields within the same contended group are not inter-padded.
+          // The only exception is default group, which does not incur the
+          // equivalence, and so requires intra-padding.
+          next_static_padded_offset += pad_size;
+        }
+
+        fs.set_offset(real_offset);
+      } // for
+
+      // Start laying out the next group.
+      // Note that this will effectively pad the last group in the back;
+      // this is expected to alleviate memory contention effects for
+      // subclass fields and/or adjacent object.
+      // If this was the default group, the padding is already in place.
+      if (current_group != 0) {
+        next_static_padded_offset += pad_size;
+      }
+
+    }
+
+  } // handle contended
+
   // Size of instances
   int instance_size;
 
+  int notaligned_offset = next_nonstatic_padded_offset;
+
+  // Entire class is contended, pad in the back.
+  // This helps to alleviate memory contention effects for subclass fields
+  // and/or adjacent object.
+  if (parsed_annotations.is_contended()) {
+    notaligned_offset += pad_size;
+    next_static_padded_offset += pad_size;
+  }
+
+  int next_static_type_offset = align_size_up(next_static_padded_offset, wordSize);
+  int static_field_size = (next_static_type_offset -
+                           InstanceMirrorKlass::offset_of_static_fields()) / wordSize;
+
+  next_nonstatic_type_offset = align_size_up(notaligned_offset, heapOopSize );
+  nonstatic_field_size = nonstatic_field_size + ((next_nonstatic_type_offset
+                                                  - first_nonstatic_field_offset)/heapOopSize);
+
   next_nonstatic_type_offset = align_size_up(notaligned_offset, wordSize );
   instance_size = align_object_size(next_nonstatic_type_offset / wordSize);
 
-  assert(instance_size == align_object_size(align_size_up((instanceOopDesc::base_offset_in_bytes() + nonstatic_field_size*heapOopSize), wordSize) / wordSize), "consistent layout helper value");
+  assert(instance_size == align_object_size(align_size_up(
+         (instanceOopDesc::base_offset_in_bytes() + nonstatic_field_size*heapOopSize + ((parsed_annotations.is_contended()) ? pad_size : 0)),
+         wordSize) / wordSize), "consistent layout helper value");
 
   // Number of non-static oop map blocks allocated at end of klass.
   const unsigned int total_oop_map_count =
     compute_oop_map_count(super_klass, nonstatic_oop_map_count,
                           first_nonstatic_oop_offset);
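
To make the padded layout above concrete, here is a sketch of what the code computes for a simple hypothetical class; pad_size comes from ContendedPaddingWidth and is left symbolic, and the exact offsets depend on the object header and the size of the regular field block.

    class OneHot {
        int plain;                // laid out in the regular (unpadded) field block
        @sun.misc.Contended
        volatile long hot;        // default contended group (group 0)
    }

    // Expected non-static layout, following the loops above:
    //   [object header][plain][rest of the regular block]
    //   [pad_size bytes]             pre-padding (nonstatic_contended_count > 0)
    //   [hot: 8 bytes, aligned up to BytesPerLong]
    //   [pad_size bytes]             intra-padding, because group 0 pads after each field
    // For a named group the fields are placed back to back and the pad_size padding
    // is added once, after the whole group.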
@@ -4006,20 +4269,64 @@
               this_klass->external_name());
       }
     }
 #endif
 
+#ifndef PRODUCT
+  if (PrintFieldLayout) {
+    print_field_layout(name,
+                       fields,
+                       cp,
+                       instance_size,
+                       first_nonstatic_field_offset,
+                       next_nonstatic_field_offset,
+                       next_static_type_offset);
+  }
+#endif
+
     // preserve result across HandleMark
     preserve_this_klass = this_klass();
   }
 
   // Create new handle outside HandleMark (might be needed for
   // Extended Class Redefinition)
   instanceKlassHandle this_klass (THREAD, preserve_this_klass);
   debug_only(this_klass->verify();)
 
   return this_klass;
+}
+
+void ClassFileParser::print_field_layout(Symbol* name,
+                                         Array<u2>* fields,
+                                         constantPoolHandle cp,
+                                         int instance_size,
+                                         int instance_fields_start,
+                                         int instance_fields_end,
+                                         int static_fields_end) {
+  tty->print("%s: field layout\n", name->as_klass_external_name());
+  tty->print(" @%3d %s\n", instance_fields_start, "--- instance fields start ---");
+  for (AllFieldStream fs(fields, cp); !fs.done(); fs.next()) {
+    if (!fs.access_flags().is_static()) {
+      tty->print(" @%3d \"%s\" %s\n",
+          fs.offset(),
+          fs.name()->as_klass_external_name(),
+          fs.signature()->as_klass_external_name());
+    }
+  }
+  tty->print(" @%3d %s\n", instance_fields_end, "--- instance fields end ---");
+  tty->print(" @%3d %s\n", instance_size * wordSize, "--- instance ends ---");
+  tty->print(" @%3d %s\n", InstanceMirrorKlass::offset_of_static_fields(), "--- static fields start ---");
+  for (AllFieldStream fs(fields, cp); !fs.done(); fs.next()) {
+    if (fs.access_flags().is_static()) {
+      tty->print(" @%3d \"%s\" %s\n",
+          fs.offset(),
+          fs.name()->as_klass_external_name(),
+          fs.signature()->as_klass_external_name());
+    }
+  }
+  tty->print(" @%3d %s\n", static_fields_end, "--- static fields end ---");
+  tty->print("\n");
 }
 
 unsigned int
 ClassFileParser::compute_oop_map_count(instanceKlassHandle super,
                                        unsigned int nonstatic_oop_map_count,