annotate src/share/vm/gc_implementation/shared/gcUtil.cpp @ 10374:87c64c0438fb

6976350: G1: deal with fragmentation while copying objects during GC
Summary: Create G1ParGCAllocBufferContainer to hold two buffers instead of the single buffer used previously, so the first-priority buffer is retained longer. When a large object does not fit in the free space left in the first-priority buffer, it can fall back to the second-priority buffer, while the first-priority buffer still gets more chances to allocate smaller objects. Overall, this improves heap space efficiency.
Reviewed-by: johnc, jmasa, brutisso
Contributed-by: tamao <tao.mao@oracle.com>
author tamao
date Mon, 03 Jun 2013 14:37:13 -0700
parents b9a9ed0f8eeb
children 63a4eb8bcd23
/*
 * Copyright (c) 2002, 2012, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "gc_implementation/shared/gcUtil.hpp"

// Catch-all file for utility classes

float AdaptiveWeightedAverage::compute_adaptive_average(float new_sample,
                                                        float average) {
  // We smooth the samples by not using weight() directly until we've
  // had enough data to make it meaningful. We'd like the first weight
  // used to be 1, the second to be 1/2, etc until we have
  // OLD_THRESHOLD/weight samples.
  unsigned count_weight = 0;

  // Avoid division by zero if the counter wraps (7158457)
  if (!is_old()) {
    count_weight = OLD_THRESHOLD/count();
  }

  unsigned adaptive_weight = (MAX2(weight(), count_weight));

  float new_avg = exp_avg(average, new_sample, adaptive_weight);

  return new_avg;
}
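// Worked illustration of the weighting above, assuming OLD_THRESHOLD == 100
// and exp_avg() in gcUtil.hpp computing
// ((100 - weight) * avg + weight * sample) / 100: with weight() == 25 the
// adaptive weight applied to the first samples is 100%, 50%, 33%, 25%, ...
// so the first sample becomes the average outright, the second counts for
// half, and after roughly OLD_THRESHOLD/weight() == 4 samples the fixed
// steady-state weight of 25% takes over.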

void AdaptiveWeightedAverage::sample(float new_sample) {
  increment_count();

  // Compute the new weighted average
  float new_avg = compute_adaptive_average(new_sample, average());
  set_average(new_avg);
  _last_sample = new_sample;
}

void AdaptiveWeightedAverage::print() const {
  print_on(tty);
}

void AdaptiveWeightedAverage::print_on(outputStream* st) const {
  guarantee(false, "NYI");
}

void AdaptivePaddedAverage::print() const {
  print_on(tty);
}

void AdaptivePaddedAverage::print_on(outputStream* st) const {
  guarantee(false, "NYI");
}

void AdaptivePaddedNoZeroDevAverage::print() const {
  print_on(tty);
}

void AdaptivePaddedNoZeroDevAverage::print_on(outputStream* st) const {
  guarantee(false, "NYI");
}

void AdaptivePaddedAverage::sample(float new_sample) {
  // Compute new adaptive weighted average based on new sample.
  AdaptiveWeightedAverage::sample(new_sample);

  // Now update the deviation and the padded average.
  float new_avg = average();
  float new_dev = compute_adaptive_average(fabsd(new_sample - new_avg),
                                           deviation());
  set_deviation(new_dev);
  set_padded_average(new_avg + padding() * new_dev);
  _last_sample = new_sample;
}
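// Note on set_padded_average() above: the padded average tracks average()
// plus padding() times the adaptively averaged absolute deviation, i.e. a
// deliberately conservative "mean plus a few deviations" estimate. For
// example, with average() == 10.0, deviation() == 2.0 and padding() == 3,
// the padded average is 10.0 + 3 * 2.0 == 16.0.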

void AdaptivePaddedNoZeroDevAverage::sample(float new_sample) {
  // Compute our parent class's sample information
  AdaptiveWeightedAverage::sample(new_sample);

  float new_avg = average();
  if (new_sample != 0) {
    // We only create a new deviation if the sample is non-zero
    float new_dev = compute_adaptive_average(fabsd(new_sample - new_avg),
                                             deviation());

    set_deviation(new_dev);
  }
  set_padded_average(new_avg + padding() * deviation());
  _last_sample = new_sample;
}

LinearLeastSquareFit::LinearLeastSquareFit(unsigned weight) :
  _sum_x(0), _sum_x_squared(0), _sum_y(0), _sum_xy(0),
  _intercept(0), _slope(0), _mean_x(weight), _mean_y(weight) {}

void LinearLeastSquareFit::update(double x, double y) {
  _sum_x = _sum_x + x;
  _sum_x_squared = _sum_x_squared + x * x;
  _sum_y = _sum_y + y;
  _sum_xy = _sum_xy + x * y;
  _mean_x.sample(x);
  _mean_y.sample(y);
  assert(_mean_x.count() == _mean_y.count(), "Incorrect count");
  if ( _mean_x.count() > 1 ) {
    double slope_denominator;
    slope_denominator = (_mean_x.count() * _sum_x_squared - _sum_x * _sum_x);
    // Some tolerance should be injected here. A denominator that is
    // nearly 0 should be avoided.

    if (slope_denominator != 0.0) {
      double slope_numerator;
      slope_numerator = (_mean_x.count() * _sum_xy - _sum_x * _sum_y);
      _slope = slope_numerator / slope_denominator;

      // The _mean_y and _mean_x are decaying averages and can
      // be used to discount earlier data. If they are used,
      // first consider whether all the quantities should be
      // kept as decaying averages.
      //   _intercept = _mean_y.average() - _slope * _mean_x.average();
      _intercept = (_sum_y - _slope * _sum_x) / ((double) _mean_x.count());
    }
  }
}
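// The update above maintains the standard closed-form least-squares fit over
// all points seen so far, with n == _mean_x.count():
//   slope     = (n * sum(x*y) - sum(x) * sum(y)) / (n * sum(x^2) - sum(x)^2)
//   intercept = (sum(y) - slope * sum(x)) / n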

double LinearLeastSquareFit::y(double x) {
  double new_y;

  if ( _mean_x.count() > 1 ) {
    new_y = (_intercept + _slope * x);
    return new_y;
  } else {
    return _mean_y.average();
  }
}

// Both decrement_will_decrease() and increment_will_decrease() return
// true for a slope of 0. That is because a change is necessary before
// a slope can be calculated and a 0 slope will, in general, indicate
// that no calculation of the slope has yet been done. Returning true
// for a slope equal to 0 reflects the intuitive expectation of the
// dependence on the slope. Don't use the complement of these functions
// since that intuitive expectation is not built into the complement.
bool LinearLeastSquareFit::decrement_will_decrease() {
  return (_slope >= 0.00);
}

bool LinearLeastSquareFit::increment_will_decrease() {
  return (_slope <= 0.00);
}
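
The adaptive weighting in AdaptiveWeightedAverage can be tried outside HotSpot with a short standalone sketch. The program below re-derives the same numbers for a few samples, assuming exp_avg() in gcUtil.hpp computes ((100 - weight) * avg + weight * sample) / 100 and OLD_THRESHOLD == 100; it uses no HotSpot headers and is only an illustration of the formula.

#include <algorithm>
#include <cstdio>

int main() {
  const unsigned weight        = 25;   // steady-state weight, in percent
  const unsigned OLD_THRESHOLD = 100;  // assumed value from gcUtil.hpp

  float avg = 0.0f;
  unsigned count = 0;
  const float samples[] = { 10.0f, 20.0f, 40.0f, 40.0f, 40.0f };

  for (float s : samples) {
    ++count;
    // Early samples get a large weight (100, 50, 33, ...) so the average
    // converges quickly; afterwards the fixed 25% weight takes over.
    unsigned count_weight = OLD_THRESHOLD / count;
    unsigned w = std::max(weight, count_weight);
    avg = ((100.0f - w) * avg + w * s) / 100.0f;
    std::printf("sample %u: value %.1f, weight %u%%, average %.2f\n",
                count, s, w, avg);
  }
  return 0;
}

Under these assumptions the average moves 10.0, 15.0, 23.25, 27.44, 30.58 across the five samples: the first few samples dominate, and later ones blend in at the fixed steady-state weight.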