annotate src/share/vm/gc_implementation/shared/gcUtil.cpp @ 1972:f95d63e2154a

6989984: Use standard include model for Hospot
Summary: Replaced MakeDeps and the includeDB files with more standardized solutions.
Reviewed-by: coleenp, kvn, kamg
author stefank
date Tue, 23 Nov 2010 13:22:55 -0800
parents c18cbe5936b8
children 176d0be30214
/*
 * Copyright (c) 2002, 2010, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "gc_implementation/shared/gcUtil.hpp"

// Catch-all file for utility classes

float AdaptiveWeightedAverage::compute_adaptive_average(float new_sample,
                                                        float average) {
  // We smooth the samples by not using weight() directly until we've
  // had enough data to make it meaningful. We'd like the first weight
  // used to be 1, the second to be 1/2, etc until we have 100/weight
  // samples.
  unsigned count_weight = 100/count();
  unsigned adaptive_weight = (MAX2(weight(), count_weight));

  float new_avg = exp_avg(average, new_sample, adaptive_weight);

  return new_avg;
}
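
// Worked example of the ramp-up above (illustrative numbers). exp_avg() is
// declared in gcUtil.hpp and is assumed here to compute the usual
// percentage-weighted decay:
//
//   new_avg = (100 - weight) * average / 100 + weight * new_sample / 100
//
// With weight() == 25:
//   count() == 1  -> count_weight = 100, adaptive_weight = 100, so the
//                    average snaps to the first sample;
//   count() == 2  -> count_weight = 50, old average and new sample
//                    contribute half each;
//   count() == 3  -> count_weight = 33 (integer division);
//   count() >= 4  -> count_weight <= 25, so the configured weight() takes
//                    over and each new sample contributes 25%.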

void AdaptiveWeightedAverage::sample(float new_sample) {
  increment_count();
  assert(count() != 0,
         "Wraparound -- history would be incorrectly discarded");

  // Compute the new weighted average
  float new_avg = compute_adaptive_average(new_sample, average());
  set_average(new_avg);
  _last_sample = new_sample;
}

void AdaptiveWeightedAverage::print() const {
  print_on(tty);
}

void AdaptiveWeightedAverage::print_on(outputStream* st) const {
  guarantee(false, "NYI");
}

void AdaptivePaddedAverage::print() const {
  print_on(tty);
}

void AdaptivePaddedAverage::print_on(outputStream* st) const {
  guarantee(false, "NYI");
}

void AdaptivePaddedNoZeroDevAverage::print() const {
  print_on(tty);
}

void AdaptivePaddedNoZeroDevAverage::print_on(outputStream* st) const {
  guarantee(false, "NYI");
}

void AdaptivePaddedAverage::sample(float new_sample) {
  // Compute new adaptive weighted average based on new sample.
  AdaptiveWeightedAverage::sample(new_sample);

  // Now update the deviation and the padded average.
  float new_avg = average();
  float new_dev = compute_adaptive_average(fabsd(new_sample - new_avg),
                                           deviation());
  set_deviation(new_dev);
  set_padded_average(new_avg + padding() * new_dev);
  _last_sample = new_sample;
}
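
// Worked example for the padding (illustrative numbers, not from the original
// file): with padding() == 3, an average of 10.0 and a smoothed deviation of
// 2.0, padded_average() becomes 10.0 + 3 * 2.0 = 16.0. With a positive
// padding() the padded average is therefore a deliberately conservative
// (high-side) estimate of the sampled quantity rather than its raw mean.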

void AdaptivePaddedNoZeroDevAverage::sample(float new_sample) {
  // Compute our parent class's sample information
  AdaptiveWeightedAverage::sample(new_sample);

  float new_avg = average();
  if (new_sample != 0) {
    // We only create a new deviation if the sample is non-zero
    float new_dev = compute_adaptive_average(fabsd(new_sample - new_avg),
                                             deviation());

    set_deviation(new_dev);
  }
  set_padded_average(new_avg + padding() * deviation());
  _last_sample = new_sample;
}
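
// Illustrative example (hypothetical sample stream): for samples
// 100, 0, 100, 0, ... the zero samples still lower the weighted average, but
// they are skipped when updating the deviation, so deviation() keeps
// reflecting only the spread of the non-zero samples.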

// All running sums and the fitted parameters start at zero so that the first
// update() does not read indeterminate values.
LinearLeastSquareFit::LinearLeastSquareFit(unsigned weight) :
  _sum_x(0), _sum_x_squared(0), _sum_y(0), _sum_xy(0),
  _intercept(0), _slope(0),
  _mean_x(weight), _mean_y(weight) {}

void LinearLeastSquareFit::update(double x, double y) {
  _sum_x = _sum_x + x;
  _sum_x_squared = _sum_x_squared + x * x;
  _sum_y = _sum_y + y;
  _sum_xy = _sum_xy + x * y;
  _mean_x.sample(x);
  _mean_y.sample(y);
  assert(_mean_x.count() == _mean_y.count(), "Incorrect count");
  if ( _mean_x.count() > 1 ) {
    double slope_denominator;
    slope_denominator = (_mean_x.count() * _sum_x_squared - _sum_x * _sum_x);
    // Some tolerance should be injected here.  A denominator that is
    // nearly 0 should be avoided.

    if (slope_denominator != 0.0) {
      double slope_numerator;
      slope_numerator = (_mean_x.count() * _sum_xy - _sum_x * _sum_y);
      _slope = slope_numerator / slope_denominator;

      // The _mean_y and _mean_x are decaying averages and can
      // be used to discount earlier data.  If they are used,
      // first consider whether all the quantities should be
      // kept as decaying averages.
      // _intercept = _mean_y.average() - _slope * _mean_x.average();
      _intercept = (_sum_y - _slope * _sum_x) / ((double) _mean_x.count());
    }
  }
}
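
// The update above maintains a standard (unweighted) least-squares fit over
// all of the points seen so far. With n = _mean_x.count(), the closed forms
// computed are:
//
//   _slope     = (n * sum(x*y) - sum(x) * sum(y)) / (n * sum(x^2) - sum(x)^2)
//   _intercept = (sum(y) - _slope * sum(x)) / n
//
// i.e. the line y = _intercept + _slope * x minimizing the sum of squared
// residuals. The running sums are never decayed, so older points are not
// discounted; only _mean_x and _mean_y decay.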

double LinearLeastSquareFit::y(double x) {
  double new_y;

  if ( _mean_x.count() > 1 ) {
    new_y = (_intercept + _slope * x);
    return new_y;
  } else {
    return _mean_y.average();
  }
}
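
// Illustrative usage sketch (hypothetical variable names; the weight and the
// meaning of x and y are chosen by the caller, e.g. a size policy relating a
// generation size to an observed pause time):
//
//   LinearLeastSquareFit fit(AdaptiveSizePolicyWeight);
//   fit.update((double) eden_size_in_bytes, minor_pause_in_seconds);
//   ...
//   double predicted_pause = fit.y((double) proposed_eden_size_in_bytes);
//
// Until at least two points have been seen, y() falls back to the decaying
// mean of the observed y values instead of evaluating an undefined line.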

// Both decrement_will_decrease() and increment_will_decrease() return
// true for a slope of 0.  That is because a change is necessary before
// a slope can be calculated and a 0 slope will, in general, indicate
// that no calculation of the slope has yet been done.  Returning true
// for a slope equal to 0 reflects the intuitive expectation of the
// dependence on the slope.  Don't use the complement of these functions
// since that intuitive expectation is not built into the complement.
bool LinearLeastSquareFit::decrement_will_decrease() {
  return (_slope >= 0.00);
}

bool LinearLeastSquareFit::increment_will_decrease() {
  return (_slope <= 0.00);
}
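
// Illustrative reading of the two predicates above (hypothetical scenario):
// if x is a generation size and y a pause time, a positive _slope means that
// larger sizes have gone with longer pauses, so decrement_will_decrease()
// answers true (shrinking should lower the pause) while
// increment_will_decrease() answers false. With _slope == 0 both answer true,
// as discussed in the comment above, because no trend has been established.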