annotate src/share/vm/gc_implementation/shared/gcUtil.cpp @ 989:148e5441d916

6863023: need non-perm oops in code cache for JSR 292
Summary: Make a special root-list for those few nmethods which might contain non-perm oops.
Reviewed-by: twisti, kvn, never, jmasa, ysr
author jrose
date Tue, 15 Sep 2009 21:53:47 -0700
parents a61af66fc99e
children e018e6884bd8
/*
 * Copyright 2002-2005 Sun Microsystems, Inc. All Rights Reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
 * CA 95054 USA or visit www.sun.com if you need additional information or
 * have any questions.
 *
 */

# include "incls/_precompiled.incl"
# include "incls/_gcUtil.cpp.incl"

// Catch-all file for utility classes

float AdaptiveWeightedAverage::compute_adaptive_average(float new_sample,
                                                        float average) {
  // We smooth the samples by not using weight() directly until we've
  // had enough data to make it meaningful. We'd like the first weight
  // used to be 1, the second to be 1/2, etc until we have 100/weight
  // samples.
  unsigned count_weight = 100/count();
  unsigned adaptive_weight = (MAX2(weight(), count_weight));

  float new_avg = exp_avg(average, new_sample, adaptive_weight);

  return new_avg;
}
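
// Illustrative sketch (editorial addition, not part of the original file):
// a self-contained rendering of the adaptive weighting above.  It assumes
// exp_avg() blends as ((100 - weight) * avg + weight * sample) / 100 with
// the weight expressed as a percent (as the companion gcUtil.hpp defines it),
// and that count is at least 1 when the average is computed.  The helper
// names demo_exp_avg and demo_adaptive_average are hypothetical and exist
// only for this sketch.
//
// With weight() == 25, the effective weight is 100% for the first sample,
// 50% for the second, ~33% for the third, and settles at 25% from the
// fourth sample on, so early samples are not drowned out by an initial
// average that carries no history yet.

static float demo_exp_avg(float avg, float sample, unsigned weight_pct) {
  // Exponentially decaying average: weight_pct percent of the new sample,
  // the remainder from the running average.
  return ((100.0f - weight_pct) * avg + weight_pct * sample) / 100.0f;
}

static float demo_adaptive_average(float new_sample, float average,
                                   unsigned count, unsigned weight_pct) {
  // Mirror of compute_adaptive_average(): take the larger of the configured
  // weight and 100/count so the first few samples carry extra weight.
  unsigned count_weight    = 100 / count;
  unsigned adaptive_weight = (weight_pct > count_weight) ? weight_pct
                                                         : count_weight;
  return demo_exp_avg(average, new_sample, adaptive_weight);
}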

void AdaptiveWeightedAverage::sample(float new_sample) {
  increment_count();
  assert(count() != 0,
         "Wraparound -- history would be incorrectly discarded");

  // Compute the new weighted average
  float new_avg = compute_adaptive_average(new_sample, average());
  set_average(new_avg);
  _last_sample = new_sample;
}

void AdaptivePaddedAverage::sample(float new_sample) {
  // Compute our parent class's sample information
  AdaptiveWeightedAverage::sample(new_sample);

  // Now compute the deviation and the new padded sample
  float new_avg = average();
  float new_dev = compute_adaptive_average(fabsd(new_sample - new_avg),
                                           deviation());
  set_deviation(new_dev);
  set_padded_average(new_avg + padding() * new_dev);
  _last_sample = new_sample;
}
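
// Illustrative note (editorial addition, not part of the original file):
// the padded average is just the decaying average plus padding() units of
// the decaying deviation, giving a deliberately conservative estimate for
// consumers that prefer to over- rather than under-predict.  The numbers
// below are made up purely for the trace:
//
//   average()  deviation()  padding()   padded_average()
//     10.0        2.0           2        10.0 + 2 * 2.0 = 14.0
//     10.0        0.5           2        10.0 + 2 * 0.5 = 11.0
//
// A minimal sketch of that combination, with a hypothetical helper name:

static float demo_padded_average(float avg, float dev, unsigned padding) {
  // Pad the estimate by 'padding' deviations so it errs on the high side.
  return avg + padding * dev;
}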

void AdaptivePaddedNoZeroDevAverage::sample(float new_sample) {
  // Compute our parent class's sample information
  AdaptiveWeightedAverage::sample(new_sample);

  float new_avg = average();
  if (new_sample != 0) {
    // We only create a new deviation if the sample is non-zero
    float new_dev = compute_adaptive_average(fabsd(new_sample - new_avg),
                                             deviation());

    set_deviation(new_dev);
  }
  set_padded_average(new_avg + padding() * deviation());
  _last_sample = new_sample;
}

LinearLeastSquareFit::LinearLeastSquareFit(unsigned weight) :
  _sum_x(0), _sum_x_squared(0), _sum_y(0), _sum_xy(0),
  _intercept(0), _slope(0), _mean_x(weight), _mean_y(weight) {}

void LinearLeastSquareFit::update(double x, double y) {
  _sum_x = _sum_x + x;
  _sum_x_squared = _sum_x_squared + x * x;
  _sum_y = _sum_y + y;
  _sum_xy = _sum_xy + x * y;
  _mean_x.sample(x);
  _mean_y.sample(y);
  assert(_mean_x.count() == _mean_y.count(), "Incorrect count");
  if ( _mean_x.count() > 1 ) {
    double slope_denominator;
    slope_denominator = (_mean_x.count() * _sum_x_squared - _sum_x * _sum_x);
    // Some tolerance should be injected here. A denominator that is
    // nearly 0 should be avoided.

    if (slope_denominator != 0.0) {
      double slope_numerator;
      slope_numerator = (_mean_x.count() * _sum_xy - _sum_x * _sum_y);
      _slope = slope_numerator / slope_denominator;

      // The _mean_y and _mean_x are decaying averages and can
      // be used to discount earlier data. If they are used,
      // first consider whether all the quantities should be
      // kept as decaying averages.
      // _intercept = _mean_y.average() - _slope * _mean_x.average();
      _intercept = (_sum_y - _slope * _sum_x) / ((double) _mean_x.count());
    }
  }
}
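
// Illustrative sketch (editorial addition, not part of the original file):
// update() maintains the classic running sums for an ordinary least-squares
// line,
//
//   slope     = (n * sum_xy - sum_x * sum_y) / (n * sum_x_squared - sum_x^2)
//   intercept = (sum_y - slope * sum_x) / n
//
// For the points (1,2), (2,4), (3,6):
//   slope     = (3*28 - 6*12) / (3*14 - 6*6) = 12 / 6 = 2
//   intercept = (12 - 2*6) / 3 = 0
// i.e. exactly the line y = 2x.  The standalone struct below (hypothetical,
// not part of HotSpot) repeats the same arithmetic without the decaying
// means used above.

struct DemoLeastSquares {
  double   sum_x, sum_x_squared, sum_y, sum_xy;
  unsigned n;
  double   slope, intercept;

  DemoLeastSquares() : sum_x(0), sum_x_squared(0), sum_y(0), sum_xy(0),
                       n(0), slope(0), intercept(0) {}

  void update(double x, double y) {
    sum_x         += x;
    sum_x_squared += x * x;
    sum_y         += y;
    sum_xy        += x * y;
    n             += 1;
    double denominator = n * sum_x_squared - sum_x * sum_x;
    if (n > 1 && denominator != 0.0) {
      slope     = (n * sum_xy - sum_x * sum_y) / denominator;
      intercept = (sum_y - slope * sum_x) / n;
    }
  }
};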

double LinearLeastSquareFit::y(double x) {
  double new_y;

  if ( _mean_x.count() > 1 ) {
    new_y = (_intercept + _slope * x);
    return new_y;
  } else {
    return _mean_y.average();
  }
}

// Both decrement_will_decrease() and increment_will_decrease() return
// true for a slope of 0. That is because a change is necessary before
// a slope can be calculated and a 0 slope will, in general, indicate
// that no calculation of the slope has yet been done. Returning true
// for a slope equal to 0 reflects the intuitive expectation of the
// dependence on the slope. Don't use the complement of these functions
// since that intuitive expectation is not built into the complement.
bool LinearLeastSquareFit::decrement_will_decrease() {
  return (_slope >= 0.00);
}

bool LinearLeastSquareFit::increment_will_decrease() {
  return (_slope <= 0.00);
}
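
// Illustrative sketch (editorial addition, not part of the original file):
// a hypothetical caller that uses a fit of cost versus some tunable size to
// decide which direction to move the size in order to drive the cost down.
// Because both predicates return true while the slope is still 0, a caller
// like this moves in the "decrement" direction first, which is what
// eventually produces a measurable slope.

static double demo_adjust(LinearLeastSquareFit& cost_vs_size,
                          double size, double step) {
  if (cost_vs_size.decrement_will_decrease()) {
    // Shrinking is predicted to lower the cost (slope >= 0).
    return size - step;
  } else if (cost_vs_size.increment_will_decrease()) {
    // Growing is predicted to lower the cost (slope < 0).
    return size + step;
  }
  return size;  // Unreachable: the two predicates cover every slope.
}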