Mercurial > hg > graal-jvmci-8
view graal/com.oracle.max.graal.compiler/src/com/oracle/max/graal/compiler/phases/RematerializationPhase.java @ 3221:034a4db85c59
Draft rematerialization after eager GVN, only canonicalize new nodes after loop optimisations
author | Gilles Duboscq <gilles.duboscq@oracle.com> |
---|---|
date | Thu, 14 Jul 2011 22:22:44 +0200 |
parents | |
children | 8793d44991fd |
line wrap: on
line source
/* * Copyright (c) 2011, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. 
 */
package com.oracle.max.graal.compiler.phases;

import java.text.*;
import java.util.*;

import com.oracle.max.graal.compiler.*;
import com.oracle.max.graal.compiler.ir.*;
import com.oracle.max.graal.compiler.schedule.*;
import com.oracle.max.graal.compiler.util.*;
import com.oracle.max.graal.graph.*;

/**
 * Draft rematerialization phase (see changeset message: runs after eager GVN).
 *
 * For each schedulable {@link FloatingNode}, this phase estimates the probability that the node's
 * value is actually used on the paths starting at the block it was scheduled in. If that
 * probability falls below {@code GraalOptions.MinimumUsageProbability}, the node is duplicated
 * ("rematerialized") into later successor blocks via {@link Node#copyWithEdges()} and the usages
 * reachable from those blocks are rewired to the copy.
 *
 * The estimate is computed in up to two passes per node:
 * <ul>
 *   <li>a cheap pass with {@code ignoreUsages == true}, which tracks only probabilities and
 *       allocates no per-block usage bitmaps;</li>
 *   <li>if the cheap pass falls below the threshold, a full pass with
 *       {@code ignoreUsages == false}, which also records which usages are reachable from each
 *       block so they can be redirected to the rematerialized copy.</li>
 * </ul>
 */
public class RematerializationPhase extends Phase {

    // Schedule produced by IdentifyBlocksPhase: original node -> block it was placed in.
    private NodeMap<Block> nodeToBlock;
    // Blocks for nodes created by this phase (copies); nodeToBlock.isNew() is true for these,
    // so block(Node) falls back to this map.
    private HashMap<Node, Block> newNodesToBlock;
    private List<Block> blocks;
    // Per-block UsageProbability, indexed by Block.blockID(); reset (Arrays.fill null) for each
    // node processed and again when switching from the cheap pass to the full pass.
    // NOTE(review): field name has a typo ("probablity"); kept as-is here.
    private UsageProbability[] probablityCache;
    // Mode flag for the current pass: true = probabilities only (no usage bitmaps allocated),
    // false = also maintain NodeBitMap of reachable usages per block.
    private boolean ignoreUsages;

    @Override
    protected void run(Graph graph) {
        // Build the block schedule first; all probability queries are relative to it.
        final IdentifyBlocksPhase s = new IdentifyBlocksPhase(true);
        s.apply(graph);
        newNodesToBlock = new HashMap<Node, Block>();
        nodeToBlock = s.getNodeToBlock();
        blocks = s.getBlocks();
        probablityCache = new UsageProbability[blocks.size()];
        NodeWorkList work = graph.createNodeWorkList();
        NodeBitMap done = graph.createNodeBitMap();
        work.addAll(graph.getNodes(FloatingNode.class));
        for (Node node : work) {
            // Phis, locals, constants and locations are never rematerialized; mark them done
            // immediately so nodes they use are not delayed on their account.
            if (node instanceof Phi || node instanceof Local || node instanceof Constant || node instanceof LocationNode) {
                done.mark(node);
                continue;
            }
            // Process a node only after all of its floating (non-phi) usages have been processed,
            // so that usages rewired to copies are already final when this node is examined.
            boolean delay = false;
            for (Node usage : node.usages()) {
                if (usage instanceof FloatingNode && !(usage instanceof Phi) && done.isNotNewNotMarked(usage)) {
                    delay = true;
                    break;
                }
            }
            if (delay) {
                work.addAgain(node);
                continue;
            }
            done.mark(node);
            // Fresh cache per node; start with the cheap (probability-only) pass.
            Arrays.fill(probablityCache, null);
            ignoreUsages = true;
            Block block = nodeToBlock.get(node);
            if (block == null) {
                // Node was not scheduled; nothing to do.
                continue;
            }
            UsageProbability usageProbability = usageProbability(node, block);
            if (usageProbability.probability < GraalOptions.MinimumUsageProbability) {
                if (ignoreUsages) {
                    // Below threshold: redo the computation with usage bitmaps so we know which
                    // usages to rewire per successor block.
                    ignoreUsages = false;
                    Arrays.fill(probablityCache, null);
                    usageProbability = usageProbability(node, block); // recompute with usage maps
                }
                //System.out.println("going to remarterialize " + node + " at " + block + " : " + toString(usageProbability));
                // Skip the first successor: the original node stays available on that path;
                // copies are made only for the remaining successors that reach usages.
                boolean first = true;
                for (Block sux : block.getSuccessors()) {
                    if (first) {
                        first = false;
                        continue;
                    }
                    usageProbability = usageProbability(node, sux);
                    // Snapshot the usages reachable from this successor before rewiring them.
                    List<Node> usages = new LinkedList<Node>();
                    for (Node usage : usageProbability.usages) {
                        usages.add(usage);
                    }
                    if (!usages.isEmpty()) {
                        Node copy = node.copyWithEdges();
                        newNodesToBlock.put(copy, sux);
                        GraalMetrics.Rematerializations++;
                        //System.out.println("Rematerialized " + node + " : " + toString(usageProbability));
                        for (Node usage : usages) {
                            usage.inputs().replace(node, copy);
                        }
                    }
                }
            }
        }
    }

    /**
     * Computes the usage probability of {@code n} as seen from block {@code b}.
     *
     * Seeds the per-block cache with n's direct usages (a usage through a {@link Phi} is
     * attributed to the corresponding phi-predecessor block rather than the merge), then
     * propagates probabilities backwards from successors via {@link #usageProbability0}.
     */
    private UsageProbability usageProbability(Node n, Block b) {
        UsageProbability cached = probablityCache[b.blockID()];
        if (cached != null) {
            return cached;
        }
        if (ignoreUsages) {
            GraalMetrics.PartialUsageProbability++;
        } else {
            GraalMetrics.FullUsageProbability++;
        }
        for (Node usage : n.usages()) {
            if (usage instanceof Phi) {
                // A phi input is "used" at the end of the predecessor that supplies it.
                Phi phi = (Phi) usage;
                Merge merge = phi.merge();
                for (int i = 0; i < phi.valueCount(); i++) {
                    if (phi.valueAt(i) == n) {
                        insertUsageInCache(merge.phiPredecessorAt(i));
                    }
                }
            } else {
                insertUsageInCache(usage);
            }
        }
        return usageProbability0(n, b);
    }

    /**
     * Records {@code usage} in the cache entry of the block it is scheduled in.
     * Blocks containing a usage get probability 1.0; in the full pass the usage is also
     * added to that block's usage bitmap.
     */
    private void insertUsageInCache(Node usage) {
        Block block = block(usage);
        if (block == null) {
            return;
        }
        int blockID = block.blockID();
        UsageProbability usageProbability = probablityCache[blockID];
        if (usageProbability == null) {
            probablityCache[blockID] = new UsageProbability(usage);
        } else if (!ignoreUsages) {
            // Entry exists already; in the full pass just accumulate the extra usage.
            usageProbability.usages.mark(usage);
        }
    }

    /**
     * Returns the block of {@code node}, consulting {@link #newNodesToBlock} for nodes created
     * by this phase (which are "new" relative to the {@link #nodeToBlock} map).
     */
    private Block block(Node node) {
        Block block;
        if (!nodeToBlock.isNew(node)) {
            block = nodeToBlock.get(node);
        } else {
            block = newNodesToBlock.get(node);
            assert block != null;
        }
        return block;
    }

    /**
     * Recursive propagation step: the usage probability at {@code b} is the weighted sum of the
     * probabilities of its successors (weights from {@link ControlSplit#probability}; an
     * {@link Invoke}'s normal successor counts 1.0 and its exception edge 0.0).
     *
     * Loop ends do not recurse into the loop header; they read the header's current cache entry
     * (if any), which both terminates the recursion and reuses the partially-computed header
     * value. Results are memoized in {@link #probablityCache} and marked {@code computed}.
     */
    private UsageProbability usageProbability0(Node n, Block b) {
        //System.out.println("usageProbability0(" + n.id() + ", " + b + ")");
        UsageProbability cached = probablityCache[b.blockID()];
        // In the cheap pass any cached entry is accepted; in the full pass only fully
        // computed entries are, so seeded-but-unfinished blocks still get propagated into.
        if (cached != null && (cached.computed || ignoreUsages)) {
            return cached;
        }
        UsageProbability result = cached;
        if (result == null) {
            result = new UsageProbability(n.graph());
        }
        if (b.getSuccessors().size() > 0) {
            if (b.isLoopEnd()) {
                Block loopHeader = b.getSuccessors().get(0);
                assert loopHeader.isLoopHeader();
                // Use whatever has been computed for the header so far; no recursion here.
                UsageProbability headerUsages = probablityCache[loopHeader.blockID()];
                if (headerUsages != null) {
                    result.merge(headerUsages, 1.0);
                }
            } else if (b.getSuccessors().size() == 1) {
                result.merge(usageProbability0(n, b.getSuccessors().get(0)), 1.0);
            } else {
                Node lastNode = b.lastNode();
                if (lastNode instanceof Invoke) {
                    // Exception edge is still visited (to collect usages in the full pass)
                    // but contributes nothing to the probability.
                    result.merge(usageProbability0(n, nodeToBlock.get(((Invoke) lastNode).next())), 1.0);
                    result.merge(usageProbability0(n, nodeToBlock.get(((Invoke) lastNode).exceptionEdge())), 0.0);
                } else if (lastNode instanceof ControlSplit) {
                    ControlSplit split = (ControlSplit) lastNode;
                    for (int i = 0; i < split.blockSuccessorCount(); i++) {
                        result.merge(usageProbability0(n, nodeToBlock.get(split.blockSuccessor(i))), split.probability(i));
                    }
                } else {
                    throw Util.shouldNotReachHere();
                }
            }
        }
        probablityCache[b.blockID()] = result;
        result.computed = true;
        return result;
    }

    /**
     * Probability that a node's value is used from a given block, plus (in the full pass only)
     * the set of usages reachable from that block. {@code usages} is null when the enclosing
     * phase's {@code ignoreUsages} flag was set at construction time.
     */
    private class UsageProbability {
        double probability;
        NodeBitMap usages;
        // True once usageProbability0 has finished merging all successor contributions.
        boolean computed;

        /** Entry for a block that directly contains {@code usage}: probability 1.0. */
        public UsageProbability(Node usage) {
            if (!ignoreUsages) {
                usages = usage.graph().createNodeBitMap();
                usages.mark(usage);
            }
            probability = 1.0;
        }

        /** Empty entry (probability 0.0) to be filled by merging successors. */
        public UsageProbability(Graph graph) {
            if (!ignoreUsages) {
                usages = graph.createNodeBitMap();
            }
            probability = 0.0;
        }

        /** Accumulates a successor's contribution, weighted by the edge probability. */
        public void merge(UsageProbability sux, double suxProbability) {
            if (!ignoreUsages) {
                usages.setUnion(sux.usages);
            }
            probability += suxProbability * sux.probability;
        }
    }

    /**
     * Debug formatting helper ("p=<percent> U=[...]"); used only from the commented-out
     * println calls above. Iterates {@code usages}, so it is only valid for entries produced
     * by the full (non-ignoreUsages) pass.
     */
    private String toString(UsageProbability up) {
        NumberFormat nf = NumberFormat.getPercentInstance();
        StringBuilder sb = new StringBuilder("p=");
        sb.append(nf.format(up.probability));
        sb.append(" U=[");
        for (Node n : up.usages) {
            sb.append(n);
            sb.append(", ");
        }
        sb.append("]");
        return sb.toString();
    }
}