changeset 4279:cdcd26f86af5

Helper class for slow paths
author Christian Wimmer <Christian.Wimmer@Oracle.com>
date Thu, 12 Jan 2012 13:46:26 -0800
parents b49981617b10
children c9ab0ffadddd
files graal/com.oracle.max.graal.compiler/src/com/oracle/max/graal/compiler/target/amd64/AMD64DeoptimizationStub.java
      graal/com.oracle.max.graal.compiler/src/com/oracle/max/graal/compiler/target/amd64/AMD64LIRInstruction.java
      graal/com.oracle.max.graal.compiler/src/com/oracle/max/graal/compiler/target/amd64/AMD64MethodEndStub.java
      graal/com.oracle.max.graal.compiler/src/com/oracle/max/graal/compiler/target/amd64/AMD64SlowPath.java
      graal/com.oracle.max.graal.compiler/src/com/oracle/max/graal/compiler/target/amd64/AMD64XirOpcode.java
diffstat 5 files changed, 48 insertions(+), 13 deletions(-)
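For context: before this changeset every LIR.SlowPath implementation cast tasm.asm to AMD64MacroAssembler itself; the new AMD64SlowPath base class performs that downcast once and hands subclasses the typed assembler. A minimal sketch of the resulting pattern, using a hypothetical ExampleSlowPath class that is not part of this changeset:

package com.oracle.max.graal.compiler.target.amd64;

import com.oracle.max.asm.*;
import com.oracle.max.asm.target.amd64.*;
import com.oracle.max.graal.compiler.asm.*;

// Hypothetical example (not in this changeset): a slow path now extends
// AMD64SlowPath and receives the already-cast AMD64MacroAssembler directly,
// instead of implementing LIR.SlowPath and casting tasm.asm in every subclass.
public class ExampleSlowPath extends AMD64SlowPath {
    public final Label label = new Label();

    @Override
    public void emitCode(TargetMethodAssembler tasm, AMD64MacroAssembler masm) {
        masm.bind(label);  // jump target reached from the fast path
        masm.int3();       // placeholder for the real out-of-line code
    }
}
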
--- a/graal/com.oracle.max.graal.compiler/src/com/oracle/max/graal/compiler/target/amd64/AMD64DeoptimizationStub.java	Thu Jan 12 13:44:57 2012 -0800
+++ b/graal/com.oracle.max.graal.compiler/src/com/oracle/max/graal/compiler/target/amd64/AMD64DeoptimizationStub.java	Thu Jan 12 13:46:26 2012 -0800
@@ -33,7 +33,7 @@
 import com.oracle.max.graal.compiler.util.*;
 import com.oracle.max.graal.nodes.DeoptimizeNode.DeoptAction;
 
-public class AMD64DeoptimizationStub implements LIR.SlowPath {
+public class AMD64DeoptimizationStub extends AMD64SlowPath {
     public final Label label = new Label();
     public final LIRDebugInfo info;
     public final DeoptAction action;
@@ -49,9 +49,7 @@
     private static ArrayList<Object> keepAlive = new ArrayList<>();
 
     @Override
-    public void emitCode(TargetMethodAssembler tasm) {
-        AMD64MacroAssembler masm = (AMD64MacroAssembler) tasm.asm;
-
+    public void emitCode(TargetMethodAssembler tasm, AMD64MacroAssembler masm) {
         // TODO(cwi): we want to get rid of a generally reserved scratch register.
         CiRegister scratch = tasm.frameMap.registerConfig.getScratchRegister();
 
--- a/graal/com.oracle.max.graal.compiler/src/com/oracle/max/graal/compiler/target/amd64/AMD64LIRInstruction.java	Thu Jan 12 13:44:57 2012 -0800
+++ b/graal/com.oracle.max.graal.compiler/src/com/oracle/max/graal/compiler/target/amd64/AMD64LIRInstruction.java	Thu Jan 12 13:46:26 2012 -0800
@@ -28,7 +28,7 @@
 import com.oracle.max.graal.compiler.lir.*;
 
 /**
- * Convenience class to cast AbstractAssembler to AMD64MacroAssembler for the {@link #emitCode} method.
+ * Convenience class to provide AMD64MacroAssembler for the {@link #emitCode} method.
  */
 public abstract class AMD64LIRInstruction extends LIRInstruction {
 
--- a/graal/com.oracle.max.graal.compiler/src/com/oracle/max/graal/compiler/target/amd64/AMD64MethodEndStub.java	Thu Jan 12 13:44:57 2012 -0800
+++ b/graal/com.oracle.max.graal.compiler/src/com/oracle/max/graal/compiler/target/amd64/AMD64MethodEndStub.java	Thu Jan 12 13:46:26 2012 -0800
@@ -25,13 +25,10 @@
 import com.oracle.max.asm.target.amd64.*;
 import com.oracle.max.graal.compiler.*;
 import com.oracle.max.graal.compiler.asm.*;
-import com.oracle.max.graal.compiler.lir.*;
 
-public class AMD64MethodEndStub implements LIR.SlowPath {
+public class AMD64MethodEndStub extends AMD64SlowPath {
     @Override
-    public void emitCode(TargetMethodAssembler tasm) {
-        AMD64MacroAssembler masm = (AMD64MacroAssembler) tasm.asm;
-
+    public void emitCode(TargetMethodAssembler tasm, AMD64MacroAssembler masm) {
         for (int i = 0; i < GraalOptions.MethodEndBreakpointGuards; ++i) {
             masm.int3();
         }
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/graal/com.oracle.max.graal.compiler/src/com/oracle/max/graal/compiler/target/amd64/AMD64SlowPath.java	Thu Jan 12 13:46:26 2012 -0800
@@ -0,0 +1,39 @@
+/*
+ * Copyright (c) 2012, 2012, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+package com.oracle.max.graal.compiler.target.amd64;
+
+import com.oracle.max.asm.target.amd64.*;
+import com.oracle.max.graal.compiler.asm.*;
+import com.oracle.max.graal.compiler.lir.*;
+
+/**
+ * Convenience class to provide AMD64MacroAssembler for the {@link #emitCode} method.
+ */
+public abstract class AMD64SlowPath implements LIR.SlowPath {
+    @Override
+    public final void emitCode(TargetMethodAssembler tasm) {
+        emitCode(tasm, (AMD64MacroAssembler) tasm.asm);
+    }
+
+    public abstract void emitCode(TargetMethodAssembler tasm, AMD64MacroAssembler masm);
+}
--- a/graal/com.oracle.max.graal.compiler/src/com/oracle/max/graal/compiler/target/amd64/AMD64XirOpcode.java	Thu Jan 12 13:44:57 2012 -0800
+++ b/graal/com.oracle.max.graal.compiler/src/com/oracle/max/graal/compiler/target/amd64/AMD64XirOpcode.java	Thu Jan 12 13:46:26 2012 -0800
@@ -89,7 +89,7 @@
         }
     }
 
-    private static class SlowPath implements LIR.SlowPath {
+    private static class SlowPath extends AMD64SlowPath {
         public final LIRXirInstruction instruction;
         public final Label[] labels;
         public final Map<XirMark, Mark> marks;
@@ -100,8 +100,9 @@
             this.marks = marks;
         }
 
-        public void emitCode(TargetMethodAssembler tasm) {
-            emitSlowPath(tasm, (AMD64MacroAssembler) tasm.asm, this);
+        @Override
+        public void emitCode(TargetMethodAssembler tasm, AMD64MacroAssembler masm) {
+            emitSlowPath(tasm, masm, this);
         }
     }