comparison doc/design/graal_compiler.tex @ 2578:999407dbfe10

merge
author Lukas Stadler <lukas.stadler@jku.at>
date Wed, 04 May 2011 17:02:11 +0200
parents ac2029d0898f d1ea2563836d
children 4a36a0bd6d18 26ecba0ead6d
comparison
equal deleted inserted replaced
2577:ac2029d0898f 2578:999407dbfe10
24 \fbox{\bfseries\sffamily\scriptsize }}} 24 \fbox{\bfseries\sffamily\scriptsize }}}
25 25
26 \newcommand\TODO[1]{\mynote{TODO}{#1}} 26 \newcommand\TODO[1]{\mynote{TODO}{#1}}
27 \newcommand\cw[1]{\mynote{CW}{#1}} 27 \newcommand\cw[1]{\mynote{CW}{#1}}
28 \newcommand\ls[1]{\mynote{LS}{#1}} 28 \newcommand\ls[1]{\mynote{LS}{#1}}
29 \newcommand\nodename[1]{\texttt{#1}}
29 30
30 31
31 32
32 \smartqed % flush right qed marks, e.g. at end of proof 33 \smartqed % flush right qed marks, e.g. at end of proof
33 34
116 \item Implementation of a prototype front-end for different languages, e.g., JavaScript. 117 \item Implementation of a prototype front-end for different languages, e.g., JavaScript.
117 \end{itemize} 118 \end{itemize}
118 119
119 \section{Implementation} 120 \section{Implementation}
120 121
122 \subsection{Loops}
123 Loops form a first-class construct in the IR that is expressed in specialized IR nodes during all optimization phases.
124 We only compile methods with a control flow where every loop has a single entry point.
125 This entry point is a \nodename{LoopBegin} node.
126 This node is connected to a \nodename{LoopEnd} node that merges all control flow paths that do not exit the loop.
127 The edge between the \nodename{LoopBegin} and the \nodename{LoopEnd} is the backedge of the loop.
128 It goes from the beginning to the end in order to make the graph acyclic.
129 An algorithm that traverses the control flow has to explicitly decide whether it wants to incorporate backedges (i.e., special case the treatment of \nodename{LoopEnd}) or ignore them.
130 Figure~\ref{fig:loop1} shows a simple example of a loop with a single entry and two exits.
131
132 \begin{figure}[h]
133 \label{fig:loop1}
134 \centering
135 \begin{digraphenv}{scale=0.5}{layout1}
136 \textnode{BeforeLoop}{Loop entry}
137 \textnode{Exit1}{First loop exit}
138 \textnode{Exit2}{Second loop exit}
139 \nodesplit{LoopBegin}{LoopBegin}
140 \node{LoopEnd}{LoopEnd}
141 \nodesplit{If1}{If}
142 \nodesplit{If2}{If}
143 \controllabel{LoopBegin:succ1}{LoopEnd}
144 \controllabel{LoopBegin:succ2}{If1}
145 \controllabel{If1:succ1}{If2}
146 \controllabel{If2:succ1}{LoopEnd}
147 \controllabel{BeforeLoop}{LoopBegin}
148 \controllabel{If1:succ2}{Exit1}
149 \controllabel{If2:succ2}{Exit2}
150 \end{digraphenv}
151 \caption{A simple loop with two exits.}
152 \end{figure}
153
154 \subsection{Loop Phis}
155 Data flow in loops is modelled with special phi nodes at the beginning and the end of the loop.
156 The \nodename{LoopEnd} node merges every value that flows into the next loop iteration in associated \nodename{LoopEndPhi} nodes.
157 A corresponding \nodename{LoopBeginPhi} node that is associated with the loop header has a control flow dependency on the \nodename{LoopEndPhi} node.
158 Figure~\ref{fig:loop2} shows how a simple counting loop is modelled in the graph.
159
160 \begin{figure}[h]
161 \label{fig:loop2}
162 \centering
163 \begin{digraphenv}{scale=0.5}{layout2}
164 \textnode{BeforeLoop}{Loop entry}
165 \textnode{Exit}{Loop exit}
166 \textnode{n}{n}
167 \textnode{Constant0}{0}
168 \textnode{Constant1}{1}
169 \nodesplit{LoopBegin}{LoopBegin}
170 \node{LoopEnd}{LoopEnd}
171 \nodesplit{If1}{If}
172 \controllabel{LoopBegin:succ1}{LoopEnd}
173 \controllabel{LoopBegin:succ2}{If1}
174 \nodebi{Compare}{&lt;}
175 \nodebi{LoopBeginPhi}{LoopBeginPhi}
176 \nodebi{Add}{+}
177 \datalabel{Add:in1}{LoopBeginPhi}
178 \datalabel{Add:in2}{Constant1}
179 \nodebi{LoopEndPhi}{LoopEndPhi}
180 \control{LoopBeginPhi}{LoopEndPhi}
181 \data{LoopEndPhi:in1}{LoopEnd}
182 \data{LoopEndPhi:in2}{Add}
183 \datalabel{LoopBeginPhi:in1}{LoopBegin}
184 \datalabel{LoopBeginPhi:in2}{Constant0}
185 \datalabel{Compare:in1}{LoopBeginPhi}
186 \datalabel{Compare:in2}{n}
187 \data{If1}{Compare}
188 \controllabel{If1:succ1}{LoopEnd}
189 \controllabel{BeforeLoop}{LoopBegin}
190 \controllabel{If1:succ2}{Exit}
191 \end{digraphenv}
192 \caption{Graal compiler graph for a loop counting from 0 to $n-1$.} 193 \end{figure}
193 \end{figure}
194
195 \subsection{Loop Counters}
196 The compiler is capable of recognizing variables that are only increased within a loop.
197 A potential overflow of such a variable is guarded with a trap before the loop.
198 Figure~\ref{fig:loop3} shows the compiler graph of the example loop after the loop counter transformation.
199
200
201 \begin{figure}[h]
202 \label{fig:loop3}
203 \centering
204 \begin{digraphenv}{scale=0.5}{layout3}
205 \textnode{BeforeLoop}{Loop entry}
206 \textnode{Exit}{Loop exit}
207 \textnode{n}{n}
208 \textnode{Constant0}{0}
209 \textnode{Constant1}{1}
210 \nodesplit{LoopBegin}{LoopBegin}
211 \node{LoopEnd}{LoopEnd}
212 \nodesplit{If1}{If}
213 \controllabel{LoopBegin:succ1}{LoopEnd}
214 \controllabel{LoopBegin:succ2}{If1}
215 \nodebi{Compare}{&lt;}
216 \nodetri{LoopCounter}{LoopCounter}
217 \datalabel{LoopCounter:in1}{LoopBegin}
218 \datalabeltext{LoopCounter:in2}{Constant0}{init}
219 \datalabeltext{LoopCounter:in3}{Constant1}{stride}
220 \datalabel{Compare:in1}{LoopCounter}
221 \datalabel{Compare:in2}{n}
222 \data{If1}{Compare}
223 \controllabel{If1:succ1}{LoopEnd}
224 \controllabel{BeforeLoop}{LoopBegin}
225 \controllabel{If1:succ2}{Exit}
226 \end{digraphenv}
227 \caption{Graal compiler graph after loop counter transformation.}
228 \end{figure}
229
230 \subsection{Bounded Loops}
231
232 If the total maximum number of iterations of a loop is fixed, then the loop is converted into a bounded loop.
233 The total number of iterations always denotes the number of full iterations of the loop with the control flowing from the loop begin to the loop end.
234 If the total number of iterations is reached, the loop is exited directly from the loop header.
235 In the example, we can infer from the loop exit with the comparison on the loop counter that the total number of iterations of the loop is limited to n.
236 Figure~\ref{fig:loop4} shows the compiler graph of the example loop after the bounded loop transformation.
237
238 \begin{figure}[h]
239 \label{fig:loop4}
240 \centering
241 \begin{digraphenv}{scale=0.5}{layout4}
242 \textnode{BeforeLoop}{Loop entry}
243 \textnode{Exit}{Loop exit}
244 \textnode{n}{n}
245 \textnode{Constant0}{0}
246 \textnode{Constant1}{1}
247 \nodesplittri{LoopBegin}{BoundedLoopBegin}
248 \node{LoopEnd}{LoopEnd}
249 \controllabel{LoopBegin:succ1}{LoopEnd}
250 \controllabel{LoopBegin:succ2}{LoopEnd}
251 \controllabel{LoopBegin:succ3}{Exit}
252 \nodetri{LoopCounter}{LoopCounter}
253 \datalabel{LoopCounter:in1}{LoopBegin}
254 \datalabeltext{LoopCounter:in2}{Constant0}{init}
255 \datalabeltext{LoopCounter:in3}{Constant1}{stride}
256 \data{LoopBegin}{n}
257 \controllabel{BeforeLoop}{LoopBegin}
258 \end{digraphenv}
259 \caption{Graal compiler graph after bounded loop transformation.}
260 \end{figure}
261
262 \subsection{Vectorization}
263
264 If we now have a bounded loop with no additional loop exit and no associated phi nodes (only associated loop counters), we can vectorize the loop.
265 We replace the loop header with a normal instruction that produces a vector of values from 0 to the number of loop iterations minus 1.
266 The loop counters are replaced with \texttt{VectorAdd} and \texttt{VectorMul} nodes.
267 The vectorization is only possible if every node of the loop can be replaced with a corresponding vector node.
268 Figure~\ref{fig:loop5} shows the compiler graph of the example loop after vectorization.
269 The vector nodes all work on an ordered list of integer values and are subject to canonicalization like any other node.
270
271
272 \begin{figure}[h]
273 \label{fig:loop5}
274 \centering
275 \begin{digraphenv}{scale=0.5}{layout5}
276 \textnode{Entry}{Entry}
277 \textnode{Exit}{Exit}
278 \textnode{n}{n}
279 \textnode{Constant0}{0}
280 \textnode{Constant1}{1}
281 \node{Vector}{Vector}
282 \nodebi{VectorAdd}{VectorAdd}
283 \nodebi{VectorMul}{VectorMul}
284 \control{Entry}{Vector}
285 \control{Vector}{Exit}
286 \datalabel{VectorAdd:in1}{Vector}
287 \datalabel{VectorAdd:in2}{Constant0}
288 \datalabel{VectorMul:in1}{VectorAdd}
289 \datalabel{VectorMul:in2}{Constant1}
290 \data{Vector}{n}
291 \end{digraphenv}
292 \caption{Graal compiler graph after vectorization.}
293 \end{figure}
121 294
122 \subsection{Project Source Structure} 295 \subsection{Project Source Structure}
123 In order to have clear interfaces between the different parts of the compiler, the code will be divided into the following source code projects: 296 In order to have clear interfaces between the different parts of the compiler, the code will be divided into the following source code projects:
124 \cw{Use new naming scheme com.oracle.graal...} 297 \cw{Use new naming scheme com.oracle.graal...}
125 \begin{description} 298 \begin{description}
154 \end{itemize} 327 \end{itemize}
155 328
156 \subsection{Nodes and Graphs} 329 \subsection{Nodes and Graphs}
157 The most important aspect of a compiler is the data structure that holds information about an executable piece of code, called \emph{intermediate representation}~(IR). 330 The most important aspect of a compiler is the data structure that holds information about an executable piece of code, called \emph{intermediate representation}~(IR).
158 The IR used in the Graal Compiler (simply referred to as \emph{the compiler} in the rest of this document) was designed in such a way as to allow for extensive optimizations, easy traversal, compact storage and efficient processing. 331 The IR used in the Graal Compiler (simply referred to as \emph{the compiler} in the rest of this document) was designed in such a way as to allow for extensive optimizations, easy traversal, compact storage and efficient processing.
332
333 \subsubsection{The Graph Data Structure}
334 \begin{itemize}
335 \item A graph deals out ids for new nodes and can be queried for the node corresponding to a given id.
336 \item Graphs can manage side data structures, which will be automatically invalidated and lazily recomputed whenever the graph changes. Examples for side data structures are dominator trees and temporary schedules. These side data structures will usually be understood by more than one optimization.
337 \end{itemize}
159 338
160 \subsubsection{The Node Data Structure} 339 \subsubsection{The Node Data Structure}
161 \begin{itemize} 340 \begin{itemize}
162 \item Each node is always associated with a graph. 341 \item Each node is always associated with a graph.
163 \item Each node has an immutable id which is unique within its associated graph. \cw{The server compiler supports ``renumbering'' of nodes to make the ids dense again after large graph manipulations that deleted many nodes.} 342 \item Each node has an immutable id which is unique within its associated graph. \cw{The server compiler supports ``renumbering'' of nodes to make the ids dense again after large graph manipulations that deleted many nodes.}
178 \item Only inputs and successors can be changed, and changes to them will update the usages and predecessors. 357 \item Only inputs and successors can be changed, and changes to them will update the usages and predecessors.
179 \item The Node class needs to provide facilities for subclasses to perform actions upon cloning, dependency changes, etc. 358 \item The Node class needs to provide facilities for subclasses to perform actions upon cloning, dependency changes, etc.
180 \item Nodes cannot be reassigned to another graph, they are cloned instead \cw{Why should there be the need for more than one graph when compiling a method?} 359 \item Nodes cannot be reassigned to another graph, they are cloned instead \cw{Why should there be the need for more than one graph when compiling a method?}
181 \end{itemize} 360 \end{itemize}
182 361
183 \subsubsection{The Graph Data Structure}
184 \begin{itemize}
185 \item A graph deals out ids for new nodes and can be queried for the node corresponding to a given id.
186 \item Graphs can manage side data structures, which will be automatically invalidated and lazily recomputed whenever the graph changes. Examples for side data structures are dominator trees and temporary schedules. These side data structures will usually be understood by more than one optimization.
187 \item Graphs are
188 \end{itemize}
189
190 \subsection{Frame States} 362 \subsection{Frame States}
191 Frame states capture the state of the program, in terms of the source representation (e.g., Java state: local variables, expressions, ...). 363 Frame states capture the state of the program, in terms of the source representation (e.g., Java state: local variables, expressions, ...).
192 Whenever a safepoint is reached or \cw{why is that an ``or'', both is basically the same} \ls{uncommon traps can be introduced at other points, e.g., at an if branch that isn't compiled} a deoptimization is needed a valid frame state needs to be available. 364 Whenever a safepoint is reached or \cw{why is that an ``or'', both is basically the same} \ls{uncommon traps can be introduced at other points, e.g., at an if branch that isn't compiled} a deoptimization is needed a valid frame state needs to be available.
193 A frame state is valid as long as the program performs only actions that can safely be reexecuted (e.g., operations on local variables, loads, etc.). 365 A frame state is valid as long as the program performs only actions that can safely be reexecuted (e.g., operations on local variables, loads, etc.).
194 Thus, frame states need only be generated for bytecodes that cannot be reexecuted: 366 Thus, frame states need only be generated for bytecodes that cannot be reexecuted:
201 \item Synchronization: {\tt MONITORENTER, MONITOREXIT} 373 \item Synchronization: {\tt MONITORENTER, MONITOREXIT}
202 \end{itemize} 374 \end{itemize}
203 375
204 Within the node graph a frame state is represented as a node that is fixed to the node that caused it to be generated (control dependency). 376 Within the node graph a frame state is represented as a node that is fixed to the node that caused it to be generated (control dependency).
205 377
378
379 \begin{figure}[h]
380 \label{fig:fs1}
381 \centering
206 \begin{digraphenv}{scale=0.5}{fs1} 382 \begin{digraphenv}{scale=0.5}{fs1}
207 \nodetrisplit{store1}{ArrayStore} 383 \nodetrisplit{store1}{ArrayStore}
208 \nodebi{load1}{ArrayLoad} 384 \nodebi{load1}{ArrayLoad}
209 \controllabel{store1:succ1}{load1} 385 \controllabel{store1:succ1}{load1}
210 \nodetrisplit{store2}{ArrayStore} 386 \nodetrisplit{store2}{ArrayStore}
215 \nodeframestate{fs1}{FrameState} 391 \nodeframestate{fs1}{FrameState}
216 \controllabel{store1:succ2}{fs1} 392 \controllabel{store1:succ2}{fs1}
217 \nodeframestate{fs2}{FrameState} 393 \nodeframestate{fs2}{FrameState}
218 \controllabel{store2:succ2}{fs2} 394 \controllabel{store2:succ2}{fs2}
219 \end{digraphenv} 395 \end{digraphenv}
396 \caption{Simple example using two frame states.}
397 \end{figure}
220 398
221 FrameStates also have data dependencies on the contents of the state: the local variables and the expression stack. 399 FrameStates also have data dependencies on the contents of the state: the local variables and the expression stack.
222 400
223 \subsection{Deoptimization and Uncommon Traps} 401 \subsection{Deoptimization and Uncommon Traps}
224 Uncommon trap nodes are not fixed to a certain frame state node, they can move around freely and will always use the correct frame state. 402 Uncommon trap nodes are not fixed to a certain frame state node, they can move around freely and will always use the correct frame state.
225 The node that is guarded by the deoptimization has a data dependency on the trap, and the trap in turn has a data dependency on the condition and on the most distant node that is postdominated by the guarded node. 403 The node that is guarded by the deoptimization has a data dependency on the trap, and the trap in turn has a data dependency on the condition and on the most distant node that is postdominated by the guarded node.
226 404
405 \begin{figure}[h]
406 \label{fig:trap1}
407 \centering
227 \begin{digraphenv}{scale=0.5}{trap1} 408 \begin{digraphenv}{scale=0.5}{trap1}
228 \nodesplit{if}{If} 409 \nodesplit{if}{If}
229 \node{split1}{Split} 410 \node{split1}{Split}
230 \controllabel{if:succ1}{split1} 411 \controllabel{if:succ1}{split1}
231 nop [shape=plaintext, label="..."] 412 nop [shape=plaintext, label="..."]
250 \data{cmpnull}{o2} 431 \data{cmpnull}{o2}
251 \datalabel{trap:in2}{cmpnull} 432 \datalabel{trap:in2}{cmpnull}
252 \datalabel{load2:in1}{trap} 433 \datalabel{load2:in1}{trap}
253 \datalabel{trap:in1}{split2} 434 \datalabel{trap:in1}{split2}
254 \end{digraphenv} 435 \end{digraphenv}
255 436 \caption{In this example, the second load is guarded by an uncommon trap, because its receiver might be null (the receiver of the load is assumed to be non-null).
256 \emph{\small In this example, the second load is guarded by an uncommon trap, because its receiver might be null (the receiver of the load is assumed to be non-null).
257 The trap is anchored to the control split, because as soon as this node is executed the second load must be executed as well. 437 The trap is anchored to the control split, because as soon as this node is executed the second load must be executed as well.
258 In the final scheduling the trap can be placed before or after the first load.} 438 In the final scheduling the trap can be placed before or after the first load.}
439 \end{figure}
259 440
260 Another type of uncommon trap is a guard, which is used to remove branches that have a very low execution frequency from the compiled code. 441 Another type of uncommon trap is a guard, which is used to remove branches that have a very low execution frequency from the compiled code.
261 442
443 \begin{figure}[h]
444 \label{fig:trap2}
445 \centering
262 \begin{digraphenv}{scale=0.5}{trap2} 446 \begin{digraphenv}{scale=0.5}{trap2}
263 start [shape=plaintext, label="..."] 447 start [shape=plaintext, label="..."]
264 start2 [shape=plaintext, label=""] 448 start2 [shape=plaintext, label=""]
265 start3 [shape=plaintext, label=""] 449 start3 [shape=plaintext, label=""]
266 \control{start}{guard} 450 \control{start}{guard}
273 \nodetrap{trap}{Trap} 457 \nodetrap{trap}{Trap}
274 \datalabel{trap:in1}{start2} 458 \datalabel{trap:in1}{start2}
275 \datalabel{trap:in2}{start3} 459 \datalabel{trap:in2}{start3}
276 \data{guard}{trap} 460 \data{guard}{trap}
277 \end{digraphenv} 461 \end{digraphenv}
278 462 \caption{In this example the If from the previous example was replaced by a guard and an uncommon trap.
279
280 \emph{\small In this example the If from the previous example was replaced by a guard and an uncommon trap.
281 The guard takes the place of the If in the control flow, and is connected to the trap node. 463 The guard takes the place of the If in the control flow, and is connected to the trap node.
282 The uncommon trap is now anchored to the most distant node of which the If was a postdominator.} 464 The uncommon trap is now anchored to the most distant node of which the If was a postdominator.}
465 \end{figure}
283 466
284 At some point during the compilation, trap nodes need to be fixed, which means that appropriate data and control dependencies will be inserted so that they cannot move outside the scope of the associated frame state. 467 At some point during the compilation, trap nodes need to be fixed, which means that appropriate data and control dependencies will be inserted so that they cannot move outside the scope of the associated frame state.
285 This will generate deoptimization-free zones that can be targeted by the most aggressive optimizations. 468 This will generate deoptimization-free zones that can be targeted by the most aggressive optimizations.
286 A simple algorithm for this removal of FrameStates would be to move all traps as far upwards as possible. 469 A simple algorithm for this removal of FrameStates would be to move all traps as far upwards as possible.
287 470
288 471
289 Multiple Traps with the same condition and anchor can be merged: 472 Multiple Traps with the same condition and anchor can be merged:
290 473
474 \begin{figure}[h]
475 \label{fig:trap3}
476 \centering
291 \begin{digraphenv}{scale=0.5}{trap3} 477 \begin{digraphenv}{scale=0.5}{trap3}
292 \nodesplit{if}{If} 478 \nodesplit{if}{If}
293 \node{split1}{Split} 479 \node{split1}{Split}
294 \controllabel{if:succ1}{split1} 480 \controllabel{if:succ1}{split1}
295 nop [shape=plaintext, label="..."] 481 nop [shape=plaintext, label="..."]
314 \datalabel{trap:in2}{cmpnull} 500 \datalabel{trap:in2}{cmpnull}
315 \datalabel{load2:in1}{trap} 501 \datalabel{load2:in1}{trap}
316 \datalabel{load1:in1}{trap} 502 \datalabel{load1:in1}{trap}
317 \datalabel{trap:in1}{split2} 503 \datalabel{trap:in1}{split2}
318 \end{digraphenv} 504 \end{digraphenv}
505 \caption{Two loads guarded by the same Trap.}
506 \end{figure}
319 507
320 Also, if two Traps that are anchored to the true and false branch of the same If have the same condition, they can be merged, so that the resulting Trap is anchored at the most distant node of which the If is a postdominator. 508 Also, if two Traps that are anchored to the true and false branch of the same If have the same condition, they can be merged, so that the resulting Trap is anchored at the most distant node of which the If is a postdominator.
321 509
322 %Frame states should be represented using a delta-encoding. 510 %Frame states should be represented using a delta-encoding.
323 %This will make them significantly smaller and will make inlining, etc. much easier. 511 %This will make them significantly smaller and will make inlining, etc. much easier.
325 513
326 \subsection{Graph Building} 514 \subsection{Graph Building}
327 \begin{itemize} 515 \begin{itemize}
328 \item The graph built by the initial parser (called \emph{GraphBuilder}) should be as close to the source representation (bytecode, ...) as possible. 516 \item The graph built by the initial parser (called \emph{GraphBuilder}) should be as close to the source representation (bytecode, ...) as possible.
329 \item All nodes should be able to immediately lower themselves to a machine-level representation. \cw{What is that? You mean every node has x86 specific code that spits out machine code? Hope you are joking...} This allows for easier compiler development, and also leads to a compiler that is very flexible in the amount of optimizations it performs (e.g. recompilation of methods with more aggressive optimizations). 517 \item All nodes should be able to immediately lower themselves to a machine-level representation. \cw{What is that? You mean every node has x86 specific code that spits out machine code? Hope you are joking...} This allows for easier compiler development, and also leads to a compiler that is very flexible in the amount of optimizations it performs (e.g. recompilation of methods with more aggressive optimizations).
330 \item
331 \end{itemize} 518 \end{itemize}
332 519
333 \subsection{Graphical Representation} 520 \subsection{Graphical Representation}
334 The graphs in this document use the following node layout: 521 The graphs in this document use the following node layout:
335 522
336 \begin{digraphenv}{scale=0.5}{layout01} 523 \begin{digraphenv}{scale=0.5}{graphrep1}
337 \node{node1}{nop} 524 \node{node1}{nop}
338 \nodebi{node2}{+} 525 \nodebi{node2}{+}
339 \nodetri{node3}{phi} 526 \nodetri{node3}{phi}
340 \nodesplit{node4}{if} 527 \nodesplit{node4}{if}
341 \end{digraphenv} 528 \end{digraphenv}
343 \cw{That doesn't compile with my latex. What do I have to do to get it working?} 530 \cw{That doesn't compile with my latex. What do I have to do to get it working?}
344 \ls{graphviz needs to be installed, and pdflatex needs to be started with -shell-escape} 531 \ls{graphviz needs to be installed, and pdflatex needs to be started with -shell-escape}
345 532
346 Red arrows always represent control dependencies, while black arrows represent data dependencies: 533
347 534
348 \begin{digraphenv}{scale=0.5}{layout1} 535 \begin{digraphenv}{scale=0.5}{graphrep2}
349 \node{a}{a} 536 \node{a}{a}
350 \node{b}{b} 537 \node{b}{b}
351 \nodesplit{if}{if} 538 \nodesplit{if}{if}
352 \node{nop}{nop} 539 \node{nop}{nop}
353 \nodebi{add}{+} 540 \nodebi{add}{+}