1// RUN: mlir-opt -buffer-hoisting -split-input-file %s | FileCheck %s
2
// This file checks the behavior of the BufferHoisting pass for moving Alloc
4// operations to their correct positions.
5
6// Test Case:
7//    bb0
8//   /   \
9//  bb1  bb2 <- Initial position of AllocOp
10//   \   /
11//    bb3
12// BufferHoisting expected behavior: It should move the existing AllocOp to
13// the entry block.
14
15// CHECK-LABEL: func @condBranch
func @condBranch(%arg0: i1, %arg1: memref<2xf32>, %arg2: memref<2xf32>) {
  cond_br %arg0, ^bb1, ^bb2
^bb1:
  br ^bb3(%arg1 : memref<2xf32>)
^bb2:
  // Static alloc on a divergent path; it has no dynamic operands, so the
  // pass is free to hoist it into the entry block (see the checks below).
  %0 = alloc() : memref<2xf32>
  test.buffer_based in(%arg1: memref<2xf32>) out(%0: memref<2xf32>)
  br ^bb3(%0 : memref<2xf32>)
^bb3(%1: memref<2xf32>):
  test.copy(%1, %arg2) : (memref<2xf32>, memref<2xf32>)
  return
}
28
29// CHECK-NEXT: %[[ALLOC:.*]] = alloc()
30// CHECK-NEXT: cond_br
31
32// -----
33
34// Test Case:
35//    bb0
36//   /   \
37//  bb1  bb2 <- Initial position of AllocOp
38//   \   /
39//    bb3
40// BufferHoisting expected behavior: It should not move the existing AllocOp
41// to any other block since the alloc has a dynamic dependency to block argument
42// %0 in bb2.
43
44// CHECK-LABEL: func @condBranchDynamicType
func @condBranchDynamicType(
  %arg0: i1,
  %arg1: memref<?xf32>,
  %arg2: memref<?xf32>,
  %arg3: index) {
  cond_br %arg0, ^bb1, ^bb2(%arg3: index)
^bb1:
  br ^bb3(%arg1 : memref<?xf32>)
^bb2(%0: index):
  // The dynamic size %0 is a block argument of ^bb2, so this alloc cannot be
  // hoisted above the block that defines it.
  %1 = alloc(%0) : memref<?xf32>
  test.buffer_based in(%arg1: memref<?xf32>) out(%1: memref<?xf32>)
  br ^bb3(%1 : memref<?xf32>)
^bb3(%2: memref<?xf32>):
  test.copy(%2, %arg2) : (memref<?xf32>, memref<?xf32>)
  return
}
61
62// CHECK-NEXT: cond_br
63//      CHECK: ^bb2
64//      CHECK: ^bb2(%[[IDX:.*]]:{{.*}})
65// CHECK-NEXT: %[[ALLOC0:.*]] = alloc(%[[IDX]])
66// CHECK-NEXT: test.buffer_based
67
68// -----
69
70// Test Case:
71//      bb0
72//     /    \
73//   bb1    bb2 <- Initial position of AllocOp
74//    |     /  \
75//    |   bb3  bb4
76//    |     \  /
77//    \     bb5
78//     \    /
79//       bb6
80//        |
81//       bb7
82// BufferHoisting expected behavior: It should not move the existing AllocOp
83// to any other block since the alloc has a dynamic dependency to block argument
84// %0 in bb2.
85
86// CHECK-LABEL: func @condBranchDynamicTypeNested
func @condBranchDynamicTypeNested(
  %arg0: i1,
  %arg1: memref<?xf32>,
  %arg2: memref<?xf32>,
  %arg3: index) {
  cond_br %arg0, ^bb1, ^bb2(%arg3: index)
^bb1:
  br ^bb6(%arg1 : memref<?xf32>)
^bb2(%0: index):
  // As in condBranchDynamicType, the alloc size %0 is an argument of ^bb2,
  // which pins the alloc to this block even though its result flows through
  // the nested diamond (^bb3/^bb4 -> ^bb5) below.
  %1 = alloc(%0) : memref<?xf32>
  test.buffer_based in(%arg1: memref<?xf32>) out(%1: memref<?xf32>)
  cond_br %arg0, ^bb3, ^bb4
^bb3:
  br ^bb5(%1 : memref<?xf32>)
^bb4:
  br ^bb5(%1 : memref<?xf32>)
^bb5(%2: memref<?xf32>):
  br ^bb6(%2 : memref<?xf32>)
^bb6(%3: memref<?xf32>):
  br ^bb7(%3 : memref<?xf32>)
^bb7(%4: memref<?xf32>):
  test.copy(%4, %arg2) : (memref<?xf32>, memref<?xf32>)
  return
}
111
112// CHECK-NEXT: cond_br
113//      CHECK: ^bb2
114//      CHECK: ^bb2(%[[IDX:.*]]:{{.*}})
115// CHECK-NEXT: %[[ALLOC0:.*]] = alloc(%[[IDX]])
116// CHECK-NEXT: test.buffer_based
117
118// -----
119
120// Test Case:
121//    bb0
122//   /   \
123//  |    bb1 <- Initial position of AllocOp
124//   \   /
125//    bb2
126// BufferHoisting expected behavior: It should move the existing AllocOp to
127// the entry block.
128
129// CHECK-LABEL: func @criticalEdge
func @criticalEdge(%arg0: i1, %arg1: memref<2xf32>, %arg2: memref<2xf32>) {
  cond_br %arg0, ^bb1, ^bb2(%arg1 : memref<2xf32>)
^bb1:
  // Static alloc on the conditional side of a critical edge; no dynamic
  // operands, so it is expected to be hoisted into the entry block.
  %0 = alloc() : memref<2xf32>
  test.buffer_based in(%arg1: memref<2xf32>) out(%0: memref<2xf32>)
  br ^bb2(%0 : memref<2xf32>)
^bb2(%1: memref<2xf32>):
  test.copy(%1, %arg2) : (memref<2xf32>, memref<2xf32>)
  return
}
140
141// CHECK-NEXT: %[[ALLOC:.*]] = alloc()
142// CHECK-NEXT: cond_br
143
144// -----
145
146// Test Case:
147//    bb0 <- Initial position of the first AllocOp
148//   /   \
149//  bb1  bb2
150//   \   /
151//    bb3 <- Initial position of the second AllocOp
152// BufferHoisting expected behavior: It shouldn't move the AllocOps.
153
154// CHECK-LABEL: func @ifElse
func @ifElse(%arg0: i1, %arg1: memref<2xf32>, %arg2: memref<2xf32>) {
  %0 = alloc() : memref<2xf32>
  test.buffer_based in(%arg1: memref<2xf32>) out(%0: memref<2xf32>)
  cond_br %arg0,
    ^bb1(%arg1, %0 : memref<2xf32>, memref<2xf32>),
    ^bb2(%0, %arg1 : memref<2xf32>, memref<2xf32>)
^bb1(%1: memref<2xf32>, %2: memref<2xf32>):
  br ^bb3(%1, %2 : memref<2xf32>, memref<2xf32>)
^bb2(%3: memref<2xf32>, %4: memref<2xf32>):
  br ^bb3(%3, %4 : memref<2xf32>, memref<2xf32>)
^bb3(%5: memref<2xf32>, %6: memref<2xf32>):
  %7 = alloc() : memref<2xf32>
  // Read the merged block argument %5 (not the freshly allocated %7, which
  // holds no data yet) so the second alloc has a real producer/consumer
  // chain inside ^bb3, matching the pattern of the sibling tests.
  test.buffer_based in(%5: memref<2xf32>) out(%7: memref<2xf32>)
  test.copy(%7, %arg2) : (memref<2xf32>, memref<2xf32>)
  return
}
171
172// CHECK-NEXT: %[[ALLOC0:.*]] = alloc()
173// CHECK-NEXT: test.buffer_based
174//      CHECK: br ^bb3
175//      CHECK: br ^bb3
176// CHECK-NEXT: ^bb3
177//      CHECK: %[[ALLOC1:.*]] = alloc()
178// CHECK-NEXT: test.buffer_based
179//      CHECK: test.copy(%[[ALLOC1]]
180// CHECK-NEXT: return
181
182// -----
183
184// Test Case: No users for buffer in if-else CFG
185//    bb0 <- Initial position of AllocOp
186//   /   \
187//  bb1  bb2
188//   \   /
189//    bb3
190// BufferHoisting expected behavior: It shouldn't move the AllocOp.
191
192// CHECK-LABEL: func @ifElseNoUsers
func @ifElseNoUsers(%arg0: i1, %arg1: memref<2xf32>, %arg2: memref<2xf32>) {
  // %0 is produced here and threaded through the diamond below, but the final
  // copy reads %arg1 only, so the buffer has no users on the exit path.
  %0 = alloc() : memref<2xf32>
  test.buffer_based in(%arg1: memref<2xf32>) out(%0: memref<2xf32>)
  cond_br %arg0,
    ^bb1(%arg1, %0 : memref<2xf32>, memref<2xf32>),
    ^bb2(%0, %arg1 : memref<2xf32>, memref<2xf32>)
^bb1(%1: memref<2xf32>, %2: memref<2xf32>):
  br ^bb3(%1, %2 : memref<2xf32>, memref<2xf32>)
^bb2(%3: memref<2xf32>, %4: memref<2xf32>):
  br ^bb3(%3, %4 : memref<2xf32>, memref<2xf32>)
^bb3(%5: memref<2xf32>, %6: memref<2xf32>):
  test.copy(%arg1, %arg2) : (memref<2xf32>, memref<2xf32>)
  return
}
207
208// CHECK-NEXT: %[[ALLOC0:.*]] = alloc()
209// CHECK-NEXT: test.buffer_based
210
211// -----
212
213// Test Case:
214//      bb0 <- Initial position of the first AllocOp
215//     /    \
216//   bb1    bb2
217//    |     /  \
218//    |   bb3  bb4
219//    \     \  /
220//     \     /
221//       bb5 <- Initial position of the second AllocOp
222// BufferHoisting expected behavior: AllocOps shouldn't be moved.
223
224// CHECK-LABEL: func @ifElseNested
func @ifElseNested(%arg0: i1, %arg1: memref<2xf32>, %arg2: memref<2xf32>) {
  // First alloc: already in the entry block, nothing to hoist.
  %0 = alloc() : memref<2xf32>
  test.buffer_based in(%arg1: memref<2xf32>) out(%0: memref<2xf32>)
  cond_br %arg0,
    ^bb1(%arg1, %0 : memref<2xf32>, memref<2xf32>),
    ^bb2(%0, %arg1 : memref<2xf32>, memref<2xf32>)
^bb1(%1: memref<2xf32>, %2: memref<2xf32>):
  br ^bb5(%1, %2 : memref<2xf32>, memref<2xf32>)
^bb2(%3: memref<2xf32>, %4: memref<2xf32>):
  cond_br %arg0, ^bb3(%3 : memref<2xf32>), ^bb4(%4 : memref<2xf32>)
^bb3(%5: memref<2xf32>):
  br ^bb5(%5, %3 : memref<2xf32>, memref<2xf32>)
^bb4(%6: memref<2xf32>):
  br ^bb5(%3, %6 : memref<2xf32>, memref<2xf32>)
^bb5(%7: memref<2xf32>, %8: memref<2xf32>):
  // Second alloc: sits in the join block after the nested diamond and all of
  // its uses are local to ^bb5.
  %9 = alloc() : memref<2xf32>
  test.buffer_based in(%7: memref<2xf32>) out(%9: memref<2xf32>)
  test.copy(%9, %arg2) : (memref<2xf32>, memref<2xf32>)
  return
}
245
246// CHECK-NEXT: %[[ALLOC0:.*]] = alloc()
247// CHECK-NEXT: test.buffer_based
248//      CHECK: br ^bb5
249//      CHECK: br ^bb5
250//      CHECK: br ^bb5
251// CHECK-NEXT: ^bb5
252//      CHECK: %[[ALLOC1:.*]] = alloc()
253// CHECK-NEXT: test.buffer_based
254
255// -----
256
257// Test Case: Dead operations in a single block.
258// BufferHoisting expected behavior: It shouldn't move the AllocOps.
259
260// CHECK-LABEL: func @redundantOperations
func @redundantOperations(%arg0: memref<2xf32>) {
  %0 = alloc() : memref<2xf32>
  test.buffer_based in(%arg0: memref<2xf32>) out(%0: memref<2xf32>)
  // The second buffer consumes the first; neither result is used after the
  // second buffer_based op, making the chain dead.
  %1 = alloc() : memref<2xf32>
  test.buffer_based in(%0: memref<2xf32>) out(%1: memref<2xf32>)
  return
}
268
269// CHECK-NEXT: %[[ALLOC0:.*]] = alloc()
270// CHECK-NEXT: test.buffer_based
271//      CHECK: %[[ALLOC1:.*]] = alloc()
272// CHECK-NEXT: test.buffer_based
273
274// -----
275
276// Test Case:
277//                                     bb0
278//                                    /   \
279// Initial pos of the 1st AllocOp -> bb1  bb2 <- Initial pos of the 2nd AllocOp
280//                                    \   /
281//                                     bb3
282// BufferHoisting expected behavior: Both AllocOps should be moved to the
283// entry block.
284
285// CHECK-LABEL: func @moving_alloc_and_inserting_missing_dealloc
func @moving_alloc_and_inserting_missing_dealloc(
  %cond: i1,
    %arg0: memref<2xf32>,
    %arg1: memref<2xf32>) {
  cond_br %cond, ^bb1, ^bb2
^bb1:
  // Both allocs below are static (no dynamic operands), so both are expected
  // to be hoisted into the entry block.
  %0 = alloc() : memref<2xf32>
  test.buffer_based in(%arg0: memref<2xf32>) out(%0: memref<2xf32>)
  br ^exit(%0 : memref<2xf32>)
^bb2:
  %1 = alloc() : memref<2xf32>
  test.buffer_based in(%arg0: memref<2xf32>) out(%1: memref<2xf32>)
  br ^exit(%1 : memref<2xf32>)
^exit(%arg2: memref<2xf32>):
  test.copy(%arg2, %arg1) : (memref<2xf32>, memref<2xf32>)
  return
}
303
304// CHECK-NEXT: %{{.*}} = alloc()
305// CHECK-NEXT: %{{.*}} = alloc()
306// CHECK-NEXT: cond_br
307
308// -----
309
310// Test Case: Invalid position of the DeallocOp. There is a user after
311// deallocation.
312//   bb0
313//  /   \
314// bb1  bb2 <- Initial position of AllocOp
315//  \   /
316//   bb3
317// BufferHoisting expected behavior: It should move the AllocOp to the entry
318// block.
319
320// CHECK-LABEL: func @moving_invalid_dealloc_op_complex
func @moving_invalid_dealloc_op_complex(
  %cond: i1,
    %arg0: memref<2xf32>,
    %arg1: memref<2xf32>) {
  cond_br %cond, ^bb1, ^bb2
^bb1:
  br ^exit(%arg0 : memref<2xf32>)
^bb2:
  %1 = alloc() : memref<2xf32>
  test.buffer_based in(%arg0: memref<2xf32>) out(%1: memref<2xf32>)
  // Deliberately misplaced dealloc: %1 is still passed to ^exit and read by
  // the copy below. The pass should still hoist the alloc to the entry block.
  dealloc %1 : memref<2xf32>
  br ^exit(%1 : memref<2xf32>)
^exit(%arg2: memref<2xf32>):
  test.copy(%arg2, %arg1) : (memref<2xf32>, memref<2xf32>)
  return
}
337
338// CHECK-NEXT: %{{.*}} = alloc()
339// CHECK-NEXT: cond_br
340
341// -----
342
343// Test Case: Nested regions - This test defines a BufferBasedOp inside the
344// region of a RegionBufferBasedOp.
345// BufferHoisting expected behavior: The AllocOp for the BufferBasedOp should
// remain inside the region of the RegionBufferBasedOp. The AllocOp of the
347// RegionBufferBasedOp should be moved to the entry block.
348
349// CHECK-LABEL: func @nested_regions_and_cond_branch
func @nested_regions_and_cond_branch(
  %arg0: i1,
  %arg1: memref<2xf32>,
  %arg2: memref<2xf32>) {
  cond_br %arg0, ^bb1, ^bb2
^bb1:
  br ^bb3(%arg1 : memref<2xf32>)
^bb2:
  // Outer alloc: static and in a divergent block, expected to be hoisted to
  // the entry block.
  %0 = alloc() : memref<2xf32>
  test.region_buffer_based in(%arg1: memref<2xf32>) out(%0: memref<2xf32>) {
  ^bb0(%gen1_arg0: f32, %gen1_arg1: f32):
    // Inner alloc: defined inside the op's region and expected to stay there.
    %1 = alloc() : memref<2xf32>
    test.buffer_based in(%arg1: memref<2xf32>) out(%1: memref<2xf32>)
    %tmp1 = exp %gen1_arg0 : f32
    test.region_yield %tmp1 : f32
  }
  br ^bb3(%0 : memref<2xf32>)
^bb3(%1: memref<2xf32>):
  test.copy(%1, %arg2) : (memref<2xf32>, memref<2xf32>)
  return
}
371// CHECK-NEXT:   %[[ALLOC0:.*]] = alloc()
372// CHECK-NEXT:   cond_br
373//      CHECK:   test.region_buffer_based
374//      CHECK:     %[[ALLOC1:.*]] = alloc()
375// CHECK-NEXT:     test.buffer_based
376
377// -----
378
379// Test Case: nested region control flow
// The alloc position of %1 does not need to be changed; the buffer flows
// through both if branches until it is finally returned.
382
383// CHECK-LABEL: func @nested_region_control_flow
func @nested_region_control_flow(
  %arg0 : index,
  %arg1 : index) -> memref<?x?xf32> {
  %0 = cmpi eq, %arg0, %arg1 : index
  %1 = alloc(%arg0, %arg0) : memref<?x?xf32>
  %2 = scf.if %0 -> (memref<?x?xf32>) {
    scf.yield %1 : memref<?x?xf32>
  } else {
    // %3 is never yielded — both branches yield %1 — so this alloc should
    // stay inside the else branch.
    %3 = alloc(%arg0, %arg1) : memref<?x?xf32>
    scf.yield %1 : memref<?x?xf32>
  }
  return %2 : memref<?x?xf32>
}
397
398//      CHECK: %[[ALLOC0:.*]] = alloc(%arg0, %arg0)
399// CHECK-NEXT: %{{.*}} = scf.if
400//      CHECK: else
401// CHECK-NEXT: %[[ALLOC1:.*]] = alloc(%arg0, %arg1)
402
403// -----
404
405// Test Case: nested region control flow with a nested buffer allocation in a
406// divergent branch.
// The alloc position of %1 does not need to be changed. %3 is moved upwards.
408
409// CHECK-LABEL: func @nested_region_control_flow_div
func @nested_region_control_flow_div(
  %arg0 : index,
  %arg1 : index) -> memref<?x?xf32> {
  %0 = cmpi eq, %arg0, %arg1 : index
  %1 = alloc(%arg0, %arg0) : memref<?x?xf32>
  %2 = scf.if %0 -> (memref<?x?xf32>) {
    scf.yield %1 : memref<?x?xf32>
  } else {
    // Unlike nested_region_control_flow, %3 escapes the divergent branch via
    // the yield, so the pass moves this alloc above the scf.if.
    %3 = alloc(%arg0, %arg1) : memref<?x?xf32>
    scf.yield %3 : memref<?x?xf32>
  }
  return %2 : memref<?x?xf32>
}
423
424//      CHECK: %[[ALLOC0:.*]] = alloc(%arg0, %arg0)
425// CHECK-NEXT: %[[ALLOC1:.*]] = alloc(%arg0, %arg1)
426// CHECK-NEXT: %{{.*}} = scf.if
427
428// -----
429
430// Test Case: deeply nested region control flow with a nested buffer allocation
431// in a divergent branch.
432// The alloc position of %1 does not need to be changed. Allocs %4 and %5 are
433// moved upwards.
434
435// CHECK-LABEL: func @nested_region_control_flow_div_nested
func @nested_region_control_flow_div_nested(
  %arg0 : index,
  %arg1 : index) -> memref<?x?xf32> {
  %0 = cmpi eq, %arg0, %arg1 : index
  %1 = alloc(%arg0, %arg0) : memref<?x?xf32>
  %2 = scf.if %0 -> (memref<?x?xf32>) {
    %3 = scf.if %0 -> (memref<?x?xf32>) {
      scf.yield %1 : memref<?x?xf32>
    } else {
      // Escapes two levels of scf.if via the yields; moved upwards.
      %4 = alloc(%arg0, %arg1) : memref<?x?xf32>
      scf.yield %4 : memref<?x?xf32>
    }
    scf.yield %3 : memref<?x?xf32>
  } else {
    // Escapes the outer scf.if via the yield; moved upwards.
    %5 = alloc(%arg1, %arg1) : memref<?x?xf32>
    scf.yield %5 : memref<?x?xf32>
  }
  return %2 : memref<?x?xf32>
}
455//      CHECK: %[[ALLOC0:.*]] = alloc(%arg0, %arg0)
456// CHECK-NEXT: %[[ALLOC1:.*]] = alloc(%arg0, %arg1)
457// CHECK-NEXT: %[[ALLOC2:.*]] = alloc(%arg1, %arg1)
458// CHECK-NEXT: %{{.*}} = scf.if
459
460// -----
461
462// Test Case: nested region control flow within a region interface.
// The alloc position of %0 does not need to be changed.
464
465// CHECK-LABEL: func @inner_region_control_flow
func @inner_region_control_flow(%arg0 : index) -> memref<?x?xf32> {
  // %0 is defined in the entry block and only passed through the region_if
  // branches unchanged, so its position stays as-is.
  %0 = alloc(%arg0, %arg0) : memref<?x?xf32>
  %1 = test.region_if %0 : memref<?x?xf32> -> (memref<?x?xf32>) then {
    ^bb0(%arg1 : memref<?x?xf32>):
      test.region_if_yield %arg1 : memref<?x?xf32>
  } else {
    ^bb0(%arg1 : memref<?x?xf32>):
      test.region_if_yield %arg1 : memref<?x?xf32>
  } join {
    ^bb0(%arg1 : memref<?x?xf32>):
      test.region_if_yield %arg1 : memref<?x?xf32>
  }
  return %1 : memref<?x?xf32>
}
480
481//      CHECK: %[[ALLOC0:.*]] = alloc(%arg0, %arg0)
482// CHECK-NEXT: {{.*}} test.region_if
483
484// -----
485
486// Test Case: nested region control flow within a region interface including an
487// allocation in a divergent branch.
// The alloc position of %0 does not need to be changed. %2 is moved upwards.
489
490// CHECK-LABEL: func @inner_region_control_flow_div
func @inner_region_control_flow_div(
  %arg0 : index,
  %arg1 : index) -> memref<?x?xf32> {
  %0 = alloc(%arg0, %arg0) : memref<?x?xf32>
  %1 = test.region_if %0 : memref<?x?xf32> -> (memref<?x?xf32>) then {
    ^bb0(%arg2 : memref<?x?xf32>):
      test.region_if_yield %arg2 : memref<?x?xf32>
  } else {
    ^bb0(%arg2 : memref<?x?xf32>):
      // %2 escapes the divergent else branch through the yield and is
      // therefore moved above the region_if.
      %2 = alloc(%arg0, %arg1) : memref<?x?xf32>
      test.region_if_yield %2 : memref<?x?xf32>
  } join {
    ^bb0(%arg2 : memref<?x?xf32>):
      test.region_if_yield %arg2 : memref<?x?xf32>
  }
  return %1 : memref<?x?xf32>
}
508
509//      CHECK: %[[ALLOC0:.*]] = alloc(%arg0, %arg0)
510// CHECK-NEXT: %[[ALLOC1:.*]] = alloc(%arg0, %arg1)
511// CHECK-NEXT: {{.*}} test.region_if
512
513// -----
514
515// Test Case: Alloca operations shouldn't be moved.
516
517// CHECK-LABEL: func @condBranchAlloca
func @condBranchAlloca(%arg0: i1, %arg1: memref<2xf32>, %arg2: memref<2xf32>) {
  cond_br %arg0, ^bb1, ^bb2
^bb1:
  br ^bb3(%arg1 : memref<2xf32>)
^bb2:
  // Same CFG as condBranch, but with alloca instead of alloc: stack
  // allocations are exempt from hoisting and must remain in ^bb2.
  %0 = alloca() : memref<2xf32>
  test.buffer_based in(%arg1: memref<2xf32>) out(%0: memref<2xf32>)
  br ^bb3(%0 : memref<2xf32>)
^bb3(%1: memref<2xf32>):
  test.copy(%1, %arg2) : (memref<2xf32>, memref<2xf32>)
  return
}
530
531// CHECK-NEXT: cond_br
532//      CHECK: ^bb2
533//      CHECK: ^bb2
534// CHECK-NEXT: %[[ALLOCA:.*]] = alloca()
535// CHECK-NEXT: test.buffer_based
536
537// -----
538
539// Test Case: Alloca operations shouldn't be moved. The alloc operation also
540// shouldn't be moved analogously to the ifElseNested test.
541
542// CHECK-LABEL: func @ifElseNestedAlloca
func @ifElseNestedAlloca(
  %arg0: i1,
  %arg1: memref<2xf32>,
  %arg2: memref<2xf32>) {
  // Alloca variant of ifElseNested: the stack allocation is never moved.
  %0 = alloca() : memref<2xf32>
  test.buffer_based in(%arg1: memref<2xf32>) out(%0: memref<2xf32>)
  cond_br %arg0,
    ^bb1(%arg1, %0 : memref<2xf32>, memref<2xf32>),
    ^bb2(%0, %arg1 : memref<2xf32>, memref<2xf32>)
^bb1(%1: memref<2xf32>, %2: memref<2xf32>):
  br ^bb5(%1, %2 : memref<2xf32>, memref<2xf32>)
^bb2(%3: memref<2xf32>, %4: memref<2xf32>):
  cond_br %arg0, ^bb3(%3 : memref<2xf32>), ^bb4(%4 : memref<2xf32>)
^bb3(%5: memref<2xf32>):
  br ^bb5(%5, %3 : memref<2xf32>, memref<2xf32>)
^bb4(%6: memref<2xf32>):
  br ^bb5(%3, %6 : memref<2xf32>, memref<2xf32>)
^bb5(%7: memref<2xf32>, %8: memref<2xf32>):
  // The heap alloc in the join block stays put, as in ifElseNested.
  %9 = alloc() : memref<2xf32>
  test.buffer_based in(%7: memref<2xf32>) out(%9: memref<2xf32>)
  test.copy(%9, %arg2) : (memref<2xf32>, memref<2xf32>)
  return
}
566
567// CHECK-NEXT: %[[ALLOCA:.*]] = alloca()
568// CHECK-NEXT: test.buffer_based
569//      CHECK: ^bb5
570//      CHECK: ^bb5
571//      CHECK: ^bb5
572// CHECK-NEXT: ^bb5
573// CHECK-NEXT: %[[ALLOC:.*]] = alloc()
574// CHECK-NEXT: test.buffer_based
575
576// -----
577
578// Test Case: Alloca operations shouldn't be moved. The alloc operation should
579// be moved in the beginning analogous to the nestedRegionsAndCondBranch test.
580
581// CHECK-LABEL: func @nestedRegionsAndCondBranchAlloca
func @nestedRegionsAndCondBranchAlloca(
  %arg0: i1,
  %arg1: memref<2xf32>,
  %arg2: memref<2xf32>) {
  cond_br %arg0, ^bb1, ^bb2
^bb1:
  br ^bb3(%arg1 : memref<2xf32>)
^bb2:
  // Outer heap alloc: expected to be hoisted to the entry block.
  %0 = alloc() : memref<2xf32>
  test.region_buffer_based in(%arg1: memref<2xf32>) out(%0: memref<2xf32>) {
  ^bb0(%gen1_arg0: f32, %gen1_arg1: f32):
    // Inner alloca: stack allocation inside the region, never moved.
    %1 = alloca() : memref<2xf32>
    test.buffer_based in(%arg1: memref<2xf32>) out(%1: memref<2xf32>)
    %tmp1 = exp %gen1_arg0 : f32
    test.region_yield %tmp1 : f32
  }
  br ^bb3(%0 : memref<2xf32>)
^bb3(%1: memref<2xf32>):
  test.copy(%1, %arg2) : (memref<2xf32>, memref<2xf32>)
  return
}
603// CHECK-NEXT:   %[[ALLOC:.*]] = alloc()
604// CHECK-NEXT:   cond_br
605//      CHECK:   test.region_buffer_based
606//      CHECK:     %[[ALLOCA:.*]] = alloca()
607// CHECK-NEXT:     test.buffer_based
608
609// -----
610
611// Test Case: structured control-flow loop using a nested alloc.
612// The alloc positions of %3 will be moved upwards.
613
614// CHECK-LABEL: func @loop_alloc
func @loop_alloc(
  %lb: index,
  %ub: index,
  %step: index,
  %buf: memref<2xf32>,
  %res: memref<2xf32>) {
  %0 = alloc() : memref<2xf32>
  %1 = scf.for %i = %lb to %ub step %step
    iter_args(%iterBuf = %buf) -> memref<2xf32> {
    %2 = cmpi eq, %i, %ub : index
    // Static alloc inside the loop body; yielded as the iteration result.
    %3 = alloc() : memref<2xf32>
    scf.yield %3 : memref<2xf32>
  }
  test.copy(%1, %res) : (memref<2xf32>, memref<2xf32>)
  return
}
631
632//      CHECK: %[[ALLOC0:.*]] = alloc()
633// CHECK-NEXT: {{.*}} scf.for
634//      CHECK: %[[ALLOC1:.*]] = alloc()
635
636// -----
637
638// Test Case: structured control-flow loop with a nested if operation using
639// a deeply nested buffer allocation.
640// The allocation %4 is not moved upwards.
641
642// CHECK-LABEL: func @loop_nested_if_alloc
func @loop_nested_if_alloc(
  %lb: index,
  %ub: index,
  %step: index,
  %buf: memref<2xf32>) -> memref<2xf32> {
  %0 = alloc() : memref<2xf32>
  %1 = scf.for %i = %lb to %ub step %step
    iter_args(%iterBuf = %buf) -> memref<2xf32> {
    %2 = cmpi eq, %i, %ub : index
    %3 = scf.if %2 -> (memref<2xf32>) {
      // Alloc in a divergent branch nested inside the loop; per the test
      // header it is not moved upwards.
      %4 = alloc() : memref<2xf32>
      scf.yield %4 : memref<2xf32>
    } else {
      scf.yield %0 : memref<2xf32>
    }
    scf.yield %3 : memref<2xf32>
  }
  return %1 : memref<2xf32>
}
662
663//      CHECK: %[[ALLOC0:.*]] = alloc()
664// CHECK-NEXT: {{.*}} scf.for
665//      CHECK: %[[ALLOC1:.*]] = alloc()
666
667// -----
668
669// Test Case: several nested structured control-flow loops with a deeply nested
670// buffer allocation inside an if operation.
// Same behavior as in loop_nested_if_alloc: the allocs are not moved upwards.
672
673// CHECK-LABEL: func @loop_nested_alloc
func @loop_nested_alloc(
  %lb: index,
  %ub: index,
  %step: index,
  %buf: memref<2xf32>,
  %res: memref<2xf32>) {
  %0 = alloc() : memref<2xf32>
  %1 = scf.for %i = %lb to %ub step %step
    iter_args(%iterBuf = %buf) -> memref<2xf32> {
    %2 = scf.for %i2 = %lb to %ub step %step
      iter_args(%iterBuf2 = %iterBuf) -> memref<2xf32> {
      %3 = scf.for %i3 = %lb to %ub step %step
        iter_args(%iterBuf3 = %iterBuf2) -> memref<2xf32> {
        // Alloc in the innermost loop body, unused by the yields below.
        %4 = alloc() : memref<2xf32>
        %5 = cmpi eq, %i, %ub : index
        %6 = scf.if %5 -> (memref<2xf32>) {
          // Alloc inside the divergent branch of the nested scf.if.
          %7 = alloc() : memref<2xf32>
          scf.yield %7 : memref<2xf32>
        } else {
          scf.yield %iterBuf3 : memref<2xf32>
        }
        scf.yield %6 : memref<2xf32>
      }
      scf.yield %3 : memref<2xf32>
    }
    scf.yield %2 : memref<2xf32>
  }
  test.copy(%1, %res) : (memref<2xf32>, memref<2xf32>)
  return
}
704
705//      CHECK: %[[ALLOC0:.*]] = alloc()
706// CHECK-NEXT: {{.*}} = scf.for
707// CHECK-NEXT: {{.*}} = scf.for
708// CHECK-NEXT: {{.*}} = scf.for
709// CHECK-NEXT: %[[ALLOC1:.*]] = alloc()
710//      CHECK: %[[ALLOC2:.*]] = alloc()
711
712// -----
713
714// CHECK-LABEL: func @loop_nested_alloc_dyn_dependency
func @loop_nested_alloc_dyn_dependency(
  %lb: index,
  %ub: index,
  %step: index,
  %arg0: index,
  %buf: memref<?xf32>,
  %res: memref<?xf32>) {
  %0 = alloc(%arg0) : memref<?xf32>
  %1 = scf.for %i = %lb to %ub step %step
    iter_args(%iterBuf = %buf) -> memref<?xf32> {
    %2 = scf.for %i2 = %lb to %ub step %step
      iter_args(%iterBuf2 = %iterBuf) -> memref<?xf32> {
      %3 = scf.for %i3 = %lb to %ub step %step
        iter_args(%iterBuf3 = %iterBuf2) -> memref<?xf32> {
        %5 = cmpi eq, %i, %ub : index
        %6 = scf.if %5 -> (memref<?xf32>) {
          // The alloc size %i3 is the innermost induction variable, so this
          // alloc cannot be hoisted out of the innermost loop.
          %7 = alloc(%i3) : memref<?xf32>
          scf.yield %7 : memref<?xf32>
        } else {
          scf.yield %iterBuf3 : memref<?xf32>
        }
        scf.yield %6 : memref<?xf32>
      }
      scf.yield %3 : memref<?xf32>
    }
    scf.yield %0 : memref<?xf32>
  }
  test.copy(%1, %res) : (memref<?xf32>, memref<?xf32>)
  return
}
745
746
747//      CHECK: %[[ALLOC0:.*]] = alloc({{.*}})
748// CHECK-NEXT: {{.*}} = scf.for
749// CHECK-NEXT: {{.*}} = scf.for
750// CHECK-NEXT: {{.*}} = scf.for
751//      CHECK: %[[ALLOC1:.*]] = alloc({{.*}})
752