; RUN: llc -mtriple=amdgcn-amd-amdhsa -mcpu=gfx900 -verify-machineinstrs < %s | FileCheck %s

; Directly self-recursive function. It has its own stack object (the
; volatile store target spill/argument area), so a nonzero per-call
; ScratchSize must be reported.
; CHECK-LABEL: {{^}}recursive:
; CHECK: ScratchSize: 16
define void @recursive() {
  call void @recursive()
  store volatile i32 0, i32 addrspace(1)* undef
  ret void
}
; Tail-recursive function with no stack objects: the tail call is lowered
; as a branch, so no scratch memory is needed at all.
; CHECK-LABEL: {{^}}tail_recursive:
; CHECK: ScratchSize: 0
define void @tail_recursive() {
  tail call void @tail_recursive()
  ret void
}
; Non-recursive (norecurse) wrapper that only tail-calls the
; tail-recursive function; used below to test indirect kernel usage.
define void @calls_tail_recursive() norecurse {
  tail call void @tail_recursive()
  ret void
}
; Tail-recursive, but each iteration allocates a private (addrspace(5))
; stack slot, so it does consume scratch per frame.
; CHECK-LABEL: {{^}}tail_recursive_with_stack:
define void @tail_recursive_with_stack() {
  %alloca = alloca i32, addrspace(5)
  store volatile i32 0, i32 addrspace(5)* %alloca
  tail call void @tail_recursive_with_stack()
  ret void
}
; For an arbitrary recursive call, report a large number for unknown stack usage.
; (16384 assumed recursion budget + 16 for the callee's own frame.)
; CHECK-LABEL: {{^}}calls_recursive:
; CHECK: .amdhsa_private_segment_fixed_size 16400{{$}}
define amdgpu_kernel void @calls_recursive() {
  call void @recursive()
  ret void
}
; Make sure we do not report a huge stack size for tail recursive
; functions reached through a non-recursive wrapper.
; CHECK-LABEL: {{^}}kernel_indirectly_calls_tail_recursive:
; CHECK: .amdhsa_private_segment_fixed_size 0{{$}}
define amdgpu_kernel void @kernel_indirectly_calls_tail_recursive() {
  call void @calls_tail_recursive()
  ret void
}
; TODO: Even though tail_recursive is only called as a tail call, we
; end up treating it as generally recursive call from the regular call
; in the kernel.

; CHECK-LABEL: {{^}}kernel_calls_tail_recursive:
; CHECK: .amdhsa_private_segment_fixed_size 16384{{$}}
define amdgpu_kernel void @kernel_calls_tail_recursive() {
  call void @tail_recursive()
  ret void
}
; Tail recursion with a per-frame stack object still gets the assumed
; recursion budget, since the frames accumulate.
; CHECK-LABEL: {{^}}kernel_calls_tail_recursive_with_stack:
; CHECK: .amdhsa_private_segment_fixed_size 16384{{$}}
define amdgpu_kernel void @kernel_calls_tail_recursive_with_stack() {
  call void @tail_recursive_with_stack()
  ret void
}