@@ -17,15 +17,7 @@ define i32 @shl_cttz_i32(i32 %x, i32 %y) {
17
17
; RV32I: # %bb.0: # %entry
18
18
; RV32I-NEXT: neg a2, a1
19
19
; RV32I-NEXT: and a1, a1, a2
20
- ; RV32I-NEXT: lui a2, 30667
21
- ; RV32I-NEXT: addi a2, a2, 1329
22
- ; RV32I-NEXT: mul a1, a1, a2
23
- ; RV32I-NEXT: srli a1, a1, 27
24
- ; RV32I-NEXT: lui a2, %hi(.LCPI0_0)
25
- ; RV32I-NEXT: addi a2, a2, %lo(.LCPI0_0)
26
- ; RV32I-NEXT: add a1, a2, a1
27
- ; RV32I-NEXT: lbu a1, 0(a1)
28
- ; RV32I-NEXT: sll a0, a0, a1
20
+ ; RV32I-NEXT: mul a0, a1, a0
29
21
; RV32I-NEXT: ret
30
22
;
31
23
; RV32ZBB-LABEL: shl_cttz_i32:
@@ -34,26 +26,33 @@ define i32 @shl_cttz_i32(i32 %x, i32 %y) {
34
26
; RV32ZBB-NEXT: sll a0, a0, a1
35
27
; RV32ZBB-NEXT: ret
36
28
;
37
- ; RV64I-LABEL: shl_cttz_i32:
38
- ; RV64I: # %bb.0: # %entry
39
- ; RV64I-NEXT: negw a2, a1
40
- ; RV64I-NEXT: and a1, a1, a2
41
- ; RV64I-NEXT: lui a2, 30667
42
- ; RV64I-NEXT: addi a2, a2, 1329
43
- ; RV64I-NEXT: mul a1, a1, a2
44
- ; RV64I-NEXT: srliw a1, a1, 27
45
- ; RV64I-NEXT: lui a2, %hi(.LCPI0_0)
46
- ; RV64I-NEXT: addi a2, a2, %lo(.LCPI0_0)
47
- ; RV64I-NEXT: add a1, a2, a1
48
- ; RV64I-NEXT: lbu a1, 0(a1)
49
- ; RV64I-NEXT: sllw a0, a0, a1
50
- ; RV64I-NEXT: ret
29
+ ; RV64IILLEGALI32-LABEL: shl_cttz_i32:
30
+ ; RV64IILLEGALI32: # %bb.0: # %entry
31
+ ; RV64IILLEGALI32-NEXT: negw a2, a1
32
+ ; RV64IILLEGALI32-NEXT: and a1, a1, a2
33
+ ; RV64IILLEGALI32-NEXT: lui a2, 30667
34
+ ; RV64IILLEGALI32-NEXT: addi a2, a2, 1329
35
+ ; RV64IILLEGALI32-NEXT: mul a1, a1, a2
36
+ ; RV64IILLEGALI32-NEXT: srliw a1, a1, 27
37
+ ; RV64IILLEGALI32-NEXT: lui a2, %hi(.LCPI0_0)
38
+ ; RV64IILLEGALI32-NEXT: addi a2, a2, %lo(.LCPI0_0)
39
+ ; RV64IILLEGALI32-NEXT: add a1, a2, a1
40
+ ; RV64IILLEGALI32-NEXT: lbu a1, 0(a1)
41
+ ; RV64IILLEGALI32-NEXT: sllw a0, a0, a1
42
+ ; RV64IILLEGALI32-NEXT: ret
51
43
;
52
44
; RV64ZBB-LABEL: shl_cttz_i32:
53
45
; RV64ZBB: # %bb.0: # %entry
54
46
; RV64ZBB-NEXT: ctzw a1, a1
55
47
; RV64ZBB-NEXT: sllw a0, a0, a1
56
48
; RV64ZBB-NEXT: ret
49
+ ;
50
+ ; RV64ILEGALI32-LABEL: shl_cttz_i32:
51
+ ; RV64ILEGALI32: # %bb.0: # %entry
52
+ ; RV64ILEGALI32-NEXT: negw a2, a1
53
+ ; RV64ILEGALI32-NEXT: and a1, a1, a2
54
+ ; RV64ILEGALI32-NEXT: mulw a0, a1, a0
55
+ ; RV64ILEGALI32-NEXT: ret
57
56
entry:
58
57
%cttz = call i32 @llvm.cttz.i32(i32 %y, i1 true)
59
58
%res = shl i32 %x, %cttz
@@ -65,16 +64,7 @@ define i32 @shl_cttz_constant_i32(i32 %y) {
65
64
; RV32I: # %bb.0: # %entry
66
65
; RV32I-NEXT: neg a1, a0
67
66
; RV32I-NEXT: and a0, a0, a1
68
- ; RV32I-NEXT: lui a1, 30667
69
- ; RV32I-NEXT: addi a1, a1, 1329
70
- ; RV32I-NEXT: mul a0, a0, a1
71
- ; RV32I-NEXT: srli a0, a0, 27
72
- ; RV32I-NEXT: lui a1, %hi(.LCPI1_0)
73
- ; RV32I-NEXT: addi a1, a1, %lo(.LCPI1_0)
74
- ; RV32I-NEXT: add a0, a1, a0
75
- ; RV32I-NEXT: lbu a0, 0(a0)
76
- ; RV32I-NEXT: li a1, 4
77
- ; RV32I-NEXT: sll a0, a1, a0
67
+ ; RV32I-NEXT: slli a0, a0, 2
78
68
; RV32I-NEXT: ret
79
69
;
80
70
; RV32ZBB-LABEL: shl_cttz_constant_i32:
@@ -84,28 +74,35 @@ define i32 @shl_cttz_constant_i32(i32 %y) {
84
74
; RV32ZBB-NEXT: sll a0, a1, a0
85
75
; RV32ZBB-NEXT: ret
86
76
;
87
- ; RV64I-LABEL: shl_cttz_constant_i32:
88
- ; RV64I: # %bb.0: # %entry
89
- ; RV64I-NEXT: negw a1, a0
90
- ; RV64I-NEXT: and a0, a0, a1
91
- ; RV64I-NEXT: lui a1, 30667
92
- ; RV64I-NEXT: addi a1, a1, 1329
93
- ; RV64I-NEXT: mul a0, a0, a1
94
- ; RV64I-NEXT: srliw a0, a0, 27
95
- ; RV64I-NEXT: lui a1, %hi(.LCPI1_0)
96
- ; RV64I-NEXT: addi a1, a1, %lo(.LCPI1_0)
97
- ; RV64I-NEXT: add a0, a1, a0
98
- ; RV64I-NEXT: lbu a0, 0(a0)
99
- ; RV64I-NEXT: li a1, 4
100
- ; RV64I-NEXT: sllw a0, a1, a0
101
- ; RV64I-NEXT: ret
77
+ ; RV64IILLEGALI32-LABEL: shl_cttz_constant_i32:
78
+ ; RV64IILLEGALI32: # %bb.0: # %entry
79
+ ; RV64IILLEGALI32-NEXT: negw a1, a0
80
+ ; RV64IILLEGALI32-NEXT: and a0, a0, a1
81
+ ; RV64IILLEGALI32-NEXT: lui a1, 30667
82
+ ; RV64IILLEGALI32-NEXT: addi a1, a1, 1329
83
+ ; RV64IILLEGALI32-NEXT: mul a0, a0, a1
84
+ ; RV64IILLEGALI32-NEXT: srliw a0, a0, 27
85
+ ; RV64IILLEGALI32-NEXT: lui a1, %hi(.LCPI1_0)
86
+ ; RV64IILLEGALI32-NEXT: addi a1, a1, %lo(.LCPI1_0)
87
+ ; RV64IILLEGALI32-NEXT: add a0, a1, a0
88
+ ; RV64IILLEGALI32-NEXT: lbu a0, 0(a0)
89
+ ; RV64IILLEGALI32-NEXT: li a1, 4
90
+ ; RV64IILLEGALI32-NEXT: sllw a0, a1, a0
91
+ ; RV64IILLEGALI32-NEXT: ret
102
92
;
103
93
; RV64ZBB-LABEL: shl_cttz_constant_i32:
104
94
; RV64ZBB: # %bb.0: # %entry
105
95
; RV64ZBB-NEXT: ctzw a0, a0
106
96
; RV64ZBB-NEXT: li a1, 4
107
97
; RV64ZBB-NEXT: sllw a0, a1, a0
108
98
; RV64ZBB-NEXT: ret
99
+ ;
100
+ ; RV64ILEGALI32-LABEL: shl_cttz_constant_i32:
101
+ ; RV64ILEGALI32: # %bb.0: # %entry
102
+ ; RV64ILEGALI32-NEXT: negw a1, a0
103
+ ; RV64ILEGALI32-NEXT: and a0, a0, a1
104
+ ; RV64ILEGALI32-NEXT: slliw a0, a0, 2
105
+ ; RV64ILEGALI32-NEXT: ret
109
106
entry:
110
107
%cttz = call i32 @llvm.cttz.i32(i32 %y, i1 true)
111
108
%res = shl i32 4, %cttz
@@ -117,15 +114,7 @@ define i32 @shl_cttz_nuw_i32(i32 %x, i32 %y) {
117
114
; RV32I: # %bb.0: # %entry
118
115
; RV32I-NEXT: neg a2, a1
119
116
; RV32I-NEXT: and a1, a1, a2
120
- ; RV32I-NEXT: lui a2, 30667
121
- ; RV32I-NEXT: addi a2, a2, 1329
122
- ; RV32I-NEXT: mul a1, a1, a2
123
- ; RV32I-NEXT: srli a1, a1, 27
124
- ; RV32I-NEXT: lui a2, %hi(.LCPI2_0)
125
- ; RV32I-NEXT: addi a2, a2, %lo(.LCPI2_0)
126
- ; RV32I-NEXT: add a1, a2, a1
127
- ; RV32I-NEXT: lbu a1, 0(a1)
128
- ; RV32I-NEXT: sll a0, a0, a1
117
+ ; RV32I-NEXT: mul a0, a1, a0
129
118
; RV32I-NEXT: ret
130
119
;
131
120
; RV32ZBB-LABEL: shl_cttz_nuw_i32:
@@ -134,26 +123,33 @@ define i32 @shl_cttz_nuw_i32(i32 %x, i32 %y) {
134
123
; RV32ZBB-NEXT: sll a0, a0, a1
135
124
; RV32ZBB-NEXT: ret
136
125
;
137
- ; RV64I-LABEL: shl_cttz_nuw_i32:
138
- ; RV64I: # %bb.0: # %entry
139
- ; RV64I-NEXT: negw a2, a1
140
- ; RV64I-NEXT: and a1, a1, a2
141
- ; RV64I-NEXT: lui a2, 30667
142
- ; RV64I-NEXT: addi a2, a2, 1329
143
- ; RV64I-NEXT: mul a1, a1, a2
144
- ; RV64I-NEXT: srliw a1, a1, 27
145
- ; RV64I-NEXT: lui a2, %hi(.LCPI2_0)
146
- ; RV64I-NEXT: addi a2, a2, %lo(.LCPI2_0)
147
- ; RV64I-NEXT: add a1, a2, a1
148
- ; RV64I-NEXT: lbu a1, 0(a1)
149
- ; RV64I-NEXT: sllw a0, a0, a1
150
- ; RV64I-NEXT: ret
126
+ ; RV64IILLEGALI32-LABEL: shl_cttz_nuw_i32:
127
+ ; RV64IILLEGALI32: # %bb.0: # %entry
128
+ ; RV64IILLEGALI32-NEXT: negw a2, a1
129
+ ; RV64IILLEGALI32-NEXT: and a1, a1, a2
130
+ ; RV64IILLEGALI32-NEXT: lui a2, 30667
131
+ ; RV64IILLEGALI32-NEXT: addi a2, a2, 1329
132
+ ; RV64IILLEGALI32-NEXT: mul a1, a1, a2
133
+ ; RV64IILLEGALI32-NEXT: srliw a1, a1, 27
134
+ ; RV64IILLEGALI32-NEXT: lui a2, %hi(.LCPI2_0)
135
+ ; RV64IILLEGALI32-NEXT: addi a2, a2, %lo(.LCPI2_0)
136
+ ; RV64IILLEGALI32-NEXT: add a1, a2, a1
137
+ ; RV64IILLEGALI32-NEXT: lbu a1, 0(a1)
138
+ ; RV64IILLEGALI32-NEXT: sllw a0, a0, a1
139
+ ; RV64IILLEGALI32-NEXT: ret
151
140
;
152
141
; RV64ZBB-LABEL: shl_cttz_nuw_i32:
153
142
; RV64ZBB: # %bb.0: # %entry
154
143
; RV64ZBB-NEXT: ctzw a1, a1
155
144
; RV64ZBB-NEXT: sllw a0, a0, a1
156
145
; RV64ZBB-NEXT: ret
146
+ ;
147
+ ; RV64ILEGALI32-LABEL: shl_cttz_nuw_i32:
148
+ ; RV64ILEGALI32: # %bb.0: # %entry
149
+ ; RV64ILEGALI32-NEXT: negw a2, a1
150
+ ; RV64ILEGALI32-NEXT: and a1, a1, a2
151
+ ; RV64ILEGALI32-NEXT: mulw a0, a1, a0
152
+ ; RV64ILEGALI32-NEXT: ret
157
153
entry:
158
154
%cttz = call i32 @llvm.cttz.i32(i32 %y, i1 true)
159
155
%res = shl nuw i32 %x, %cttz
@@ -165,15 +161,7 @@ define i32 @shl_cttz_nsw_i32(i32 %x, i32 %y) {
165
161
; RV32I: # %bb.0: # %entry
166
162
; RV32I-NEXT: neg a2, a1
167
163
; RV32I-NEXT: and a1, a1, a2
168
- ; RV32I-NEXT: lui a2, 30667
169
- ; RV32I-NEXT: addi a2, a2, 1329
170
- ; RV32I-NEXT: mul a1, a1, a2
171
- ; RV32I-NEXT: srli a1, a1, 27
172
- ; RV32I-NEXT: lui a2, %hi(.LCPI3_0)
173
- ; RV32I-NEXT: addi a2, a2, %lo(.LCPI3_0)
174
- ; RV32I-NEXT: add a1, a2, a1
175
- ; RV32I-NEXT: lbu a1, 0(a1)
176
- ; RV32I-NEXT: sll a0, a0, a1
164
+ ; RV32I-NEXT: mul a0, a1, a0
177
165
; RV32I-NEXT: ret
178
166
;
179
167
; RV32ZBB-LABEL: shl_cttz_nsw_i32:
@@ -182,26 +170,33 @@ define i32 @shl_cttz_nsw_i32(i32 %x, i32 %y) {
182
170
; RV32ZBB-NEXT: sll a0, a0, a1
183
171
; RV32ZBB-NEXT: ret
184
172
;
185
- ; RV64I-LABEL: shl_cttz_nsw_i32:
186
- ; RV64I: # %bb.0: # %entry
187
- ; RV64I-NEXT: negw a2, a1
188
- ; RV64I-NEXT: and a1, a1, a2
189
- ; RV64I-NEXT: lui a2, 30667
190
- ; RV64I-NEXT: addi a2, a2, 1329
191
- ; RV64I-NEXT: mul a1, a1, a2
192
- ; RV64I-NEXT: srliw a1, a1, 27
193
- ; RV64I-NEXT: lui a2, %hi(.LCPI3_0)
194
- ; RV64I-NEXT: addi a2, a2, %lo(.LCPI3_0)
195
- ; RV64I-NEXT: add a1, a2, a1
196
- ; RV64I-NEXT: lbu a1, 0(a1)
197
- ; RV64I-NEXT: sllw a0, a0, a1
198
- ; RV64I-NEXT: ret
173
+ ; RV64IILLEGALI32-LABEL: shl_cttz_nsw_i32:
174
+ ; RV64IILLEGALI32: # %bb.0: # %entry
175
+ ; RV64IILLEGALI32-NEXT: negw a2, a1
176
+ ; RV64IILLEGALI32-NEXT: and a1, a1, a2
177
+ ; RV64IILLEGALI32-NEXT: lui a2, 30667
178
+ ; RV64IILLEGALI32-NEXT: addi a2, a2, 1329
179
+ ; RV64IILLEGALI32-NEXT: mul a1, a1, a2
180
+ ; RV64IILLEGALI32-NEXT: srliw a1, a1, 27
181
+ ; RV64IILLEGALI32-NEXT: lui a2, %hi(.LCPI3_0)
182
+ ; RV64IILLEGALI32-NEXT: addi a2, a2, %lo(.LCPI3_0)
183
+ ; RV64IILLEGALI32-NEXT: add a1, a2, a1
184
+ ; RV64IILLEGALI32-NEXT: lbu a1, 0(a1)
185
+ ; RV64IILLEGALI32-NEXT: sllw a0, a0, a1
186
+ ; RV64IILLEGALI32-NEXT: ret
199
187
;
200
188
; RV64ZBB-LABEL: shl_cttz_nsw_i32:
201
189
; RV64ZBB: # %bb.0: # %entry
202
190
; RV64ZBB-NEXT: ctzw a1, a1
203
191
; RV64ZBB-NEXT: sllw a0, a0, a1
204
192
; RV64ZBB-NEXT: ret
193
+ ;
194
+ ; RV64ILEGALI32-LABEL: shl_cttz_nsw_i32:
195
+ ; RV64ILEGALI32: # %bb.0: # %entry
196
+ ; RV64ILEGALI32-NEXT: negw a2, a1
197
+ ; RV64ILEGALI32-NEXT: and a1, a1, a2
198
+ ; RV64ILEGALI32-NEXT: mulw a0, a1, a0
199
+ ; RV64ILEGALI32-NEXT: ret
205
200
entry:
206
201
%cttz = call i32 @llvm.cttz.i32(i32 %y, i1 true)
207
202
%res = shl nsw i32 %x, %cttz
@@ -388,17 +383,9 @@ define i64 @shl_cttz_i64(i64 %x, i64 %y) {
388
383
;
389
384
; RV64I-LABEL: shl_cttz_i64:
390
385
; RV64I: # %bb.0: # %entry
391
- ; RV64I-NEXT: lui a2, %hi(.LCPI5_0)
392
- ; RV64I-NEXT: ld a2, %lo(.LCPI5_0)(a2)
393
- ; RV64I-NEXT: neg a3, a1
394
- ; RV64I-NEXT: and a1, a1, a3
395
- ; RV64I-NEXT: mul a1, a1, a2
396
- ; RV64I-NEXT: srli a1, a1, 58
397
- ; RV64I-NEXT: lui a2, %hi(.LCPI5_1)
398
- ; RV64I-NEXT: addi a2, a2, %lo(.LCPI5_1)
399
- ; RV64I-NEXT: add a1, a2, a1
400
- ; RV64I-NEXT: lbu a1, 0(a1)
401
- ; RV64I-NEXT: sll a0, a0, a1
386
+ ; RV64I-NEXT: neg a2, a1
387
+ ; RV64I-NEXT: and a1, a1, a2
388
+ ; RV64I-NEXT: mul a0, a1, a0
402
389
; RV64I-NEXT: ret
403
390
;
404
391
; RV64ZBB-LABEL: shl_cttz_i64:
@@ -481,18 +468,9 @@ define i64 @shl_cttz_constant_i64(i64 %y) {
481
468
;
482
469
; RV64I-LABEL: shl_cttz_constant_i64:
483
470
; RV64I: # %bb.0: # %entry
484
- ; RV64I-NEXT: lui a1, %hi(.LCPI6_0)
485
- ; RV64I-NEXT: ld a1, %lo(.LCPI6_0)(a1)
486
- ; RV64I-NEXT: neg a2, a0
487
- ; RV64I-NEXT: and a0, a0, a2
488
- ; RV64I-NEXT: mul a0, a0, a1
489
- ; RV64I-NEXT: srli a0, a0, 58
490
- ; RV64I-NEXT: lui a1, %hi(.LCPI6_1)
491
- ; RV64I-NEXT: addi a1, a1, %lo(.LCPI6_1)
492
- ; RV64I-NEXT: add a0, a1, a0
493
- ; RV64I-NEXT: lbu a0, 0(a0)
494
- ; RV64I-NEXT: li a1, 4
495
- ; RV64I-NEXT: sll a0, a1, a0
471
+ ; RV64I-NEXT: neg a1, a0
472
+ ; RV64I-NEXT: and a0, a0, a1
473
+ ; RV64I-NEXT: slli a0, a0, 2
496
474
; RV64I-NEXT: ret
497
475
;
498
476
; RV64ZBB-LABEL: shl_cttz_constant_i64:
@@ -578,17 +556,9 @@ define i64 @shl_cttz_nuw_i64(i64 %x, i64 %y) {
578
556
;
579
557
; RV64I-LABEL: shl_cttz_nuw_i64:
580
558
; RV64I: # %bb.0: # %entry
581
- ; RV64I-NEXT: lui a2, %hi(.LCPI7_0)
582
- ; RV64I-NEXT: ld a2, %lo(.LCPI7_0)(a2)
583
- ; RV64I-NEXT: neg a3, a1
584
- ; RV64I-NEXT: and a1, a1, a3
585
- ; RV64I-NEXT: mul a1, a1, a2
586
- ; RV64I-NEXT: srli a1, a1, 58
587
- ; RV64I-NEXT: lui a2, %hi(.LCPI7_1)
588
- ; RV64I-NEXT: addi a2, a2, %lo(.LCPI7_1)
589
- ; RV64I-NEXT: add a1, a2, a1
590
- ; RV64I-NEXT: lbu a1, 0(a1)
591
- ; RV64I-NEXT: sll a0, a0, a1
559
+ ; RV64I-NEXT: neg a2, a1
560
+ ; RV64I-NEXT: and a1, a1, a2
561
+ ; RV64I-NEXT: mul a0, a1, a0
592
562
; RV64I-NEXT: ret
593
563
;
594
564
; RV64ZBB-LABEL: shl_cttz_nuw_i64:
@@ -673,17 +643,9 @@ define i64 @shl_cttz_nsw_i64(i64 %x, i64 %y) {
673
643
;
674
644
; RV64I-LABEL: shl_cttz_nsw_i64:
675
645
; RV64I: # %bb.0: # %entry
676
- ; RV64I-NEXT: lui a2, %hi(.LCPI8_0)
677
- ; RV64I-NEXT: ld a2, %lo(.LCPI8_0)(a2)
678
- ; RV64I-NEXT: neg a3, a1
679
- ; RV64I-NEXT: and a1, a1, a3
680
- ; RV64I-NEXT: mul a1, a1, a2
681
- ; RV64I-NEXT: srli a1, a1, 58
682
- ; RV64I-NEXT: lui a2, %hi(.LCPI8_1)
683
- ; RV64I-NEXT: addi a2, a2, %lo(.LCPI8_1)
684
- ; RV64I-NEXT: add a1, a2, a1
685
- ; RV64I-NEXT: lbu a1, 0(a1)
686
- ; RV64I-NEXT: sll a0, a0, a1
646
+ ; RV64I-NEXT: neg a2, a1
647
+ ; RV64I-NEXT: and a1, a1, a2
648
+ ; RV64I-NEXT: mul a0, a1, a0
687
649
; RV64I-NEXT: ret
688
650
;
689
651
; RV64ZBB-LABEL: shl_cttz_nsw_i64:
@@ -866,7 +828,5 @@ entry:
866
828
declare void @use32(i32 signext)
867
829
declare void @use64(i64)
868
830
;; NOTE: These prefixes are unused and the list is autogenerated. Do not add tests below this line:
869
- ; RV64IILLEGALI32: {{.*}}
870
- ; RV64ILEGALI32: {{.*}}
871
831
; RV64ZBBILLEGALI32: {{.*}}
872
832
; RV64ZBBLEGALI32: {{.*}}
0 commit comments