/* Global constant/copy propagation for RTL.
Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
2006, 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3. If not see
<http://www.gnu.org/licenses/>. */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "toplev.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "except.h"
#include "params.h"
#include "cselib.h"
#include "intl.h"
#include "obstack.h"
#include "tree-pass.h"
#include "hashtab.h"
#include "df.h"
#include "dbgcnt.h"
#include "target.h"
#include "cfgloop.h"
/* An obstack for our working variables. */
static struct obstack cprop_obstack;
/* Occurrence of an expression.
There is one per basic block. If a pattern appears more than once the
last appearance is used. */
struct occr
{
/* Next occurrence of this expression. */
struct occr *next;
/* The insn that computes the expression. */
rtx insn;
};
typedef struct occr *occr_t;
DEF_VEC_P (occr_t);
DEF_VEC_ALLOC_P (occr_t, heap);
/* Hash table entry for assignment expressions. */
struct expr
{
/* The expression (DEST := SRC). */
rtx dest;
rtx src;
/* Index in the available expression bitmaps. */
int bitmap_index;
/* Next entry with the same hash. */
struct expr *next_same_hash;
/* List of available occurrences in basic blocks in the function.
An "available occurrence" is one that is the last occurrence in the
basic block and whose operands are not modified by following statements
in the basic block [including this insn]. */
struct occr *avail_occr;
};
/* Hash table for copy propagation expressions.
Each hash table is an array of buckets.
??? It is known that if it were an array of entries, structure elements
`next_same_hash' and `bitmap_index' wouldn't be necessary. However, it is
not clear whether in the final analysis a sufficient amount of memory would
be saved as the size of the available expression bitmaps would be larger
[one could build a mapping table without holes afterwards though].
Someday I'll perform the computation and figure it out. */
struct hash_table_d
{
/* The table itself.
This is an array of `set_hash_table_size' elements. */
struct expr **table;
/* Size of the hash table, in elements. */
unsigned int size;
/* Number of hash table elements. */
unsigned int n_elems;
};
/* Copy propagation hash table. */
static struct hash_table_d set_hash_table;
/* Array of implicit set patterns indexed by basic block index. */
static rtx *implicit_sets;
/* Array of indexes of expressions for implicit set patterns indexed by basic
block index. In other words, implicit_set_indexes[i] is the bitmap_index
of the expression whose RTX is implicit_sets[i]. */
static int *implicit_set_indexes;
/* Bitmap containing one bit for each register in the program.
Used when performing GCSE to track which registers have been set since
the start or end of the basic block while traversing that block. */
static regset reg_set_bitmap;
/* Various variables for statistics gathering. */
/* Memory used in a pass.
This isn't intended to be absolutely precise. Its intent is only
to keep an eye on memory usage. */
static int bytes_used;
/* Number of local constants propagated. */
static int local_const_prop_count;
/* Number of local copies propagated. */
static int local_copy_prop_count;
/* Number of global constants propagated. */
static int global_const_prop_count;
/* Number of global copies propagated. */
static int global_copy_prop_count;
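/* Allocate an object of type T, or S bytes, on the cprop obstack. */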
#define GOBNEW(T) ((T *) cprop_alloc (sizeof (T)))
#define GOBNEWVAR(T, S) ((T *) cprop_alloc ((S)))
/* Cover function to obstack_alloc. */
static void *
cprop_alloc (unsigned long size)
{
bytes_used += size;
return obstack_alloc (&cprop_obstack, size);
}
/* Return nonzero if register X is unchanged from INSN to the end
of INSN's basic block. */
static int
reg_available_p (const_rtx x, const_rtx insn ATTRIBUTE_UNUSED)
{
return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x));
}
/* Hash a set of register REGNO.
Sets are hashed on the register that is set. This simplifies the PRE copy
propagation code.
??? May need to make things more elaborate. Later, as necessary. */
static unsigned int
hash_set (int regno, int hash_table_size)
{
unsigned int hash;
hash = regno;
return hash % hash_table_size;
}
/* Insert assignment DEST:=SRC from INSN in the hash table.
DEST is a register and SRC is a register or a suitable constant.
If the assignment is already present in the table, record it as
the last occurrence in INSN's basic block.
IMPLICIT is true if it's an implicit set, false otherwise. */
static void
insert_set_in_table (rtx dest, rtx src, rtx insn, struct hash_table_d *table,
bool implicit)
{
bool found = false;
unsigned int hash;
struct expr *cur_expr, *last_expr = NULL;
struct occr *cur_occr;
hash = hash_set (REGNO (dest), table->size);
for (cur_expr = table->table[hash]; cur_expr;
cur_expr = cur_expr->next_same_hash)
{
if (dest == cur_expr->dest
&& src == cur_expr->src)
{
found = true;
break;
}
last_expr = cur_expr;
}
if (! found)
{
cur_expr = GOBNEW (struct expr);
bytes_used += sizeof (struct expr);
if (table->table[hash] == NULL)
/* This is the first pattern that hashed to this index. */
table->table[hash] = cur_expr;
else
/* Add EXPR to end of this hash chain. */
last_expr->next_same_hash = cur_expr;
/* Set the fields of the expr element.
We must copy X because it can be modified when copy propagation is
performed on its operands. */
cur_expr->dest = copy_rtx (dest);
cur_expr->src = copy_rtx (src);
cur_expr->bitmap_index = table->n_elems++;
cur_expr->next_same_hash = NULL;
cur_expr->avail_occr = NULL;
}
/* Now record the occurrence. */
cur_occr = cur_expr->avail_occr;
if (cur_occr
&& BLOCK_FOR_INSN (cur_occr->insn) == BLOCK_FOR_INSN (insn))
{
/* Found another instance of the expression in the same basic block.
Prefer this occurrence to the currently recorded one. We want
the last one in the block and the block is scanned from start
to end. */
cur_occr->insn = insn;
}
else
{
/* First occurrence of this expression in this basic block. */
cur_occr = GOBNEW (struct occr);
bytes_used += sizeof (struct occr);
cur_occr->insn = insn;
cur_occr->next = cur_expr->avail_occr;
cur_expr->avail_occr = cur_occr;
}
/* Record bitmap_index of the implicit set in implicit_set_indexes. */
if (implicit)
implicit_set_indexes[BLOCK_FOR_INSN (insn)->index] = cur_expr->bitmap_index;
}
/* Determine whether the rtx X should be treated as a constant for CPROP.
Since X might be inserted more than once we have to take care that it
is sharable. */
static bool
cprop_constant_p (const_rtx x)
{
return CONSTANT_P (x) && (GET_CODE (x) != CONST || shared_const_p (x));
}
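/* For example, a (const_int 4) or a (symbol_ref) always qualifies, while
a (const ...) expression qualifies only if it is shareable; a non-shared
CONST would have to be unshared at every insertion point, so it is
rejected. */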
/* Scan SET present in INSN and add an entry to the hash TABLE.
IMPLICIT is true if it's an implicit set, false otherwise. */
static void
hash_scan_set (rtx set, rtx insn, struct hash_table_d *table, bool implicit)
{
rtx src = SET_SRC (set);
rtx dest = SET_DEST (set);
if (REG_P (dest)
&& ! HARD_REGISTER_P (dest)
&& reg_available_p (dest, insn)
&& can_copy_p (GET_MODE (dest)))
{
/* See if a REG_EQUAL note shows this equivalent to a simpler expression.
This allows us to do a single CPROP pass and still eliminate
redundant constants, addresses or other expressions that are
constructed with multiple instructions.
However, keep the original SRC if INSN is a simple reg-reg move. In
this case, there will almost always be a REG_EQUAL note on the
insn that sets SRC. By recording the REG_EQUAL value here as SRC
for INSN, we miss copy propagation opportunities.
Note that this does not impede profitable constant propagations. We
"look through" reg-reg sets in lookup_set. */
rtx note = find_reg_equal_equiv_note (insn);
if (note != 0
&& REG_NOTE_KIND (note) == REG_EQUAL
&& !REG_P (src)
&& cprop_constant_p (XEXP (note, 0)))
src = XEXP (note, 0), set = gen_rtx_SET (VOIDmode, dest, src);
/* Record sets for constant/copy propagation. */
if ((REG_P (src)
&& src != dest
&& ! HARD_REGISTER_P (src)
&& reg_available_p (src, insn))
|| cprop_constant_p (src))
insert_set_in_table (dest, src, insn, table, implicit);
}
}
/* Process INSN and add hash table entries as appropriate. */
static void
hash_scan_insn (rtx insn, struct hash_table_d *table)
{
rtx pat = PATTERN (insn);
int i;
/* Pick out the sets of INSN. Registers set or clobbered in other ways
are recorded separately by make_set_regs_unavailable. */
if (GET_CODE (pat) == SET)
hash_scan_set (pat, insn, table, false);
else if (GET_CODE (pat) == PARALLEL)
for (i = 0; i < XVECLEN (pat, 0); i++)
{
rtx x = XVECEXP (pat, 0, i);
if (GET_CODE (x) == SET)
hash_scan_set (x, insn, table, false);
}
}
/* Dump the hash table TABLE to file FILE under the name NAME. */
static void
dump_hash_table (FILE *file, const char *name, struct hash_table_d *table)
{
int i;
/* Flattened out table, so it's printed in proper order. */
struct expr **flat_table;
unsigned int *hash_val;
struct expr *expr;
flat_table = XCNEWVEC (struct expr *, table->n_elems);
hash_val = XNEWVEC (unsigned int, table->n_elems);
for (i = 0; i < (int) table->size; i++)
for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
{
flat_table[expr->bitmap_index] = expr;
hash_val[expr->bitmap_index] = i;
}
fprintf (file, "%s hash table (%d buckets, %d entries)\n",
name, table->size, table->n_elems);
for (i = 0; i < (int) table->n_elems; i++)
if (flat_table[i] != 0)
{
expr = flat_table[i];
fprintf (file, "Index %d (hash value %d)\n ",
expr->bitmap_index, hash_val[i]);
print_rtl (file, expr->dest);
fprintf (file, " := ");
print_rtl (file, expr->src);
fprintf (file, "\n");
}
fprintf (file, "\n");
free (flat_table);
free (hash_val);
}
/* Record as unavailable all registers that are DEF operands of INSN. */
static void
make_set_regs_unavailable (rtx insn)
{
struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
df_ref *def_rec;
for (def_rec = DF_INSN_INFO_DEFS (insn_info); *def_rec; def_rec++)
SET_REGNO_REG_SET (reg_set_bitmap, DF_REF_REGNO (*def_rec));
}
/* Top level function to create an assignment hash table.
Assignment entries are placed in the hash table if
- they are of the form (set (pseudo-reg) src),
- src is something we want to perform const/copy propagation on,
- none of the operands or target are subsequently modified in the block
Currently src must be a pseudo-reg or a const_int.
TABLE is the table computed. */
static void
compute_hash_table_work (struct hash_table_d *table)
{
basic_block bb;
/* Allocate vars to track sets of regs. */
reg_set_bitmap = ALLOC_REG_SET (NULL);
FOR_EACH_BB (bb)
{
rtx insn;
/* Reset tables used to keep track of what's not yet invalid [since
the end of the block]. */
CLEAR_REG_SET (reg_set_bitmap);
/* Go over all insns from the last to the first. This is convenient
for tracking available registers, i.e. not set between INSN and
the end of the basic block BB. */
FOR_BB_INSNS_REVERSE (bb, insn)
{
/* Only real insns are interesting. */
if (!NONDEBUG_INSN_P (insn))
continue;
/* Record interesting sets from INSN in the hash table. */
hash_scan_insn (insn, table);
/* Any registers set in INSN will make SETs above it not AVAIL. */
make_set_regs_unavailable (insn);
}
/* Insert implicit sets in the hash table, pretending they appear as
insns at the head of the basic block. */
if (implicit_sets[bb->index] != NULL_RTX)
hash_scan_set (implicit_sets[bb->index], BB_HEAD (bb), table, true);
}
FREE_REG_SET (reg_set_bitmap);
}
/* Allocate space for the set/expr hash TABLE.
The function's maximum insn count determines the number of buckets. */
static void
alloc_hash_table (struct hash_table_d *table)
{
int n;
n = get_max_insn_count ();
table->size = n / 4;
if (table->size < 11)
table->size = 11;
/* Attempt to maintain efficient use of hash table.
Making it an odd number is simplest for now.
??? Later take some measurements. */
table->size |= 1;
n = table->size * sizeof (struct expr *);
table->table = XNEWVAR (struct expr *, n);
}
/* Free things allocated by alloc_hash_table. */
static void
free_hash_table (struct hash_table_d *table)
{
free (table->table);
}
/* Compute the hash TABLE for doing copy/const propagation or
expression hash table. */
static void
compute_hash_table (struct hash_table_d *table)
{
/* Initialize count of number of entries in hash table. */
table->n_elems = 0;
memset (table->table, 0, table->size * sizeof (struct expr *));
compute_hash_table_work (table);
}
/* Expression tracking support. */
/* Lookup REGNO in the set TABLE. The result is a pointer to the
table entry, or NULL if not found. */
static struct expr *
lookup_set (unsigned int regno, struct hash_table_d *table)
{
unsigned int hash = hash_set (regno, table->size);
struct expr *expr;
expr = table->table[hash];
while (expr && REGNO (expr->dest) != regno)
expr = expr->next_same_hash;
return expr;
}
/* Return the next entry for REGNO in list EXPR. */
static struct expr *
next_set (unsigned int regno, struct expr *expr)
{
do
expr = expr->next_same_hash;
while (expr && REGNO (expr->dest) != regno);
return expr;
}
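/* Together these implement iteration over all recorded sets of a register:
for (set = lookup_set (regno, table); set; set = next_set (regno, set))
...
visits every assignment to REGNO, in hash chain order. */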
/* Reset tables used to keep track of what's still available [since the
start of the block]. */
static void
reset_opr_set_tables (void)
{
/* Maintain a bitmap of which regs have been set since beginning of
the block. */
CLEAR_REG_SET (reg_set_bitmap);
}
/* Return nonzero if the register X has not been set yet [since the
start of the basic block containing INSN]. */
static int
reg_not_set_p (const_rtx x, const_rtx insn ATTRIBUTE_UNUSED)
{
return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x));
}
/* Record things set by INSN.
This data is used by reg_not_set_p. */
static void
mark_oprs_set (rtx insn)
{
struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
df_ref *def_rec;
for (def_rec = DF_INSN_INFO_DEFS (insn_info); *def_rec; def_rec++)
SET_REGNO_REG_SET (reg_set_bitmap, DF_REF_REGNO (*def_rec));
}
/* Compute copy/constant propagation working variables. */
/* Local properties of assignments. */
static sbitmap *cprop_avloc;
static sbitmap *cprop_kill;
/* Global properties of assignments (computed from the local properties). */
static sbitmap *cprop_avin;
static sbitmap *cprop_avout;
/* Allocate vars used for copy/const propagation. N_BLOCKS is the number of
basic blocks. N_SETS is the number of sets. */
static void
alloc_cprop_mem (int n_blocks, int n_sets)
{
cprop_avloc = sbitmap_vector_alloc (n_blocks, n_sets);
cprop_kill = sbitmap_vector_alloc (n_blocks, n_sets);
cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
}
/* Free vars used by copy/const propagation. */
static void
free_cprop_mem (void)
{
sbitmap_vector_free (cprop_avloc);
sbitmap_vector_free (cprop_kill);
sbitmap_vector_free (cprop_avin);
sbitmap_vector_free (cprop_avout);
}
/* Compute the local properties of each recorded expression.
Local properties are those that are defined by the block, irrespective of
other blocks.
An expression is killed in a block if its operands, either DEST or SRC, are
modified in the block.
An expression is computed (locally available) in a block if it is computed
at least once and the expression would still contain the same value if the
computation were moved to the end of the block.
KILL and COMP are destination sbitmaps for recording local properties. */
static void
compute_local_properties (sbitmap *kill, sbitmap *comp,
struct hash_table_d *table)
{
unsigned int i;
/* Initialize the bitmaps that were passed in. */
sbitmap_vector_zero (kill, last_basic_block);
sbitmap_vector_zero (comp, last_basic_block);
for (i = 0; i < table->size; i++)
{
struct expr *expr;
for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
{
int indx = expr->bitmap_index;
df_ref def;
struct occr *occr;
/* For each definition of the destination pseudo-reg, the expression
is killed in the block where the definition is. */
for (def = DF_REG_DEF_CHAIN (REGNO (expr->dest));
def; def = DF_REF_NEXT_REG (def))
SET_BIT (kill[DF_REF_BB (def)->index], indx);
/* If the source is a pseudo-reg, for each definition of the source,
the expression is killed in the block where the definition is. */
if (REG_P (expr->src))
for (def = DF_REG_DEF_CHAIN (REGNO (expr->src));
def; def = DF_REF_NEXT_REG (def))
SET_BIT (kill[DF_REF_BB (def)->index], indx);
/* The occurrences recorded in avail_occr are exactly those that
are locally available in the block where they are. */
for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
{
SET_BIT (comp[BLOCK_FOR_INSN (occr->insn)->index], indx);
}
}
}
}
/* Hash table support. */
/* Top level routine to do the dataflow analysis needed by copy/const
propagation. */
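/* compute_available (defined elsewhere) solves the usual availability
equations to a fixed point:
AVIN(bb)  = intersection of AVOUT(p) over all predecessors p of bb
AVOUT(bb) = AVLOC(bb) | (AVIN(bb) & ~KILL(bb))
with AVIN of the entry block empty. */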
static void
compute_cprop_data (void)
{
basic_block bb;
compute_local_properties (cprop_kill, cprop_avloc, &set_hash_table);
compute_available (cprop_avloc, cprop_kill, cprop_avout, cprop_avin);
/* Merge implicit sets into CPROP_AVIN. They are always available at the
entry of their basic block. We need to do this because 1) implicit sets
aren't recorded for the local pass so they cannot be propagated within
their basic block by this pass and 2) the global pass would otherwise
propagate them only in the successors of their basic block. */
FOR_EACH_BB (bb)
{
int index = implicit_set_indexes[bb->index];
if (index != -1)
SET_BIT (cprop_avin[bb->index], index);
}
}
/* Copy/constant propagation. */
/* Maximum number of register uses in an insn that we handle. */
#define MAX_USES 8
/* Table of uses (registers, both hard and pseudo) found in an insn.
Allocated statically to avoid alloc/free complexity and overhead. */
static rtx reg_use_table[MAX_USES];
/* Index into `reg_use_table' while building it. */
static unsigned reg_use_count;
/* Set up a list of register numbers used in INSN. The found uses are stored
in `reg_use_table'. `reg_use_count' is initialized to zero before entry,
and contains the number of uses in the table upon exit.
??? If a register appears multiple times we will record it multiple times.
This doesn't hurt anything but it will slow things down. */
static void
find_used_regs (rtx *xptr, void *data ATTRIBUTE_UNUSED)
{
int i, j;
enum rtx_code code;
const char *fmt;
rtx x = *xptr;
/* repeat is used to turn tail-recursion into iteration since GCC
can't do it when there's no return value. */
repeat:
if (x == 0)
return;
code = GET_CODE (x);
if (REG_P (x))
{
if (reg_use_count == MAX_USES)
return;
reg_use_table[reg_use_count] = x;
reg_use_count++;
}
/* Recursively scan the operands of this expression. */
for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
{
if (fmt[i] == 'e')
{
/* If we are about to do the last recursive call
needed at this level, change it into iteration.
This function is called enough to be worth it. */
if (i == 0)
{
x = XEXP (x, 0);
goto repeat;
}
find_used_regs (&XEXP (x, i), data);
}
else if (fmt[i] == 'E')
for (j = 0; j < XVECLEN (x, i); j++)
find_used_regs (&XVECEXP (x, i, j), data);
}
}
/* Try to replace all uses of FROM in INSN with TO.
Return nonzero if successful. */
static int
try_replace_reg (rtx from, rtx to, rtx insn)
{
rtx note = find_reg_equal_equiv_note (insn);
rtx src = 0;
int success = 0;
rtx set = single_set (insn);
/* Usually we substitute easy stuff, so we won't copy everything.
We however need to take care to not duplicate non-trivial CONST
expressions. */
to = copy_rtx (to);
validate_replace_src_group (from, to, insn);
if (num_changes_pending () && apply_change_group ())
success = 1;
/* Try to simplify SET_SRC if we have substituted a constant. */
if (success && set && CONSTANT_P (to))
{
src = simplify_rtx (SET_SRC (set));
if (src)
validate_change (insn, &SET_SRC (set), src, 0);
}
/* If there is already a REG_EQUAL note, update the expression in it
with our replacement. */
if (note != 0 && REG_NOTE_KIND (note) == REG_EQUAL)
set_unique_reg_note (insn, REG_EQUAL,
simplify_replace_rtx (XEXP (note, 0), from, to));
if (!success && set && reg_mentioned_p (from, SET_SRC (set)))
{
/* If above failed and this is a single set, try to simplify the source
of the set given our substitution. We could perhaps try this for
multiple SETs, but it probably won't buy us anything. */
src = simplify_replace_rtx (SET_SRC (set), from, to);
if (!rtx_equal_p (src, SET_SRC (set))
&& validate_change (insn, &SET_SRC (set), src, 0))
success = 1;
/* If we've failed to perform the replacement, have a single SET to
a REG destination and don't yet have a note, add a REG_EQUAL note
to not lose information. */
if (!success && note == 0 && set != 0 && REG_P (SET_DEST (set)))
note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));
}
if (set && MEM_P (SET_DEST (set)) && reg_mentioned_p (from, SET_DEST (set)))
{
/* Registers can also appear as uses in SET_DEST if it is a MEM.
We could perhaps try this for multiple SETs, but it probably
won't buy us anything. */
rtx dest = simplify_replace_rtx (SET_DEST (set), from, to);
if (!rtx_equal_p (dest, SET_DEST (set))
&& validate_change (insn, &SET_DEST (set), dest, 0))
success = 1;
}
/* REG_EQUAL may get simplified into a register.
We don't allow that, so remove the note. This case ought
not to happen, because previous code ought to synthesize a
reg-reg move, but be on the safe side. */
if (note && REG_NOTE_KIND (note) == REG_EQUAL && REG_P (XEXP (note, 0)))
remove_note (insn, note);
return success;
}
/* Find a set of register REGNO that is available on entry to INSN's block.
Return NULL if no such set is found. */
static struct expr *
find_avail_set (int regno, rtx insn)
{
/* SET1 contains the last set found that can be returned to the caller for
use in a substitution. */
struct expr *set1 = 0;
/* Loops are not possible here. To get a loop we would need two sets
available at the start of the block containing INSN. i.e. we would
need two sets like this available at the start of the block:
(set (reg X) (reg Y))
(set (reg Y) (reg X))
This can not happen since the set of (reg Y) would have killed the
set of (reg X) making it unavailable at the start of this block. */
while (1)
{
rtx src;
struct expr *set = lookup_set (regno, &set_hash_table);
/* Find a set that is available at the start of the block
which contains INSN. */
while (set)
{
if (TEST_BIT (cprop_avin[BLOCK_FOR_INSN (insn)->index],
set->bitmap_index))
break;
set = next_set (regno, set);
}
/* If no available set was found we've reached the end of the
(possibly empty) copy chain. */
if (set == 0)
break;
src = set->src;
/* We know the set is available.
Now check that SRC is locally anticipatable (i.e. none of the
source operands have changed since the start of the block).
If the source operand changed, we may still use it for the next
iteration of this loop, but we may not use it for substitutions. */
if (cprop_constant_p (src) || reg_not_set_p (src, insn))
set1 = set;
/* If the source of the set is anything except a register, then
we have reached the end of the copy chain. */
if (! REG_P (src))
break;
/* Follow the copy chain, i.e. start another iteration of the loop
and see if we have an available copy into SRC. */
regno = REGNO (src);
}
/* SET1 holds the last set that was available and anticipatable at
INSN. */
return set1;
}
/* Subroutine of cprop_insn that tries to propagate constants into
JUMP_INSNS. JUMP must be a conditional jump. If SETCC is non-NULL
it is the instruction that immediately precedes JUMP, and must be a
single SET of a register. FROM is what we will try to replace,
SRC is the constant we will try to substitute for it. Return nonzero
if a change was made. */
static int
cprop_jump (basic_block bb, rtx setcc, rtx jump, rtx from, rtx src)
{
rtx new_rtx, set_src, note_src;
rtx set = pc_set (jump);
rtx note = find_reg_equal_equiv_note (jump);
if (note)
{
note_src = XEXP (note, 0);
if (GET_CODE (note_src) == EXPR_LIST)
note_src = NULL_RTX;
}
else note_src = NULL_RTX;
/* Prefer REG_EQUAL notes except those containing EXPR_LISTs. */
set_src = note_src ? note_src : SET_SRC (set);
/* First substitute the SETCC condition into the JUMP instruction,
then substitute the given values into this expanded JUMP. */
if (setcc != NULL_RTX
&& !modified_between_p (from, setcc, jump)
&& !modified_between_p (src, setcc, jump))
{
rtx setcc_src;
rtx setcc_set = single_set (setcc);
rtx setcc_note = find_reg_equal_equiv_note (setcc);
setcc_src = (setcc_note && GET_CODE (XEXP (setcc_note, 0)) != EXPR_LIST)
? XEXP (setcc_note, 0) : SET_SRC (setcc_set);
set_src = simplify_replace_rtx (set_src, SET_DEST (setcc_set),
setcc_src);
}
else
setcc = NULL_RTX;
new_rtx = simplify_replace_rtx (set_src, from, src);
/* If no simplification can be made, then try the next register. */
if (rtx_equal_p (new_rtx, SET_SRC (set)))
return 0;
/* If this is now a no-op delete it, otherwise this must be a valid insn. */
if (new_rtx == pc_rtx)
delete_insn (jump);
else
{
/* Ensure that the value computed inside the jump insn is equivalent
to the one computed by setcc. */
if (setcc && modified_in_p (new_rtx, setcc))
return 0;
if (! validate_unshare_change (jump, &SET_SRC (set), new_rtx, 0))
{
/* When (some) constants are not valid in a comparison, and there
are two registers to be replaced by constants before the entire
comparison can be folded into a constant, we need to keep
intermediate information in REG_EQUAL notes. For targets with
separate compare insns, such notes are added by try_replace_reg.
When we have a combined compare-and-branch instruction, however,
we need to attach a note to the branch itself to make this
optimization work. */
if (!rtx_equal_p (new_rtx, note_src))
set_unique_reg_note (jump, REG_EQUAL, copy_rtx (new_rtx));
return 0;
}
/* Remove REG_EQUAL note after simplification. */
if (note_src)
remove_note (jump, note);
}
#ifdef HAVE_cc0
/* Delete the cc0 setter. */
if (setcc != NULL && CC0_P (SET_DEST (single_set (setcc))))
delete_insn (setcc);
#endif
global_const_prop_count++;
if (dump_file != NULL)
{
fprintf (dump_file,
"GLOBAL CONST-PROP: Replacing reg %d in jump_insn %d with"
"constant ", REGNO (from), INSN_UID (jump));
print_rtl (dump_file, src);
fprintf (dump_file, "\n");
}
purge_dead_edges (bb);
/* If a conditional jump has been changed into an unconditional jump, remove
the jump and make the edge fallthru - this is always called in
cfglayout mode. */
if (new_rtx != pc_rtx && simplejump_p (jump))
{
edge e;
edge_iterator ei;
FOR_EACH_EDGE (e, ei, bb->succs)
if (e->dest != EXIT_BLOCK_PTR
&& BB_HEAD (e->dest) == JUMP_LABEL (jump))
{
e->flags |= EDGE_FALLTHRU;
break;
}
delete_insn (jump);
}
return 1;
}
/* Subroutine of cprop_insn that tries to propagate constants. FROM is what
we will try to replace, SRC is the constant we will try to substitute for
it and INSN is the instruction where this will be happening. */
static int
constprop_register (rtx from, rtx src, rtx insn)
{
rtx sset;
/* Check for reg or cc0 setting instructions followed by
conditional branch instructions first. */
if ((sset = single_set (insn)) != NULL
&& NEXT_INSN (insn)
&& any_condjump_p (NEXT_INSN (insn)) && onlyjump_p (NEXT_INSN (insn)))
{
rtx dest = SET_DEST (sset);
if ((REG_P (dest) || CC0_P (dest))
&& cprop_jump (BLOCK_FOR_INSN (insn), insn, NEXT_INSN (insn),
from, src))
return 1;
}
/* Handle normal insns next. */
if (NONJUMP_INSN_P (insn) && try_replace_reg (from, src, insn))
return 1;
/* Try to propagate a CONST_INT into a conditional jump.
We're pretty specific about what we will handle in this
code, we can extend this as necessary over time.
Right now the insn in question must look like
(set (pc) (if_then_else ...)) */
else if (any_condjump_p (insn) && onlyjump_p (insn))
return cprop_jump (BLOCK_FOR_INSN (insn), NULL, insn, from, src);
return 0;
}
/* Perform constant and copy propagation on INSN.
Return nonzero if a change was made. */
static int
cprop_insn (rtx insn)
{
unsigned i;
int changed = 0, changed_this_round;
rtx note;
retry:
changed_this_round = 0;
reg_use_count = 0;
note_uses (&PATTERN (insn), find_used_regs, NULL);
/* We may win even when propagating constants into notes. */
note = find_reg_equal_equiv_note (insn);
if (note)
find_used_regs (&XEXP (note, 0), NULL);
for (i = 0; i < reg_use_count; i++)
{
rtx reg_used = reg_use_table[i];
unsigned int regno = REGNO (reg_used);
rtx src;
struct expr *set;
/* If the register has already been set in this block, there's
nothing we can do. */
if (! reg_not_set_p (reg_used, insn))
continue;
/* Find an assignment that sets reg_used and is available
at the start of the block. */
set = find_avail_set (regno, insn);
if (! set)
continue;
src = set->src;
/* Constant propagation. */
if (cprop_constant_p (src))
{
if (constprop_register (reg_used, src, insn))
{
changed_this_round = changed = 1;
global_const_prop_count++;
if (dump_file != NULL)
{
fprintf (dump_file,
"GLOBAL CONST-PROP: Replacing reg %d in ", regno);
fprintf (dump_file, "insn %d with constant ",
INSN_UID (insn));
print_rtl (dump_file, src);
fprintf (dump_file, "\n");
}
if (INSN_DELETED_P (insn))
return 1;
}
}
else if (REG_P (src)
&& REGNO (src) >= FIRST_PSEUDO_REGISTER
&& REGNO (src) != regno)
{
if (try_replace_reg (reg_used, src, insn))
{
changed_this_round = changed = 1;
global_copy_prop_count++;
if (dump_file != NULL)
{
fprintf (dump_file,
"GLOBAL COPY-PROP: Replacing reg %d in insn %d",
regno, INSN_UID (insn));
fprintf (dump_file, " with reg %d\n", REGNO (src));
}
/* The original insn setting reg_used may or may not now be
deletable. We leave the deletion to DCE. */
/* FIXME: If it turns out that the insn isn't deletable,
then we may have unnecessarily extended register lifetimes
and made things worse. */
}
}
/* If try_replace_reg simplified the insn, the regs found
by find_used_regs may not be valid anymore. Start over. */
if (changed_this_round)
goto retry;
}
if (changed && DEBUG_INSN_P (insn))
return 0;
return changed;
}
/* Like find_used_regs, but avoid recording uses that appear in
input-output contexts such as zero_extract or pre_dec. This
restricts the cases we consider to those for which local cprop
can legitimately make replacements. */
static void
local_cprop_find_used_regs (rtx *xptr, void *data)
{
rtx x = *xptr;
if (x == 0)
return;
switch (GET_CODE (x))
{
case ZERO_EXTRACT:
case SIGN_EXTRACT:
case STRICT_LOW_PART:
return;
case PRE_DEC:
case PRE_INC:
case POST_DEC:
case POST_INC:
case PRE_MODIFY:
case POST_MODIFY:
/* Can only legitimately appear this early in the context of
stack pushes for function arguments, but handle all of the
codes nonetheless. */
return;
case SUBREG:
/* Setting a subreg of a register larger than word_mode leaves
the non-written words unchanged. */
if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) > BITS_PER_WORD)
return;
break;
default:
break;
}
find_used_regs (xptr, data);
}
/* Try to perform local const/copy propagation on X in INSN. */
static bool
do_local_cprop (rtx x, rtx insn)
{
rtx newreg = NULL, newcnst = NULL;
/* Rule out USE instructions and ASM statements as we don't want to
change the hard registers mentioned. */
if (REG_P (x)
&& (REGNO (x) >= FIRST_PSEUDO_REGISTER
|| (GET_CODE (PATTERN (insn)) != USE
&& asm_noperands (PATTERN (insn)) < 0)))
{
cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
struct elt_loc_list *l;
if (!val)
return false;
for (l = val->locs; l; l = l->next)
{
rtx this_rtx = l->loc;
rtx note;
if (cprop_constant_p (this_rtx))
newcnst = this_rtx;
if (REG_P (this_rtx) && REGNO (this_rtx) >= FIRST_PSEUDO_REGISTER
/* Don't copy propagate if it has an attached REG_EQUIV note.
At this point only function parameters should have
REG_EQUIV notes, and if the argument slot is used somewhere
explicitly, it means the address of the parameter has been
taken, so we should not extend the lifetime of the pseudo. */
&& (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX))
|| ! MEM_P (XEXP (note, 0))))
newreg = this_rtx;
}
if (newcnst && constprop_register (x, newcnst, insn))
{
if (dump_file != NULL)
{
fprintf (dump_file, "LOCAL CONST-PROP: Replacing reg %d in ",
REGNO (x));
fprintf (dump_file, "insn %d with constant ",
INSN_UID (insn));
print_rtl (dump_file, newcnst);
fprintf (dump_file, "\n");
}
local_const_prop_count++;
return true;
}
else if (newreg && newreg != x && try_replace_reg (x, newreg, insn))
{
if (dump_file != NULL)
{
fprintf (dump_file,
"LOCAL COPY-PROP: Replacing reg %d in insn %d",
REGNO (x), INSN_UID (insn));
fprintf (dump_file, " with reg %d\n", REGNO (newreg));
}
local_copy_prop_count++;
return true;
}
}
return false;
}
/* Do local const/copy propagation (i.e. within each basic block). */
static int
local_cprop_pass (void)
{
basic_block bb;
rtx insn;
bool changed = false;
unsigned i;
cselib_init (0);
FOR_EACH_BB (bb)
{
FOR_BB_INSNS (bb, insn)
{
if (INSN_P (insn))
{
rtx note = find_reg_equal_equiv_note (insn);
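/* Rescan the uses of INSN after every successful replacement: a
replacement can change the pattern and expose further opportunities,
so loop until one full scan makes no change. */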
do
{
reg_use_count = 0;
note_uses (&PATTERN (insn), local_cprop_find_used_regs,
NULL);
if (note)
local_cprop_find_used_regs (&XEXP (note, 0), NULL);
for (i = 0; i < reg_use_count; i++)
{
if (do_local_cprop (reg_use_table[i], insn))
{
if (!DEBUG_INSN_P (insn))
changed = true;
break;
}
}
if (INSN_DELETED_P (insn))
break;
}
while (i < reg_use_count);
}
cselib_process_insn (insn);
}
/* Forget everything at the end of a basic block. */
cselib_clear_table ();
}
cselib_finish ();
return changed;
}
/* Similar to get_condition, only the resulting condition must be
valid at JUMP, instead of at EARLIEST.
This differs from noce_get_condition in ifcvt.c in that we prefer not to
settle for the condition variable in the jump instruction being integral.
We prefer to be able to record the value of a user variable, rather than
the value of a temporary used in a condition. This could be solved by
recording the value of *every* register scanned by canonicalize_condition,
but this would require some code reorganization. */
rtx
fis_get_condition (rtx jump)
{
return get_condition (jump, NULL, false, true);
}
/* Check the comparison COND to see if we can safely form an implicit
set from it. */
static bool
implicit_set_cond_p (const_rtx cond)
{
enum machine_mode mode;
rtx cst;
/* COND must be either an EQ or NE comparison. */
if (GET_CODE (cond) != EQ && GET_CODE (cond) != NE)
return false;
/* The first operand of COND must be a pseudo-reg. */
if (! REG_P (XEXP (cond, 0))
|| HARD_REGISTER_P (XEXP (cond, 0)))
return false;
/* The second operand of COND must be a suitable constant. */
mode = GET_MODE (XEXP (cond, 0));
cst = XEXP (cond, 1);
/* We can't perform this optimization if either operand might be or might
contain a signed zero. */
if (HONOR_SIGNED_ZEROS (mode))
{
/* It is sufficient to check if CST is or contains a zero. We must
handle float, complex, and vector. If any subpart is a zero, then
the optimization can't be performed. */
/* ??? The complex and vector checks are not implemented yet. We just
always return zero for them. */
if (GET_CODE (cst) == CONST_DOUBLE)
{
REAL_VALUE_TYPE d;
REAL_VALUE_FROM_CONST_DOUBLE (d, cst);
if (REAL_VALUES_EQUAL (d, dconst0))
return 0;
}
else
return 0;
}
return cprop_constant_p (cst);
}
/* Find the implicit sets of a function. An "implicit set" is a constraint
on the value of a variable, implied by a conditional jump. For example,
following "if (x == 2)", the then branch may be optimized as though the
conditional performed an "explicit set", in this example, "x = 2". This
function records the set patterns that are implicit at the start of each
basic block.
If an implicit set is found but the set is implicit on a critical edge,
this critical edge is split.
Return true if the CFG was modified, false otherwise. */
static bool
find_implicit_sets (void)
{
basic_block bb, dest;
rtx cond, new_rtx;
unsigned int count = 0;
bool edges_split = false;
size_t implicit_sets_size = last_basic_block + 10;
implicit_sets = XCNEWVEC (rtx, implicit_sets_size);
FOR_EACH_BB (bb)
{
/* Check for more than one successor. */
if (EDGE_COUNT (bb->succs) <= 1)
continue;
cond = fis_get_condition (BB_END (bb));
/* If no condition is found or if it isn't of a suitable form,
ignore it. */
if (! cond || ! implicit_set_cond_p (cond))
continue;
dest = GET_CODE (cond) == EQ
? BRANCH_EDGE (bb)->dest : FALLTHRU_EDGE (bb)->dest;
/* If DEST doesn't go anywhere, ignore it. */
if (! dest || dest == EXIT_BLOCK_PTR)
continue;
/* We have found a suitable implicit set. Try to record it now as
a SET in DEST. If DEST has more than one predecessor, the edge
between BB and DEST is a critical edge and we must split it,
because we can only record one implicit set per DEST basic block. */
if (! single_pred_p (dest))
{
dest = split_edge (find_edge (bb, dest));
edges_split = true;
}
if (implicit_sets_size <= (size_t) dest->index)
{
size_t old_implicit_sets_size = implicit_sets_size;
implicit_sets_size *= 2;
implicit_sets = XRESIZEVEC (rtx, implicit_sets, implicit_sets_size);
memset (implicit_sets + old_implicit_sets_size, 0,
(implicit_sets_size - old_implicit_sets_size) * sizeof (rtx));
}
new_rtx = gen_rtx_SET (VOIDmode, XEXP (cond, 0),
XEXP (cond, 1));
implicit_sets[dest->index] = new_rtx;
if (dump_file)
{
fprintf (dump_file, "Implicit set of reg %d in ",
REGNO (XEXP (cond, 0)));
fprintf (dump_file, "basic block %d\n", dest->index);
}
count++;
}
if (dump_file)
fprintf (dump_file, "Found %d implicit sets\n", count);
/* Confess our sins. */
return edges_split;
}
/* Bypass conditional jumps. */
/* The value of last_basic_block at the beginning of the jump_bypass
pass. The use of redirect_edge_and_branch_force may introduce new
basic blocks, but the data flow analysis is only valid for basic
block indices less than bypass_last_basic_block. */
static int bypass_last_basic_block;
/* Find an assignment of a constant to register REGNO that is available
at the end of basic block BB. Return NULL if no such set is found.
Based heavily upon find_avail_set. */
static struct expr *
find_bypass_set (int regno, int bb)
{
struct expr *result = 0;
for (;;)
{
rtx src;
struct expr *set = lookup_set (regno, &set_hash_table);
while (set)
{
if (TEST_BIT (cprop_avout[bb], set->bitmap_index))
break;
set = next_set (regno, set);
}
if (set == 0)
break;
src = set->src;
if (cprop_constant_p (src))
result = set;
if (! REG_P (src))
break;
regno = REGNO (src);
}
return result;
}
/* Subroutine of bypass_block that checks whether a pseudo is killed by
any of the instructions inserted on an edge. Jump bypassing places
condition code setters on CFG edges using insert_insn_on_edge. This
function is required to check that our data flow analysis is still
valid prior to commit_edge_insertions. */
static bool
reg_killed_on_edge (const_rtx reg, const_edge e)
{
rtx insn;
for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
if (INSN_P (insn) && reg_set_p (reg, insn))
return true;
return false;
}
/* Subroutine of bypass_conditional_jumps that attempts to bypass the given
basic block BB which has more than one predecessor. If not NULL, SETCC
is the first instruction of BB, which is immediately followed by JUMP_INSN
JUMP. Otherwise, SETCC is NULL, and JUMP is the first insn of BB.
Returns nonzero if a change was made.
During the jump bypassing pass, we may place copies of SETCC instructions
on CFG edges. The following routine must be careful to pay attention to
these inserted insns when performing its transformations. */
static int
bypass_block (basic_block bb, rtx setcc, rtx jump)
{
rtx insn, note;
edge e, edest;
int change;
int may_be_loop_header;
unsigned removed_p;
unsigned i;
edge_iterator ei;
insn = (setcc != NULL) ? setcc : jump;
/* Determine set of register uses in INSN. */
reg_use_count = 0;
note_uses (&PATTERN (insn), find_used_regs, NULL);
note = find_reg_equal_equiv_note (insn);
if (note)
find_used_regs (&XEXP (note, 0), NULL);
may_be_loop_header = false;
FOR_EACH_EDGE (e, ei, bb->preds)
if (e->flags & EDGE_DFS_BACK)
{
may_be_loop_header = true;
break;
}
change = 0;
for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
{
removed_p = 0;
if (e->flags & EDGE_COMPLEX)
{
ei_next (&ei);
continue;
}
/* We can't redirect edges from new basic blocks. */
if (e->src->index >= bypass_last_basic_block)
{
ei_next (&ei);
continue;
}
/* The irreducible loops created by redirecting edges entering the
loop from outside would decrease the effectiveness of some of the
following optimizations, so prevent this. */
if (may_be_loop_header
&& !(e->flags & EDGE_DFS_BACK))
{
ei_next (&ei);
continue;
}
for (i = 0; i < reg_use_count; i++)
{
rtx reg_used = reg_use_table[i];
unsigned int regno = REGNO (reg_used);
basic_block dest, old_dest;
struct expr *set;
rtx src, new_rtx;
set = find_bypass_set (regno, e->src->index);
if (! set)
continue;
/* Check the data flow is valid after edge insertions. */
if (e->insns.r && reg_killed_on_edge (reg_used, e))
continue;
src = SET_SRC (pc_set (jump));
if (setcc != NULL)
src = simplify_replace_rtx (src,
SET_DEST (PATTERN (setcc)),
SET_SRC (PATTERN (setcc)));
new_rtx = simplify_replace_rtx (src, reg_used, set->src);
/* Jump bypassing may have already placed instructions on
edges of the CFG. We can't bypass an outgoing edge that
has instructions associated with it, as these insns won't
get executed if the incoming edge is redirected. */
if (new_rtx == pc_rtx)
{
edest = FALLTHRU_EDGE (bb);
dest = edest->insns.r ? NULL : edest->dest;
}
else if (GET_CODE (new_rtx) == LABEL_REF)
{
dest = BLOCK_FOR_INSN (XEXP (new_rtx, 0));
/* Don't bypass edges containing instructions. */
edest = find_edge (bb, dest);
if (edest && edest->insns.r)
dest = NULL;
}
else
dest = NULL;
/* Avoid unification of the edge with other edges from original
branch. We would end up emitting the instruction on "both"
edges. */
if (dest && setcc && !CC0_P (SET_DEST (PATTERN (setcc)))
&& find_edge (e->src, dest))
dest = NULL;
old_dest = e->dest;
if (dest != NULL
&& dest != old_dest
&& dest != EXIT_BLOCK_PTR)
{
if (current_loops != NULL
&& e->src->loop_father->latch == e->src)
{
/* ??? Now we are creating (or may create) a loop
with multiple entries. Simply mark it for
removal. Alternatively we could not do this
threading. */
e->src->loop_father->header = NULL;
e->src->loop_father->latch = NULL;
}
redirect_edge_and_branch_force (e, dest);
/* Copy the register setter to the redirected edge.
Don't copy CC0 setters, as CC0 is dead after jump. */
if (setcc)
{
rtx pat = PATTERN (setcc);
if (!CC0_P (SET_DEST (pat)))
insert_insn_on_edge (copy_insn (pat), e);
}
if (dump_file != NULL)
{
fprintf (dump_file, "JUMP-BYPASS: Proved reg %d "
"in jump_insn %d equals constant ",
regno, INSN_UID (jump));
print_rtl (dump_file, set->src);
fprintf (dump_file, "\nBypass edge from %d->%d to %d\n",
e->src->index, old_dest->index, dest->index);
}
change = 1;
removed_p = 1;
break;
}
}
if (!removed_p)
ei_next (&ei);
}
return change;
}
/* Find basic blocks with more than one predecessor that only contain a
single conditional jump. If the result of the comparison is known at
compile-time from any incoming edge, redirect that edge to the
appropriate target. Return nonzero if a change was made.
This function is now mis-named, because we also handle indirect jumps. */
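/* A schematic example: if bb1 ends with "x = 0" and bb2 with "x = 1",
and both fall into bb3 containing only "if (x == 0) goto L1; else
goto L2;", then the edge from bb1 can be redirected straight to L1
and the edge from bb2 straight to L2, bypassing the jump in bb3. */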
static int
bypass_conditional_jumps (void)
{
basic_block bb;
int changed;
rtx setcc;
rtx insn;
rtx dest;
/* Note we start at block 1. */
if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
return 0;
bypass_last_basic_block = last_basic_block;
mark_dfs_back_edges ();
changed = 0;
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb,
EXIT_BLOCK_PTR, next_bb)
{
/* Check for more than one predecessor. */
if (!single_pred_p (bb))
{
setcc = NULL_RTX;
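/* Walk the insns of BB: tolerate at most one non-jump SET of a
register or cc0 (remembered in SETCC) before the conditional
jump; anything else makes the block unsuitable for bypassing. */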
FOR_BB_INSNS (bb, insn)
if (DEBUG_INSN_P (insn))
continue;
else if (NONJUMP_INSN_P (insn))
{
if (setcc)
break;
if (GET_CODE (PATTERN (insn)) != SET)
break;
dest = SET_DEST (PATTERN (insn));
if (REG_P (dest) || CC0_P (dest))
setcc = insn;
else
break;
}
else if (JUMP_P (insn))
{
if ((any_condjump_p (insn) || computed_jump_p (insn))
&& onlyjump_p (insn))
changed |= bypass_block (bb, setcc, insn);
break;
}
else if (INSN_P (insn))
break;
}
}
/* If we bypassed any register setting insns, we inserted a
copy on the redirected edge. These need to be committed. */
if (changed)
commit_edge_insertions ();
return changed;
}
/* Return true if the graph is too expensive to optimize. PASS is the
optimization about to be performed. */
static bool
is_too_expensive (const char *pass)
{
/* Trying to perform global optimizations on flow graphs which have
a high connectivity will take a long time and is unlikely to be
particularly useful.
In normal circumstances a cfg should have about twice as many
edges as blocks. But we do not want to punish small functions
which have a couple of switch statements. Rather than simply
thresholding the number of blocks, use something with more
graceful degradation. */
if (n_edges > 20000 + n_basic_blocks * 4)
{
warning (OPT_Wdisabled_optimization,
"%s: %d basic blocks and %d edges/basic block",
pass, n_basic_blocks, n_edges / n_basic_blocks);
return true;
}
/* If allocating memory for the cprop bitmap would take up too much
storage it's better just to disable the optimization. */
if ((n_basic_blocks
* SBITMAP_SET_SIZE (max_reg_num ())
* sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
{
warning (OPT_Wdisabled_optimization,
"%s: %d basic blocks and %d registers",
pass, n_basic_blocks, max_reg_num ());
return true;
}
return false;
}
/* Main function for the CPROP pass. */
static int
one_cprop_pass (void)
{
int i;
int changed = 0;
/* Return if there's nothing to do, or it is too expensive. */
if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1
|| is_too_expensive (_("const/copy propagation disabled")))
return 0;
global_const_prop_count = local_const_prop_count = 0;
global_copy_prop_count = local_copy_prop_count = 0;
bytes_used = 0;
gcc_obstack_init (&cprop_obstack);
/* Do a local const/copy propagation pass first. The global pass
only handles global opportunities.
If the local pass changes something, remove any unreachable blocks
because the CPROP global dataflow analysis may get into infinite
loops for CFGs with unreachable blocks.
FIXME: This local pass should not be necessary after CSE (but for
some reason it still is). It is also (proven) not necessary
to run the local pass right after fwprop.
FIXME: The global analysis would not get into infinite loops if it
would use the DF solver (via df_simple_dataflow) instead of
the solver implemented in this file. */
changed |= local_cprop_pass ();
if (changed)
delete_unreachable_blocks ();
/* Determine implicit sets. This may change the CFG (split critical
edges if that exposes an implicit set).
Note that find_implicit_sets() does not rely on up-to-date DF caches
so that we do not have to re-run df_analyze() even if local CPROP
changed something.
??? This could run earlier so that any uncovered implicit sets
could be exploited in local_cprop_pass() also. Later. */
changed |= find_implicit_sets ();
/* If local_cprop_pass() or find_implicit_sets() changed something,
run df_analyze() to bring all insn caches up-to-date, and to take
new basic blocks from edge splitting on the DF radar.
NB: This also runs the fast DCE pass, because execute_rtl_cprop
sets DF_LR_RUN_DCE. */
if (changed)
df_analyze ();
/* Initialize implicit_set_indexes array. */
implicit_set_indexes = XNEWVEC (int, last_basic_block);
for (i = 0; i < last_basic_block; i++)
implicit_set_indexes[i] = -1;
alloc_hash_table (&set_hash_table);
compute_hash_table (&set_hash_table);
/* Free implicit_sets before peak usage. */
free (implicit_sets);
implicit_sets = NULL;
if (dump_file)
dump_hash_table (dump_file, "SET", &set_hash_table);
if (set_hash_table.n_elems > 0)
{
basic_block bb;
rtx insn;
alloc_cprop_mem (last_basic_block, set_hash_table.n_elems);
compute_cprop_data ();
free (implicit_set_indexes);
implicit_set_indexes = NULL;
/* Allocate vars to track sets of regs. */
reg_set_bitmap = ALLOC_REG_SET (NULL);
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR,
next_bb)
{
/* Reset tables used to keep track of what's still valid [since
the start of the block]. */
reset_opr_set_tables ();
FOR_BB_INSNS (bb, insn)
if (INSN_P (insn))
{
changed |= cprop_insn (insn);
/* Keep track of everything modified by this insn. */
/* ??? Need to be careful w.r.t. mods done to INSN.
Don't call mark_oprs_set if we turned the
insn into a NOTE, or deleted the insn. */
if (! NOTE_P (insn) && ! INSN_DELETED_P (insn))
mark_oprs_set (insn);
}
}
changed |= bypass_conditional_jumps ();
FREE_REG_SET (reg_set_bitmap);
free_cprop_mem ();
}
else
{
free (implicit_set_indexes);
implicit_set_indexes = NULL;
}
free_hash_table (&set_hash_table);
obstack_free (&cprop_obstack, NULL);
if (dump_file)
{
fprintf (dump_file, "CPROP of %s, %d basic blocks, %d bytes needed, ",
current_function_name (), n_basic_blocks, bytes_used);
fprintf (dump_file, "%d local const props, %d local copy props, ",
local_const_prop_count, local_copy_prop_count);
fprintf (dump_file, "%d global const props, %d global copy props\n\n",
global_const_prop_count, global_copy_prop_count);
}
return changed;
}
/* All the passes implemented in this file. Each pass has its
own gate and execute function, and at the end of the file a
pass definition for passes.c.
We do not construct an accurate cfg in functions which call
setjmp, so none of these passes runs if the function calls
setjmp.
FIXME: Should just handle setjmp via REG_SETJMP notes. */
static bool
gate_rtl_cprop (void)
{
return optimize > 0 && flag_gcse
&& !cfun->calls_setjmp
&& dbg_cnt (cprop);
}
static unsigned int
execute_rtl_cprop (void)
{
int changed;
delete_unreachable_blocks ();
df_set_flags (DF_LR_RUN_DCE);
df_analyze ();
changed = one_cprop_pass ();
flag_rerun_cse_after_global_opts |= changed;
if (changed)
cleanup_cfg (CLEANUP_CFG_CHANGED);
return 0;
}
struct rtl_opt_pass pass_rtl_cprop =
{
{
RTL_PASS,
"cprop", /* name */
gate_rtl_cprop, /* gate */
execute_rtl_cprop, /* execute */
NULL, /* sub */
NULL, /* next */
0, /* static_pass_number */
TV_CPROP, /* tv_id */
PROP_cfglayout, /* properties_required */
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
TODO_df_finish | TODO_verify_rtl_sharing |
TODO_verify_flow | TODO_ggc_collect /* todo_flags_finish */
}
};