Commit 872f6debcae63309eb39bfc2cc9462fb83450ee0
Parent: f54d8a1b3f
Exists in master and in 39 other branches
parisc: use conditional macro for 64-bit wide ops
This work enables us to remove -traditional from $AFLAGS on parisc.

Signed-off-by: Kyle McMartin <kyle@mcmartin.ca>
Showing 3 changed files with 56 additions and 66 deletions (side-by-side diff).
arch/parisc/kernel/entry.S
... | ... | @@ -41,16 +41,8 @@ |
41 | 41 | #include <linux/init.h> |
42 | 42 | |
43 | 43 | #ifdef CONFIG_64BIT |
44 | -#define CMPIB cmpib,* | |
45 | -#define CMPB cmpb,* | |
46 | -#define COND(x) *x | |
47 | - | |
48 | 44 | .level 2.0w |
49 | 45 | #else |
50 | -#define CMPIB cmpib, | |
51 | -#define CMPB cmpb, | |
52 | -#define COND(x) x | |
53 | - | |
54 | 46 | .level 2.0 |
55 | 47 | #endif |
56 | 48 | |
57 | 49 | |
... | ... | @@ -958,9 +950,9 @@ |
958 | 950 | * Only do signals if we are returning to user space |
959 | 951 | */ |
960 | 952 | LDREG PT_IASQ0(%r16), %r20 |
961 | - CMPIB=,n 0,%r20,intr_restore /* backward */ | |
953 | + cmpib,COND(=),n 0,%r20,intr_restore /* backward */ | |
962 | 954 | LDREG PT_IASQ1(%r16), %r20 |
963 | - CMPIB=,n 0,%r20,intr_restore /* backward */ | |
955 | + cmpib,COND(=),n 0,%r20,intr_restore /* backward */ | |
964 | 956 | |
965 | 957 | copy %r0, %r25 /* long in_syscall = 0 */ |
966 | 958 | #ifdef CONFIG_64BIT |
967 | 959 | |
... | ... | @@ -1014,10 +1006,10 @@ |
1014 | 1006 | * we jump back to intr_restore. |
1015 | 1007 | */ |
1016 | 1008 | LDREG PT_IASQ0(%r16), %r20 |
1017 | - CMPIB= 0, %r20, intr_do_preempt | |
1009 | + cmpib,COND(=) 0, %r20, intr_do_preempt | |
1018 | 1010 | nop |
1019 | 1011 | LDREG PT_IASQ1(%r16), %r20 |
1020 | - CMPIB= 0, %r20, intr_do_preempt | |
1012 | + cmpib,COND(=) 0, %r20, intr_do_preempt | |
1021 | 1013 | nop |
1022 | 1014 | |
1023 | 1015 | #ifdef CONFIG_64BIT |
... | ... | @@ -1046,7 +1038,7 @@ |
1046 | 1038 | /* current_thread_info()->preempt_count */ |
1047 | 1039 | mfctl %cr30, %r1 |
1048 | 1040 | LDREG TI_PRE_COUNT(%r1), %r19 |
1049 | - CMPIB<> 0, %r19, intr_restore /* if preempt_count > 0 */ | |
1041 | + cmpib,COND(<>) 0, %r19, intr_restore /* if preempt_count > 0 */ | |
1050 | 1042 | nop /* prev insn branched backwards */ |
1051 | 1043 | |
1052 | 1044 | /* check if we interrupted a critical path */ |
... | ... | @@ -1065,7 +1057,7 @@ |
1065 | 1057 | */ |
1066 | 1058 | |
1067 | 1059 | intr_extint: |
1068 | - CMPIB=,n 0,%r16,1f | |
1060 | + cmpib,COND(=),n 0,%r16,1f | |
1069 | 1061 | |
1070 | 1062 | get_stack_use_cr30 |
1071 | 1063 | b,n 2f |
... | ... | @@ -1100,7 +1092,7 @@ |
1100 | 1092 | |
1101 | 1093 | ENTRY(intr_save) /* for os_hpmc */ |
1102 | 1094 | mfsp %sr7,%r16 |
1103 | - CMPIB=,n 0,%r16,1f | |
1095 | + cmpib,COND(=),n 0,%r16,1f | |
1104 | 1096 | get_stack_use_cr30 |
1105 | 1097 | b 2f |
1106 | 1098 | copy %r8,%r26 |
... | ... | @@ -1122,7 +1114,7 @@ |
1122 | 1114 | * adjust isr/ior below. |
1123 | 1115 | */ |
1124 | 1116 | |
1125 | - CMPIB=,n 6,%r26,skip_save_ior | |
1117 | + cmpib,COND(=),n 6,%r26,skip_save_ior | |
1126 | 1118 | |
1127 | 1119 | |
1128 | 1120 | mfctl %cr20, %r16 /* isr */ |
1129 | 1121 | |
... | ... | @@ -1451,11 +1443,11 @@ |
1451 | 1443 | bb,>=,n %r9,26,nadtlb_nullify /* m bit not set, just nullify */ |
1452 | 1444 | BL get_register,%r25 |
1453 | 1445 | extrw,u %r9,15,5,%r8 /* Get index register # */ |
1454 | - CMPIB=,n -1,%r1,nadtlb_fault /* have to use slow path */ | |
1446 | + cmpib,COND(=),n -1,%r1,nadtlb_fault /* have to use slow path */ | |
1455 | 1447 | copy %r1,%r24 |
1456 | 1448 | BL get_register,%r25 |
1457 | 1449 | extrw,u %r9,10,5,%r8 /* Get base register # */ |
1458 | - CMPIB=,n -1,%r1,nadtlb_fault /* have to use slow path */ | |
1450 | + cmpib,COND(=),n -1,%r1,nadtlb_fault /* have to use slow path */ | |
1459 | 1451 | BL set_register,%r25 |
1460 | 1452 | add,l %r1,%r24,%r1 /* doesn't affect c/b bits */ |
1461 | 1453 | |
... | ... | @@ -1487,7 +1479,7 @@ |
1487 | 1479 | cmpb,<>,n %r16,%r17,nadtlb_fault /* Must be probe,[rw]*/ |
1488 | 1480 | BL get_register,%r25 /* Find the target register */ |
1489 | 1481 | extrw,u %r9,31,5,%r8 /* Get target register */ |
1490 | - CMPIB=,n -1,%r1,nadtlb_fault /* have to use slow path */ | |
1482 | + cmpib,COND(=),n -1,%r1,nadtlb_fault /* have to use slow path */ | |
1491 | 1483 | BL set_register,%r25 |
1492 | 1484 | copy %r0,%r1 /* Write zero to target register */ |
1493 | 1485 | b nadtlb_nullify /* Nullify return insn */ |
1494 | 1486 | |
... | ... | @@ -1571,12 +1563,12 @@ |
1571 | 1563 | L3_ptep ptp,pte,t0,va,dbit_fault |
1572 | 1564 | |
1573 | 1565 | #ifdef CONFIG_SMP |
1574 | - CMPIB=,n 0,spc,dbit_nolock_20w | |
1566 | + cmpib,COND(=),n 0,spc,dbit_nolock_20w | |
1575 | 1567 | load32 PA(pa_dbit_lock),t0 |
1576 | 1568 | |
1577 | 1569 | dbit_spin_20w: |
1578 | 1570 | LDCW 0(t0),t1 |
1579 | - cmpib,= 0,t1,dbit_spin_20w | |
1571 | + cmpib,COND(=) 0,t1,dbit_spin_20w | |
1580 | 1572 | nop |
1581 | 1573 | |
1582 | 1574 | dbit_nolock_20w: |
... | ... | @@ -1587,7 +1579,7 @@ |
1587 | 1579 | |
1588 | 1580 | idtlbt pte,prot |
1589 | 1581 | #ifdef CONFIG_SMP |
1590 | - CMPIB=,n 0,spc,dbit_nounlock_20w | |
1582 | + cmpib,COND(=),n 0,spc,dbit_nounlock_20w | |
1591 | 1583 | ldi 1,t1 |
1592 | 1584 | stw t1,0(t0) |
1593 | 1585 | |
... | ... | @@ -1607,7 +1599,7 @@ |
1607 | 1599 | L2_ptep ptp,pte,t0,va,dbit_fault |
1608 | 1600 | |
1609 | 1601 | #ifdef CONFIG_SMP |
1610 | - CMPIB=,n 0,spc,dbit_nolock_11 | |
1602 | + cmpib,COND(=),n 0,spc,dbit_nolock_11 | |
1611 | 1603 | load32 PA(pa_dbit_lock),t0 |
1612 | 1604 | |
1613 | 1605 | dbit_spin_11: |
... | ... | @@ -1629,7 +1621,7 @@ |
1629 | 1621 | |
1630 | 1622 | mtsp t1, %sr1 /* Restore sr1 */ |
1631 | 1623 | #ifdef CONFIG_SMP |
1632 | - CMPIB=,n 0,spc,dbit_nounlock_11 | |
1624 | + cmpib,COND(=),n 0,spc,dbit_nounlock_11 | |
1633 | 1625 | ldi 1,t1 |
1634 | 1626 | stw t1,0(t0) |
1635 | 1627 | |
... | ... | @@ -1647,7 +1639,7 @@ |
1647 | 1639 | L2_ptep ptp,pte,t0,va,dbit_fault |
1648 | 1640 | |
1649 | 1641 | #ifdef CONFIG_SMP |
1650 | - CMPIB=,n 0,spc,dbit_nolock_20 | |
1642 | + cmpib,COND(=),n 0,spc,dbit_nolock_20 | |
1651 | 1643 | load32 PA(pa_dbit_lock),t0 |
1652 | 1644 | |
1653 | 1645 | dbit_spin_20: |
... | ... | @@ -1666,7 +1658,7 @@ |
1666 | 1658 | idtlbt pte,prot |
1667 | 1659 | |
1668 | 1660 | #ifdef CONFIG_SMP |
1669 | - CMPIB=,n 0,spc,dbit_nounlock_20 | |
1661 | + cmpib,COND(=),n 0,spc,dbit_nounlock_20 | |
1670 | 1662 | ldi 1,t1 |
1671 | 1663 | stw t1,0(t0) |
1672 | 1664 | |
... | ... | @@ -1995,7 +1987,7 @@ |
1995 | 1987 | |
1996 | 1988 | /* We can't use "CMPIB<> PER_HPUX" since "im5" field is sign extended */ |
1997 | 1989 | ldo -PER_HPUX(%r19), %r19 |
1998 | - CMPIB<>,n 0,%r19,1f | |
1990 | + cmpib,COND(<>),n 0,%r19,1f | |
1999 | 1991 | |
2000 | 1992 | /* Save other hpux returns if personality is PER_HPUX */ |
2001 | 1993 | STREG %r22,TASK_PT_GR22(%r1) |
arch/parisc/kernel/pacache.S
... | ... | @@ -86,7 +86,7 @@ |
86 | 86 | LDREG ITLB_OFF_COUNT(%r1), %arg2 |
87 | 87 | LDREG ITLB_LOOP(%r1), %arg3 |
88 | 88 | |
89 | - ADDIB= -1, %arg3, fitoneloop /* Preadjust and test */ | |
89 | + addib,COND(=) -1, %arg3, fitoneloop /* Preadjust and test */ | |
90 | 90 | movb,<,n %arg3, %r31, fitdone /* If loop < 0, skip */ |
91 | 91 | copy %arg0, %r28 /* Init base addr */ |
92 | 92 | |
93 | 93 | |
94 | 94 | |
... | ... | @@ -96,14 +96,14 @@ |
96 | 96 | copy %arg2, %r29 /* Init middle loop count */ |
97 | 97 | |
98 | 98 | fitmanymiddle: /* Loop if LOOP >= 2 */ |
99 | - ADDIB> -1, %r31, fitmanymiddle /* Adjusted inner loop decr */ | |
99 | + addib,COND(>) -1, %r31, fitmanymiddle /* Adjusted inner loop decr */ | |
100 | 100 | pitlbe 0(%sr1, %r28) |
101 | 101 | pitlbe,m %arg1(%sr1, %r28) /* Last pitlbe and addr adjust */ |
102 | - ADDIB> -1, %r29, fitmanymiddle /* Middle loop decr */ | |
102 | + addib,COND(>) -1, %r29, fitmanymiddle /* Middle loop decr */ | |
103 | 103 | copy %arg3, %r31 /* Re-init inner loop count */ |
104 | 104 | |
105 | 105 | movb,tr %arg0, %r28, fitmanyloop /* Re-init base addr */ |
106 | - ADDIB<=,n -1, %r22, fitdone /* Outer loop count decr */ | |
106 | + addib,COND(<=),n -1, %r22, fitdone /* Outer loop count decr */ | |
107 | 107 | |
108 | 108 | fitoneloop: /* Loop if LOOP = 1 */ |
109 | 109 | mtsp %r20, %sr1 |
110 | 110 | |
... | ... | @@ -111,10 +111,10 @@ |
111 | 111 | copy %arg2, %r29 /* init middle loop count */ |
112 | 112 | |
113 | 113 | fitonemiddle: /* Loop if LOOP = 1 */ |
114 | - ADDIB> -1, %r29, fitonemiddle /* Middle loop count decr */ | |
114 | + addib,COND(>) -1, %r29, fitonemiddle /* Middle loop count decr */ | |
115 | 115 | pitlbe,m %arg1(%sr1, %r28) /* pitlbe for one loop */ |
116 | 116 | |
117 | - ADDIB> -1, %r22, fitoneloop /* Outer loop count decr */ | |
117 | + addib,COND(>) -1, %r22, fitoneloop /* Outer loop count decr */ | |
118 | 118 | add %r21, %r20, %r20 /* increment space */ |
119 | 119 | |
120 | 120 | fitdone: |
... | ... | @@ -129,7 +129,7 @@ |
129 | 129 | LDREG DTLB_OFF_COUNT(%r1), %arg2 |
130 | 130 | LDREG DTLB_LOOP(%r1), %arg3 |
131 | 131 | |
132 | - ADDIB= -1, %arg3, fdtoneloop /* Preadjust and test */ | |
132 | + addib,COND(=) -1, %arg3, fdtoneloop /* Preadjust and test */ | |
133 | 133 | movb,<,n %arg3, %r31, fdtdone /* If loop < 0, skip */ |
134 | 134 | copy %arg0, %r28 /* Init base addr */ |
135 | 135 | |
136 | 136 | |
137 | 137 | |
... | ... | @@ -139,14 +139,14 @@ |
139 | 139 | copy %arg2, %r29 /* Init middle loop count */ |
140 | 140 | |
141 | 141 | fdtmanymiddle: /* Loop if LOOP >= 2 */ |
142 | - ADDIB> -1, %r31, fdtmanymiddle /* Adjusted inner loop decr */ | |
142 | + addib,COND(>) -1, %r31, fdtmanymiddle /* Adjusted inner loop decr */ | |
143 | 143 | pdtlbe 0(%sr1, %r28) |
144 | 144 | pdtlbe,m %arg1(%sr1, %r28) /* Last pdtlbe and addr adjust */ |
145 | - ADDIB> -1, %r29, fdtmanymiddle /* Middle loop decr */ | |
145 | + addib,COND(>) -1, %r29, fdtmanymiddle /* Middle loop decr */ | |
146 | 146 | copy %arg3, %r31 /* Re-init inner loop count */ |
147 | 147 | |
148 | 148 | movb,tr %arg0, %r28, fdtmanyloop /* Re-init base addr */ |
149 | - ADDIB<=,n -1, %r22,fdtdone /* Outer loop count decr */ | |
149 | + addib,COND(<=),n -1, %r22,fdtdone /* Outer loop count decr */ | |
150 | 150 | |
151 | 151 | fdtoneloop: /* Loop if LOOP = 1 */ |
152 | 152 | mtsp %r20, %sr1 |
153 | 153 | |
... | ... | @@ -154,10 +154,10 @@ |
154 | 154 | copy %arg2, %r29 /* init middle loop count */ |
155 | 155 | |
156 | 156 | fdtonemiddle: /* Loop if LOOP = 1 */ |
157 | - ADDIB> -1, %r29, fdtonemiddle /* Middle loop count decr */ | |
157 | + addib,COND(>) -1, %r29, fdtonemiddle /* Middle loop count decr */ | |
158 | 158 | pdtlbe,m %arg1(%sr1, %r28) /* pdtlbe for one loop */ |
159 | 159 | |
160 | - ADDIB> -1, %r22, fdtoneloop /* Outer loop count decr */ | |
160 | + addib,COND(>) -1, %r22, fdtoneloop /* Outer loop count decr */ | |
161 | 161 | add %r21, %r20, %r20 /* increment space */ |
162 | 162 | |
163 | 163 | |
164 | 164 | |
165 | 165 | |
166 | 166 | |
... | ... | @@ -210,18 +210,18 @@ |
210 | 210 | LDREG ICACHE_COUNT(%r1), %arg2 |
211 | 211 | LDREG ICACHE_LOOP(%r1), %arg3 |
212 | 212 | rsm PSW_SM_I, %r22 /* No mmgt ops during loop*/ |
213 | - ADDIB= -1, %arg3, fioneloop /* Preadjust and test */ | |
213 | + addib,COND(=) -1, %arg3, fioneloop /* Preadjust and test */ | |
214 | 214 | movb,<,n %arg3, %r31, fisync /* If loop < 0, do sync */ |
215 | 215 | |
216 | 216 | fimanyloop: /* Loop if LOOP >= 2 */ |
217 | - ADDIB> -1, %r31, fimanyloop /* Adjusted inner loop decr */ | |
217 | + addib,COND(>) -1, %r31, fimanyloop /* Adjusted inner loop decr */ | |
218 | 218 | fice %r0(%sr1, %arg0) |
219 | 219 | fice,m %arg1(%sr1, %arg0) /* Last fice and addr adjust */ |
220 | 220 | movb,tr %arg3, %r31, fimanyloop /* Re-init inner loop count */ |
221 | - ADDIB<=,n -1, %arg2, fisync /* Outer loop decr */ | |
221 | + addib,COND(<=),n -1, %arg2, fisync /* Outer loop decr */ | |
222 | 222 | |
223 | 223 | fioneloop: /* Loop if LOOP = 1 */ |
224 | - ADDIB> -1, %arg2, fioneloop /* Outer loop count decr */ | |
224 | + addib,COND(>) -1, %arg2, fioneloop /* Outer loop count decr */ | |
225 | 225 | fice,m %arg1(%sr1, %arg0) /* Fice for one loop */ |
226 | 226 | |
227 | 227 | fisync: |
228 | 228 | |
229 | 229 | |
230 | 230 | |
... | ... | @@ -251,18 +251,18 @@ |
251 | 251 | LDREG DCACHE_COUNT(%r1), %arg2 |
252 | 252 | LDREG DCACHE_LOOP(%r1), %arg3 |
253 | 253 | rsm PSW_SM_I, %r22 |
254 | - ADDIB= -1, %arg3, fdoneloop /* Preadjust and test */ | |
254 | + addib,COND(=) -1, %arg3, fdoneloop /* Preadjust and test */ | |
255 | 255 | movb,<,n %arg3, %r31, fdsync /* If loop < 0, do sync */ |
256 | 256 | |
257 | 257 | fdmanyloop: /* Loop if LOOP >= 2 */ |
258 | - ADDIB> -1, %r31, fdmanyloop /* Adjusted inner loop decr */ | |
258 | + addib,COND(>) -1, %r31, fdmanyloop /* Adjusted inner loop decr */ | |
259 | 259 | fdce %r0(%sr1, %arg0) |
260 | 260 | fdce,m %arg1(%sr1, %arg0) /* Last fdce and addr adjust */ |
261 | 261 | movb,tr %arg3, %r31, fdmanyloop /* Re-init inner loop count */ |
262 | - ADDIB<=,n -1, %arg2, fdsync /* Outer loop decr */ | |
262 | + addib,COND(<=),n -1, %arg2, fdsync /* Outer loop decr */ | |
263 | 263 | |
264 | 264 | fdoneloop: /* Loop if LOOP = 1 */ |
265 | - ADDIB> -1, %arg2, fdoneloop /* Outer loop count decr */ | |
265 | + addib,COND(>) -1, %arg2, fdoneloop /* Outer loop count decr */ | |
266 | 266 | fdce,m %arg1(%sr1, %arg0) /* Fdce for one loop */ |
267 | 267 | |
268 | 268 | fdsync: |
... | ... | @@ -343,7 +343,7 @@ |
343 | 343 | * non-taken backward branch. Note that .+4 is a backwards branch. |
344 | 344 | * The ldd should only get executed if the branch is taken. |
345 | 345 | */ |
346 | - ADDIB>,n -1, %r1, 1b /* bundle 10 */ | |
346 | + addib,COND(>),n -1, %r1, 1b /* bundle 10 */ | |
347 | 347 | ldd 0(%r25), %r19 /* start next loads */ |
348 | 348 | |
349 | 349 | #else |
... | ... | @@ -392,7 +392,7 @@ |
392 | 392 | stw %r21, 56(%r26) |
393 | 393 | stw %r22, 60(%r26) |
394 | 394 | ldo 64(%r26), %r26 |
395 | - ADDIB>,n -1, %r1, 1b | |
395 | + addib,COND(>),n -1, %r1, 1b | |
396 | 396 | ldw 0(%r25), %r19 |
397 | 397 | #endif |
398 | 398 | bv %r0(%r2) |
... | ... | @@ -516,7 +516,7 @@ |
516 | 516 | stw %r21, 56(%r28) |
517 | 517 | stw %r22, 60(%r28) |
518 | 518 | ldo 64(%r28), %r28 |
519 | - ADDIB> -1, %r1,1b | |
519 | + addib,COND(>) -1, %r1,1b | |
520 | 520 | ldo 64(%r29), %r29 |
521 | 521 | |
522 | 522 | bv %r0(%r2) |
... | ... | @@ -575,7 +575,7 @@ |
575 | 575 | std %r0, 104(%r28) |
576 | 576 | std %r0, 112(%r28) |
577 | 577 | std %r0, 120(%r28) |
578 | - ADDIB> -1, %r1, 1b | |
578 | + addib,COND(>) -1, %r1, 1b | |
579 | 579 | ldo 128(%r28), %r28 |
580 | 580 | |
581 | 581 | #else /* ! CONFIG_64BIT */ |
... | ... | @@ -598,7 +598,7 @@ |
598 | 598 | stw %r0, 52(%r28) |
599 | 599 | stw %r0, 56(%r28) |
600 | 600 | stw %r0, 60(%r28) |
601 | - ADDIB> -1, %r1, 1b | |
601 | + addib,COND(>) -1, %r1, 1b | |
602 | 602 | ldo 64(%r28), %r28 |
603 | 603 | #endif /* CONFIG_64BIT */ |
604 | 604 | |
... | ... | @@ -641,7 +641,7 @@ |
641 | 641 | fdc,m %r23(%r26) |
642 | 642 | fdc,m %r23(%r26) |
643 | 643 | fdc,m %r23(%r26) |
644 | - CMPB<< %r26, %r25,1b | |
644 | + cmpb,COND(<<) %r26, %r25,1b | |
645 | 645 | fdc,m %r23(%r26) |
646 | 646 | |
647 | 647 | sync |
... | ... | @@ -684,7 +684,7 @@ |
684 | 684 | fdc,m %r23(%sr3, %r26) |
685 | 685 | fdc,m %r23(%sr3, %r26) |
686 | 686 | fdc,m %r23(%sr3, %r26) |
687 | - CMPB<< %r26, %r25,1b | |
687 | + cmpb,COND(<<) %r26, %r25,1b | |
688 | 688 | fdc,m %r23(%sr3, %r26) |
689 | 689 | |
690 | 690 | sync |
... | ... | @@ -727,7 +727,7 @@ |
727 | 727 | fic,m %r23(%sr3, %r26) |
728 | 728 | fic,m %r23(%sr3, %r26) |
729 | 729 | fic,m %r23(%sr3, %r26) |
730 | - CMPB<< %r26, %r25,1b | |
730 | + cmpb,COND(<<) %r26, %r25,1b | |
731 | 731 | fic,m %r23(%sr3, %r26) |
732 | 732 | |
733 | 733 | sync |
... | ... | @@ -770,7 +770,7 @@ |
770 | 770 | pdc,m %r23(%r26) |
771 | 771 | pdc,m %r23(%r26) |
772 | 772 | pdc,m %r23(%r26) |
773 | - CMPB<< %r26, %r25, 1b | |
773 | + cmpb,COND(<<) %r26, %r25, 1b | |
774 | 774 | pdc,m %r23(%r26) |
775 | 775 | |
776 | 776 | sync |
... | ... | @@ -834,7 +834,7 @@ |
834 | 834 | fdc,m %r23(%r28) |
835 | 835 | fdc,m %r23(%r28) |
836 | 836 | fdc,m %r23(%r28) |
837 | - CMPB<< %r28, %r29, 1b | |
837 | + cmpb,COND(<<) %r28, %r29, 1b | |
838 | 838 | fdc,m %r23(%r28) |
839 | 839 | |
840 | 840 | sync |
... | ... | @@ -857,7 +857,7 @@ |
857 | 857 | ldo -1(%r23), %r21 |
858 | 858 | ANDCM %r26, %r21, %r26 |
859 | 859 | |
860 | -1: CMPB<<,n %r26, %r25, 1b | |
860 | +1: cmpb,COND(<<),n %r26, %r25, 1b | |
861 | 861 | fdc,m %r23(%sr3, %r26) |
862 | 862 | |
863 | 863 | sync |
... | ... | @@ -878,7 +878,7 @@ |
878 | 878 | ldo -1(%r23), %r21 |
879 | 879 | ANDCM %r26, %r21, %r26 |
880 | 880 | |
881 | -1: CMPB<<,n %r26, %r25,1b | |
881 | +1: cmpb,COND(<<),n %r26, %r25,1b | |
882 | 882 | fdc,m %r23(%r26) |
883 | 883 | |
884 | 884 | sync |
... | ... | @@ -900,7 +900,7 @@ |
900 | 900 | ldo -1(%r23), %r21 |
901 | 901 | ANDCM %r26, %r21, %r26 |
902 | 902 | |
903 | -1: CMPB<<,n %r26, %r25,1b | |
903 | +1: cmpb,COND(<<),n %r26, %r25,1b | |
904 | 904 | fic,m %r23(%sr3, %r26) |
905 | 905 | |
906 | 906 | sync |
... | ... | @@ -943,7 +943,7 @@ |
943 | 943 | fic,m %r23(%sr4, %r26) |
944 | 944 | fic,m %r23(%sr4, %r26) |
945 | 945 | fic,m %r23(%sr4, %r26) |
946 | - CMPB<< %r26, %r25, 1b | |
946 | + cmpb,COND(<<) %r26, %r25, 1b | |
947 | 947 | fic,m %r23(%sr4, %r26) |
948 | 948 | |
949 | 949 | sync |
... | ... | @@ -964,7 +964,7 @@ |
964 | 964 | ldo -1(%r23), %r21 |
965 | 965 | ANDCM %r26, %r21, %r26 |
966 | 966 | |
967 | -1: CMPB<<,n %r26, %r25, 1b | |
967 | +1: cmpb,COND(<<),n %r26, %r25, 1b | |
968 | 968 | fic,m %r23(%sr4, %r26) |
969 | 969 | |
970 | 970 | sync |
include/asm-parisc/assembly.h
... | ... | @@ -31,9 +31,8 @@ |
31 | 31 | #define STREGM std,ma |
32 | 32 | #define SHRREG shrd |
33 | 33 | #define SHLREG shld |
34 | -#define ADDIB addib,* | |
35 | -#define CMPB cmpb,* | |
36 | 34 | #define ANDCM andcm,* |
35 | +#define COND(x) * ## x | |
37 | 36 | #define RP_OFFSET 16 |
38 | 37 | #define FRAME_SIZE 128 |
39 | 38 | #define CALLEE_REG_FRAME_SIZE 144 |
40 | 39 | |
... | ... | @@ -46,9 +45,8 @@ |
46 | 45 | #define STREGM stwm |
47 | 46 | #define SHRREG shr |
48 | 47 | #define SHLREG shlw |
49 | -#define ADDIB addib, | |
50 | -#define CMPB cmpb, | |
51 | 48 | #define ANDCM andcm |
49 | +#define COND(x) x | |
52 | 50 | #define RP_OFFSET 20 |
53 | 51 | #define FRAME_SIZE 64 |
54 | 52 | #define CALLEE_REG_FRAME_SIZE 128 |