Fix a copy-and-paste error causing a segfault with sigsetjmp.

I'm not sure how this passed my code inspection and initial testing; it's
obviously wrong. Found while debugging csh: the jmp_buf pointer is passed
in %r3 (the first-argument register), not %r6.
This commit is contained in:
Justin Hibbits 2016-10-29 01:22:55 +00:00
parent 0d0f264099
commit aab03089ee
Notes: svn2git 2020-12-20 02:59:44 +00:00
svn path=/head/; revision=308072
2 changed files with 46 additions and 46 deletions

View File

@ -95,29 +95,29 @@ END(setjmp)
WEAK_REFERENCE(CNAME(__longjmp), longjmp)
ENTRY(__longjmp)
evldd %r9,24+0*8(%r6)
evldd %r10,24+1*8(%r6)
evldd %r11,24+2*8(%r6)
evldd %r12,24+3*8(%r6)
evldd %r13,24+4*8(%r6)
evldd %r14,24+5*8(%r6)
evldd %r15,24+6*8(%r6)
evldd %r16,24+7*8(%r6)
evldd %r17,24+8*8(%r6)
evldd %r18,24+9*8(%r6)
evldd %r19,24+10*8(%r6)
evldd %r20,24+11*8(%r6)
evldd %r21,24+12*8(%r6)
evldd %r22,24+13*8(%r6)
evldd %r23,24+14*8(%r6)
evldd %r24,24+15*8(%r6)
evldd %r25,24+16*8(%r6)
evldd %r26,24+17*8(%r6)
evldd %r27,24+18*8(%r6)
evldd %r28,24+19*8(%r6)
evldd %r29,24+20*8(%r6)
evldd %r30,24+21*8(%r6)
evldd %r31,24+22*8(%r6)
evldd %r9,24+0*8(%r3)
evldd %r10,24+1*8(%r3)
evldd %r11,24+2*8(%r3)
evldd %r12,24+3*8(%r3)
evldd %r13,24+4*8(%r3)
evldd %r14,24+5*8(%r3)
evldd %r15,24+6*8(%r3)
evldd %r16,24+7*8(%r3)
evldd %r17,24+8*8(%r3)
evldd %r18,24+9*8(%r3)
evldd %r19,24+10*8(%r3)
evldd %r20,24+11*8(%r3)
evldd %r21,24+12*8(%r3)
evldd %r22,24+13*8(%r3)
evldd %r23,24+14*8(%r3)
evldd %r24,24+15*8(%r3)
evldd %r25,24+16*8(%r3)
evldd %r26,24+17*8(%r3)
evldd %r27,24+18*8(%r3)
evldd %r28,24+19*8(%r3)
evldd %r29,24+20*8(%r3)
evldd %r30,24+21*8(%r3)
evldd %r31,24+22*8(%r3)
mr %r6,%r4 /* save val param */
mtlr %r11 /* r11 -> link reg */

View File

@ -103,29 +103,29 @@ END(sigsetjmp)
ENTRY(siglongjmp)
/* FPRs */
evldd %r9,24+0*8(%r6)
evldd %r10,24+1*8(%r6)
evldd %r11,24+2*8(%r6)
evldd %r12,24+3*8(%r6)
evldd %r13,24+4*8(%r6)
evldd %r14,24+5*8(%r6)
evldd %r15,24+6*8(%r6)
evldd %r16,24+7*8(%r6)
evldd %r17,24+8*8(%r6)
evldd %r18,24+9*8(%r6)
evldd %r19,24+10*8(%r6)
evldd %r20,24+11*8(%r6)
evldd %r21,24+12*8(%r6)
evldd %r22,24+13*8(%r6)
evldd %r23,24+14*8(%r6)
evldd %r24,24+15*8(%r6)
evldd %r25,24+16*8(%r6)
evldd %r26,24+17*8(%r6)
evldd %r27,24+18*8(%r6)
evldd %r28,24+19*8(%r6)
evldd %r29,24+20*8(%r6)
evldd %r30,24+21*8(%r6)
evldd %r31,24+22*8(%r6)
evldd %r9,24+0*8(%r3)
evldd %r10,24+1*8(%r3)
evldd %r11,24+2*8(%r3)
evldd %r12,24+3*8(%r3)
evldd %r13,24+4*8(%r3)
evldd %r14,24+5*8(%r3)
evldd %r15,24+6*8(%r3)
evldd %r16,24+7*8(%r3)
evldd %r17,24+8*8(%r3)
evldd %r18,24+9*8(%r3)
evldd %r19,24+10*8(%r3)
evldd %r20,24+11*8(%r3)
evldd %r21,24+12*8(%r3)
evldd %r22,24+13*8(%r3)
evldd %r23,24+14*8(%r3)
evldd %r24,24+15*8(%r3)
evldd %r25,24+16*8(%r3)
evldd %r26,24+17*8(%r3)
evldd %r27,24+18*8(%r3)
evldd %r28,24+19*8(%r3)
evldd %r29,24+20*8(%r3)
evldd %r30,24+21*8(%r3)
evldd %r31,24+22*8(%r3)
lwz %r7,0(%r3)
mr %r6,%r4