https://gcc.gnu.org/bugzilla/show_bug.cgi?id=92282

--- Comment #2 from Richard Biener <rguenth at gcc dot gnu.org> ---
Btw, x86 manages to generate

        movq    %rdi, %r9
        movq    %rsi, %r8
        movq    %r9, %rsi
        movq    %r8, %rdi
        subq    %rdx, %rsi
        sbbq    %rcx, %rdi
        movq    %rsi, %rax
        movq    %rdi, %rdx
        addq    $-1, %rax
        adcq    $-1, %rdx
        ret

for the unsigned __int128 case, vs.

        movq    %rdx, %r8
        movq    %rdi, %r9
        notq    %rcx
        notq    %r8
        movq    %rcx, %rdx
        movq    %r8, %rax
        addq    %r9, %rax
        adcq    %rsi, %rdx
        ret

for the signed case.

Reply via email to