https://gcc.gnu.org/bugzilla/show_bug.cgi?id=83653

Jakub Jelinek <jakub at gcc dot gnu.org> changed:

           What    |Removed                     |Added
----------------------------------------------------------------------------
                 CC|                            |jakub at gcc dot gnu.org

--- Comment #13 from Jakub Jelinek <jakub at gcc dot gnu.org> ---
What about this approach: force __builtin_constant_p to be evaluated in the FE,
before optimizations, and decide just on that.  It works when constant literals
are passed to the macro; it takes the fallback path if you, say, use it in an
inline function and hope that a constant will propagate when the inline is
called with one:

struct V { int counter; };
int ia64_fetch_and_add(int, int *);
int ia64_atomic_sub(int, struct V *);

#ifdef __OPTIMIZE__
#define atomic_sub_return_1(i,v,c)                                      \
({                                                                      \
        int __ia64_asr_i = (i);                                         \
        /* The initializer of a block-scope static must be a constant   \
           expression, so the FE folds __builtin_constant_p right here, \
           before any optimization pass runs.  */                       \
        static const int __ia64_asr_p_##c                               \
          = __builtin_constant_p(i)                                     \
            ? ((i) == 1 || (i) == 4 || (i) == 8 || (i) == 16            \
               || (i) == -1 || (i) == -4 || (i) == -8 || (i) == -16)    \
            : 0;                                                        \
        __ia64_asr_p_##c                                                \
          ? ia64_fetch_and_add(-__ia64_asr_i, &(v)->counter)            \
          : ia64_atomic_sub(__ia64_asr_i, v);                           \
})
/* The extra expansion level lets c (i.e. __COUNTER__) expand before ##
   pastes it, giving each use of the macro a uniquely named static.  */
#define atomic_sub_return_2(i,v,c) atomic_sub_return_1(i,v,c)
#define atomic_sub_return(i,v) atomic_sub_return_2(i,v,__COUNTER__)
#else
#define atomic_sub_return(i,v) ia64_atomic_sub(i,v)
#endif

void
foo (struct V *p, int i)
{
  atomic_sub_return (4, p);     /* in the list -> ia64_fetch_and_add(-4, &p->counter) */
  atomic_sub_return (8, p);     /* likewise */
  atomic_sub_return (16, p);    /* likewise */
  atomic_sub_return (7, p);     /* constant, but not in the list -> ia64_atomic_sub */
  atomic_sub_return (i, p);     /* not a FE-time constant -> ia64_atomic_sub */
}
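
Distilled to its core (a minimal sketch just to illustrate the point; the
IS_CONSTANT and bar names below are made up, not part of the proposed change):
the initializer of a block-scope static must be a constant expression, so the
FE has to fold __builtin_constant_p while parsing the statement expression:

#define IS_CONSTANT(x)                                            \
({                                                                \
        static const int __x_is_const = __builtin_constant_p (x); \
        __x_is_const;                                             \
})

int bar (int i)
{
  int a = IS_CONSTANT (42);     /* static initializer folds to 1 in the FE */
  int b = IS_CONSTANT (i);      /* folds to 0, i isn't a parse-time constant */
  return a + b;
}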
