Fix last grouplist patch: the duplicate-removal loops referenced the wrong array, using (*groups) where the buffer is (*groupsp) in one place, and (*groupsp) where the plain groups array is meant in the other.
  {
    long int inner;
    for (inner = 0; inner < prev_start; ++inner)
-     if ((*groupsp)[inner] == (*groups)[cnt])
+     if ((*groupsp)[inner] == (*groupsp)[cnt])
        break;
    if (inner < prev_start)
  {
    long int inner;
    for (inner = 0; inner < prev_start; ++inner)
-     if ((*groupsp)[inner] == (*groups)[cnt])
+     if (groups[inner] == groups[cnt])
        break;
    if (inner < prev_start)
-     (*groupsp)[cnt] = (*groupsp)[--start];
+     groups[cnt] = groups[--start];
    else
      ++cnt;
  }
if (status != NSS_STATUS_TRYAGAIN)
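
For context, here is a minimal standalone sketch of the duplicate-removal loop these hunks patch. The function name collapse_new_groups and the sample data in main are invented for illustration; in glibc the same logic runs on the dynamically grown group buffer inside the initgroups/getgrouplist machinery (as *groupsp in one caller and a plain groups array in the other).

#include <stdio.h>
#include <sys/types.h>

/* Entries [0, prev_start) were collected earlier; entries
   [prev_start, start) were just appended by the current NSS module.
   Drop every new entry that duplicates an old one by overwriting it
   with the last new entry and shrinking the list.  Returns the new
   total number of entries.  */
static long int
collapse_new_groups (gid_t *groups, long int prev_start, long int start)
{
  long int cnt = prev_start;
  while (cnt < start)
    {
      long int inner;
      for (inner = 0; inner < prev_start; ++inner)
        if (groups[inner] == groups[cnt])
          break;

      if (inner < prev_start)
        /* Duplicate: reuse this slot for the last new entry and
           re-examine the slot on the next iteration.  */
        groups[cnt] = groups[--start];
      else
        ++cnt;
    }
  return start;
}

int
main (void)
{
  /* Old entries: 100 and 4; newly appended entries: 4, 27, 100, 50.  */
  gid_t groups[] = { 100, 4, 4, 27, 100, 50 };
  long int n = collapse_new_groups (groups, 2, 6);

  for (long int i = 0; i < n; ++i)
    printf ("%lu\n", (unsigned long) groups[i]);
  return 0;
}

Compiled on its own this prints 100, 4, 50 and 27: the two gids already present are discarded, and the order of the surviving new entries is not preserved.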
\
     *__gmemp == (oldval) ? (*__gmemp = __gnewval, 0) : 1; })

+/* XXX We do not really need 64-bit compare-and-exchange.  At least
+   not at the moment.  */
+# define __arch_compare_and_exchange_val_64_acq(mem, newval, oldval) \
+  ({ __typeof (*mem) ret = *(mem); abort (); ret = (newval); ret = (oldval); })
+
#endif
/* Note that we need no lock prefix. */
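
The abort() stub added above only makes sense together with the generic macros that consume it. The following is a compilable sketch, not glibc's actual atomic.h: the names cas_val_acq, cas_val_32_acq and cas_val_64_acq are invented, and the 32-bit helper uses a GCC __sync builtin instead of the header's own machinery. It shows why the 64-bit variant must still be a well-typed expression even though it is never meant to run: the size dispatch is an ordinary if chain, so the dead branch is compiled anyway.

#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>

/* 32-bit helper: a real CAS via a GCC builtin, standing in for the
   assembly implementation in the header above.  */
#define cas_val_32_acq(mem, newval, oldval) \
  __sync_val_compare_and_swap (mem, oldval, newval)

/* 64-bit helper: same shape as the stub in the patch.  It reads *mem so
   the result has the right type, traps if ever executed, and touches
   newval and oldval only so every operand is used.  */
#define cas_val_64_acq(mem, newval, oldval) \
  ({ __typeof (*(mem)) ret = *(mem); abort (); ret = (newval); \
     ret = (oldval); })

/* Size dispatcher, loosely modelled on the sizeof chain glibc's generic
   atomic.h uses to pick an __arch_* helper.  Both branches are compiled
   at every call site, which is why the 64-bit stub has to exist.  */
#define cas_val_acq(mem, newval, oldval) \
  ({ __typeof (*(mem)) result; \
     if (sizeof (*(mem)) == 4) \
       result = cas_val_32_acq (mem, newval, oldval); \
     else \
       result = cas_val_64_acq (mem, newval, oldval); \
     result; })

int
main (void)
{
  uint32_t word = 1;
  /* word is 1, so the exchange succeeds: word becomes 2 and the old
     value 1 is returned.  */
  uint32_t old = cas_val_acq (&word, 2u, 1u);
  printf ("old=%u new=%u\n", (unsigned) old, (unsigned) word);
  return 0;
}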