};
SHAPE (binary_opt_n)
+/* <T0>_t vfoo[_t0](<T0>_t, <T0>_t)
+ <T0>_t vfoo[_n_t0](<T0>_t, <S0>_t)
+
+ Where the _n form only supports s16/s32/u16/u32 types as for vorrq.
+
+ Example: vorrq.
+ int16x8_t [__arm_]vorrq[_s16](int16x8_t a, int16x8_t b)
+ int16x8_t [__arm_]vorrq_m[_s16](int16x8_t inactive, int16x8_t a, int16x8_t b, mve_pred16_t p)
+ int16x8_t [__arm_]vorrq_x[_s16](int16x8_t a, int16x8_t b, mve_pred16_t p)
+ int16x8_t [__arm_]vorrq[_n_s16](int16x8_t a, const int16_t imm)
+ int16x8_t [__arm_]vorrq_m_n[_s16](int16x8_t a, const int16_t imm, mve_pred16_t p) */
+/* Implements the vorrq-style shape described above: the MODE_none form
+   takes two vectors, while the "_n" form takes a vector and a scalar
+   and is only built for 16/32-bit integer element types.  */
+struct binary_orrq_def : public overloaded_base<0>
+{
+ /* Return true when the mode suffix must be spelled out in the
+ intrinsic's name.  Only the "_m"-predicated "_n" form does so
+ (e.g. vorrq_m_n); every other spelling leaves it implicit.  */
+ bool
+ explicit_mode_suffix_p (enum predication_index pred, enum mode_suffix_index mode) const override
+ {
+ return (mode == MODE_n
+ && pred == PRED_m);
+ }
+
+ /* Decide whether a separate overloaded declaration is emitted for
+ the given <pred, mode> pair, or whether MODE_none's is reused.  */
+ bool
+ skip_overload_p (enum predication_index pred, enum mode_suffix_index mode) const override
+ {
+ switch (mode)
+ {
+ case MODE_none:
+ return false;
+
+ /* For MODE_n, share the overloaded instance with MODE_none, except for PRED_m. */
+ case MODE_n:
+ return pred != PRED_m;
+
+ default:
+ gcc_unreachable ();
+ }
+ }
+
+ /* Register the overloaded and concrete declarations for GROUP.
+ The "v0,v0,v0" signature covers the vector-vector forms for all
+ element types; the "v0,v0,s0" (vector, scalar) "_n" forms are
+ restricted to 16/32-bit elements and to the m/none predications
+ (no "_x_n" variant), matching the vorrq intrinsics.  */
+ void
+ build (function_builder &b, const function_group_info &group,
+ bool preserve_user_namespace) const override
+ {
+ b.add_overloaded_functions (group, MODE_none, preserve_user_namespace);
+ b.add_overloaded_functions (group, MODE_n, preserve_user_namespace);
+ build_all (b, "v0,v0,v0", group, MODE_none, preserve_user_namespace);
+ build_16_32 (b, "v0,v0,s0", group, MODE_n, preserve_user_namespace, false, preds_m_or_none);
+ }
+
+ /* Resolve an overloaded call: after checking the governing-predicate
+ argument count, infer the type suffix from the first vector
+ argument, then accept either a matching vector or a scalar
+ ("_n" form) as the remaining argument.  */
+ tree
+ resolve (function_resolver &r) const override
+ {
+ unsigned int i, nargs;
+ type_suffix_index type;
+ if (!r.check_gp_argument (2, i, nargs)
+ || (type = r.infer_vector_type (0)) == NUM_TYPE_SUFFIXES)
+ return error_mark_node;
+
+ return r.finish_opt_n_resolution (i, 0, type);
+ }
+};
+SHAPE (binary_orrq)
+
/* <T0>[xN]_t vfoo_t0().
Example: vuninitializedq.