}
}
+FIXTURE(v_csr_valid)
+{
+};
+
+FIXTURE_SETUP(v_csr_valid)
+{
+}
+
+FIXTURE_TEARDOWN(v_csr_valid)
+{
+}
+
+/* modifications of the initial vsetvli settings */
+FIXTURE_VARIANT(v_csr_valid)
+{
+ unsigned long vstart;
+ unsigned long vl;
+ unsigned long vtype;
+ unsigned long vcsr;
+ unsigned long vlenb_mul;
+ unsigned long vlenb_min;
+ unsigned long vlenb_max;
+ unsigned long spec;
+};
+
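+/*
+ * The vtype values below are raw CSR encodings:
+ * - RVV 1.0: vlmul in bits [2:0], vsew in bits [5:3], vta bit 6, vma bit 7;
+ *   0x16 = SEW=32, LMUL=1/4 and 0x11 = SEW=32, LMUL=2 (both tu, mu)
+ * - XTheadVector 0.7: vlmul in bits [1:0], vsew in bits [4:2];
+ *   0x9 = SEW=32, LMUL=2
+ */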
+/* valid for VLEN >= 128: LMUL = 1/4, SEW = 32 */
+FIXTURE_VARIANT_ADD(v_csr_valid, frac_lmul1)
+{
+ .vstart = 0x0,
+ .vl = 0x0,
+ .vtype = 0x16,
+ .vcsr = 0x0,
+ .vlenb_mul = 0x1,
+ .vlenb_min = 0x10,
+ .vlenb_max = 0x0,
+ .spec = VECTOR_1_0,
+};
+
+/* valid for VLEN >= 16: LMUL = 2, SEW = 32 */
+FIXTURE_VARIANT_ADD(v_csr_valid, int_lmul1)
+{
+ .vstart = 0x0,
+ .vl = 0x0,
+ .vtype = 0x11,
+ .vcsr = 0x0,
+ .vlenb_mul = 0x1,
+ .vlenb_min = 0x2,
+ .vlenb_max = 0x0,
+ .spec = VECTOR_1_0,
+};
+
+/* valid for XTheadVector VLEN >= 16: LMUL = 2, SEW = 32 */
+FIXTURE_VARIANT_ADD(v_csr_valid, int_lmul2)
+{
+ .vstart = 0x0,
+ .vl = 0x0,
+ .vtype = 0x9,
+ .vcsr = 0x0,
+ .vlenb_mul = 0x1,
+ .vlenb_min = 0x2,
+ .vlenb_max = 0x0,
+ .spec = XTHEAD_VECTOR_0_7,
+};
+
+/* valid for VLEN >= 32: LMUL = 2, SEW = 32, VL = 2 */
+FIXTURE_VARIANT_ADD(v_csr_valid, int_lmul3)
+{
+ .vstart = 0x0,
+ .vl = 0x2,
+ .vtype = 0x11,
+ .vcsr = 0x0,
+ .vlenb_mul = 0x1,
+ .vlenb_min = 0x4,
+ .vlenb_max = 0x0,
+ .spec = VECTOR_1_0,
+};
+
+TEST_F(v_csr_valid, ptrace_v_valid_values)
+{
+ unsigned long vlenb;
+ pid_t pid;
+
+ if (!is_vector_supported() && !is_xtheadvector_supported())
+ SKIP(return, "Vectors not supported");
+
+ if (is_vector_supported() && !vector_test(variant->spec))
+ SKIP(return, "Test not supported for Vector");
+
+ if (is_xtheadvector_supported() && !xthead_test(variant->spec))
+ SKIP(return, "Test not supported for XTheadVector");
+
+ vlenb = get_vr_len();
+
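+ /* vlenb_min/vlenb_max are in bytes (vlenb); the SKIP messages report VLEN in bits */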
+ if (variant->vlenb_min) {
+ if (vlenb < variant->vlenb_min)
+ SKIP(return, "This test does not support VLEN < %lu\n",
+ variant->vlenb_min * 8);
+ }
+ if (variant->vlenb_max) {
+ if (vlenb > variant->vlenb_max)
+ SKIP(return, "This test does not support VLEN > %lu\n",
+ variant->vlenb_max * 8);
+ }
+
+ chld_lock = 1;
+ pid = fork();
+ ASSERT_LE(0, pid)
+ TH_LOG("fork: %m");
+
+ if (pid == 0) {
+ unsigned long vl;
+
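+ /* spin until the tracer clears chld_lock via PTRACE_POKEDATA */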
+ while (chld_lock == 1)
+ asm volatile("" : : "g"(chld_lock) : "memory");
+
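+ /* both branches program the same initial config: SEW=16, LMUL=2, vl = VLMAX */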
+ if (is_xtheadvector_supported()) {
+ asm volatile (
+ // 0 | zimm[10:0] | rs1 | 1 1 1 | rd |1010111| vsetvli
+ // vsetvli t4, x0, e16, m2, d1
+ ".4byte 0b00000000010100000111111011010111\n"
+ "mv %[new_vl], t4\n"
+ : [new_vl] "=r" (vl) : : "t4");
+ } else {
+ asm volatile (
+ ".option push\n"
+ ".option arch, +zve32x\n"
+ "vsetvli %[new_vl], x0, e16, m2, tu, mu\n"
+ ".option pop\n"
+ : [new_vl] "=r"(vl) : : );
+ }
+
+ asm volatile (
+ ".option push\n"
+ ".option norvc\n"
+ ".option arch, +zve32x\n"
+ "ebreak\n" /* breakpoint 1: apply new V state using ptrace */
+ "nop\n"
+ "ebreak\n" /* breakpoint 2: V state clean - context will not be saved */
+ "vmv.v.i v0, -1\n"
+ "ebreak\n" /* breakpoint 3: V state dirty - context will be saved */
+ ".option pop\n");
+ } else {
+ struct __riscv_v_regset_state *regset_data;
+ struct user_regs_struct regs;
+ size_t regset_size;
+ struct iovec iov;
+ int status;
+
+ /* attach */
+
+ ASSERT_EQ(0, ptrace(PTRACE_ATTACH, pid, NULL, NULL));
+ ASSERT_EQ(pid, waitpid(pid, &status, 0));
+ ASSERT_TRUE(WIFSTOPPED(status));
+
+ /* unlock */
+
+ ASSERT_EQ(0, ptrace(PTRACE_POKEDATA, pid, &chld_lock, 0));
+
+ /* resume and wait for the 1st ebreak */
+
+ ASSERT_EQ(0, ptrace(PTRACE_CONT, pid, NULL, NULL));
+ ASSERT_EQ(pid, waitpid(pid, &status, 0));
+ ASSERT_TRUE(WIFSTOPPED(status));
+
+ /* read tracee vector csr regs using ptrace GETREGSET */
+
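+ /* regset layout: the CSR header followed by the 32 vector registers, vlenb bytes each */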
+ regset_size = sizeof(*regset_data) + vlenb * 32;
+ regset_data = calloc(1, regset_size);
+ ASSERT_NE(NULL, regset_data);
+
+ iov.iov_base = regset_data;
+ iov.iov_len = regset_size;
+
+ ASSERT_EQ(0, ptrace(PTRACE_GETREGSET, pid, NT_RISCV_VECTOR, &iov));
+
+ /* verify initial vsetvli settings */
+
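+ /*
+ * e16/m2 encodes as vtype 0x5 on XTheadVector and 0x9 on RVV 1.0;
+ * vsetvli with rs1 = x0 sets vl to VLMAX, which is VLEN * 2 / 16 = vlenb.
+ */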
+ if (is_xtheadvector_supported())
+ EXPECT_EQ(5UL, regset_data->vtype);
+ else
+ EXPECT_EQ(9UL, regset_data->vtype);
+
+ EXPECT_EQ(regset_data->vlenb, regset_data->vl);
+ EXPECT_EQ(vlenb, regset_data->vlenb);
+ EXPECT_EQ(0UL, regset_data->vstart);
+ EXPECT_EQ(0UL, regset_data->vcsr);
+
+ /* apply valid settings from fixture variants */
+
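+ /* every valid variant uses vlenb_mul = 1, so vlenb is written back unchanged */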
+ regset_data->vlenb *= variant->vlenb_mul;
+ regset_data->vstart = variant->vstart;
+ regset_data->vtype = variant->vtype;
+ regset_data->vcsr = variant->vcsr;
+ regset_data->vl = variant->vl;
+
+ iov.iov_base = regset_data;
+ iov.iov_len = regset_size;
+
+ ASSERT_EQ(0, ptrace(PTRACE_SETREGSET, pid, NT_RISCV_VECTOR, &iov));
+
+ /* skip 1st ebreak, then resume and wait for the 2nd ebreak */
+
+ iov.iov_base = &regs;
+ iov.iov_len = sizeof(regs);
+
+ ASSERT_EQ(0, ptrace(PTRACE_GETREGSET, pid, NT_PRSTATUS, &iov));
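+ /* the ebreak is the 4-byte encoding thanks to .option norvc in the tracee */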
+ regs.pc += 4;
+ ASSERT_EQ(0, ptrace(PTRACE_SETREGSET, pid, NT_PRSTATUS, &iov));
+
+ ASSERT_EQ(0, ptrace(PTRACE_CONT, pid, NULL, NULL));
+ ASSERT_EQ(pid, waitpid(pid, &status, 0));
+ ASSERT_TRUE(WIFSTOPPED(status));
+
+ /* read tracee vector csr regs using ptrace GETREGSET */
+
+ iov.iov_base = regset_data;
+ iov.iov_len = regset_size;
+
+ ASSERT_EQ(0, ptrace(PTRACE_GETREGSET, pid, NT_RISCV_VECTOR, &iov));
+
+ /* verify vector csr regs from tracee context while the V state is still clean */
+
+ EXPECT_EQ(regset_data->vstart, variant->vstart);
+ EXPECT_EQ(regset_data->vtype, variant->vtype);
+ EXPECT_EQ(regset_data->vcsr, variant->vcsr);
+ EXPECT_EQ(regset_data->vl, variant->vl);
+ EXPECT_EQ(regset_data->vlenb, vlenb);
+
+ /* skip 2nd ebreak, then resume and wait for the 3rd ebreak */
+
+ iov.iov_base = &regs;
+ iov.iov_len = sizeof(regs);
+
+ ASSERT_EQ(0, ptrace(PTRACE_GETREGSET, pid, NT_PRSTATUS, &iov));
+ regs.pc += 4;
+ ASSERT_EQ(0, ptrace(PTRACE_SETREGSET, pid, NT_PRSTATUS, &iov));
+
+ ASSERT_EQ(0, ptrace(PTRACE_CONT, pid, NULL, NULL));
+ ASSERT_EQ(pid, waitpid(pid, &status, 0));
+ ASSERT_TRUE(WIFSTOPPED(status));
+
+ /* read tracee vector csr regs using ptrace GETREGSET */
+
+ iov.iov_base = regset_data;
+ iov.iov_len = regset_size;
+
+ ASSERT_EQ(0, ptrace(PTRACE_GETREGSET, pid, NT_RISCV_VECTOR, &iov));
+
+ /* verify vector csr regs again, after the dirty V state forced a context save */
+
+ EXPECT_EQ(regset_data->vstart, variant->vstart);
+ EXPECT_EQ(regset_data->vtype, variant->vtype);
+ EXPECT_EQ(regset_data->vcsr, variant->vcsr);
+ EXPECT_EQ(regset_data->vl, variant->vl);
+ EXPECT_EQ(regset_data->vlenb, vlenb);
+
+ /* cleanup */
+
+ free(regset_data);
+ ASSERT_EQ(0, kill(pid, SIGKILL));
+ }
+}
+
TEST_HARNESS_MAIN