loongarch_fp_conditions[16]= {LARCH_FP_CONDITIONS (STRINGIFY)};
#undef STRINGIFY
+/* Size of the stack clash protection guard, in bytes.  */
+#define STACK_CLASH_PROTECTION_GUARD_SIZE \
+ (1 << param_stack_clash_protection_guard_size)
+
/* Implement TARGET_FUNCTION_ARG_BOUNDARY. Every parameter gets at
least PARM_BOUNDARY bits of alignment, but will be given anything up
to PREFERRED_STACK_BOUNDARY bits if the type requires it. */
static HOST_WIDE_INT
loongarch_first_stack_step (struct loongarch_frame_info *frame)
{
+ HOST_WIDE_INT min_first_step
+ = LARCH_STACK_ALIGN (frame->total_size - frame->fp_sp_offset);
+
+ /* When stack checking is required, return min_first_step: always for
+ -fstack-check, and for stack clash protection whenever
+ frame->total_size exceeds the guard size. */
+ if (flag_stack_check == STATIC_BUILTIN_STACK_CHECK
+ || (flag_stack_clash_protection
+ && frame->total_size > STACK_CLASH_PROTECTION_GUARD_SIZE))
+ return min_first_step;
+
if (IMM12_OPERAND (frame->total_size))
return frame->total_size;
- HOST_WIDE_INT min_first_step
- = LARCH_STACK_ALIGN (frame->total_size - frame->fp_sp_offset);
HOST_WIDE_INT max_first_step = IMM_REACH / 2 - PREFERRED_STACK_BOUNDARY / 8;
HOST_WIDE_INT min_second_step = frame->total_size - max_first_step;
gcc_assert (min_first_step <= max_first_step);
static void
loongarch_emit_probe_stack_range (HOST_WIDE_INT first, HOST_WIDE_INT size)
{
- /* See if we have a constant small number of probes to generate. If so,
- that's the easy case. */
- if ((TARGET_64BIT && (first + size <= 32768))
- || (!TARGET_64BIT && (first + size <= 2048)))
- {
- HOST_WIDE_INT i;
+ HOST_WIDE_INT rounded_size;
+ HOST_WIDE_INT interval;
- /* Probe at FIRST + N * PROBE_INTERVAL for values of N from 1 until
- it exceeds SIZE. If only one probe is needed, this will not
- generate any code. Then probe at FIRST + SIZE. */
- for (i = PROBE_INTERVAL; i < size; i += PROBE_INTERVAL)
- emit_stack_probe (plus_constant (Pmode, stack_pointer_rtx,
- -(first + i)));
+ if (flag_stack_clash_protection)
+ interval = STACK_CLASH_PROTECTION_GUARD_SIZE;
+ else
+ interval = PROBE_INTERVAL;
- emit_stack_probe (plus_constant (Pmode, stack_pointer_rtx,
- -(first + size)));
- }
+ rtx r12 = LARCH_PROLOGUE_TEMP2 (Pmode);
+ rtx r14 = LARCH_PROLOGUE_TEMP3 (Pmode);
- /* Otherwise, do the same as above, but in a loop. Note that we must be
- extra careful with variables wrapping around because we might be at
- the very top (or the very bottom) of the address space and we have
- to be able to handle this case properly; in particular, we use an
- equality test for the loop condition. */
- else
- {
- HOST_WIDE_INT rounded_size;
- rtx r13 = LARCH_PROLOGUE_TEMP (Pmode);
- rtx r12 = LARCH_PROLOGUE_TEMP2 (Pmode);
- rtx r14 = LARCH_PROLOGUE_TEMP3 (Pmode);
+ size = size + first;
- /* Sanity check for the addressing mode we're going to use. */
- gcc_assert (first <= 16384);
+ /* Sanity check for the addressing mode we're going to use. */
+ gcc_assert (first <= 16384);
+ /* Step 1: round SIZE to the previous multiple of the interval. */
- /* Step 1: round SIZE to the previous multiple of the interval. */
+ rounded_size = ROUND_DOWN (size, interval);
- rounded_size = ROUND_DOWN (size, PROBE_INTERVAL);
+ /* Step 2: compute initial and final value of the loop counter. */
- /* TEST_ADDR = SP + FIRST */
- if (first != 0)
- {
- emit_move_insn (r14, GEN_INT (first));
- emit_insn (gen_rtx_SET (r13, gen_rtx_MINUS (Pmode,
- stack_pointer_rtx,
- r14)));
- }
- else
- emit_move_insn (r13, stack_pointer_rtx);
+ emit_move_insn (r14, GEN_INT (interval));
+
+ /* If rounded_size is zero, the space requested by the local
+ variables is smaller than the probe interval, so there is no
+ need to explicitly probe the allocated space. */
+ if (rounded_size != 0)
+ {
+ /* Step 3: the loop
+
+ do
+ {
+ TEST_ADDR = TEST_ADDR + PROBE_INTERVAL
+ probe at TEST_ADDR
+ }
+ while (TEST_ADDR != LAST_ADDR)
- /* Step 2: compute initial and final value of the loop counter. */
+ probes at SP - N * INTERVAL for values of N from 1 until
+ N * INTERVAL reaches ROUNDED_SIZE. */
- emit_move_insn (r14, GEN_INT (PROBE_INTERVAL));
- /* LAST_ADDR = SP + FIRST + ROUNDED_SIZE. */
- if (rounded_size == 0)
- emit_move_insn (r12, r13);
+ if (rounded_size <= STACK_CLASH_MAX_UNROLL_PAGES * interval)
+ {
+ for (HOST_WIDE_INT i = 0; i < rounded_size; i += interval)
+ {
+ emit_insn (gen_rtx_SET (stack_pointer_rtx,
+ gen_rtx_MINUS (Pmode,
+ stack_pointer_rtx,
+ r14)));
+ emit_move_insn (gen_rtx_MEM (Pmode,
+ gen_rtx_PLUS (Pmode,
+ stack_pointer_rtx,
+ const0_rtx)),
+ const0_rtx);
+ emit_insn (gen_blockage ());
+ }
+ dump_stack_clash_frame_info (PROBE_INLINE, size != rounded_size);
+ }
else
{
emit_move_insn (r12, GEN_INT (rounded_size));
- emit_insn (gen_rtx_SET (r12, gen_rtx_MINUS (Pmode, r13, r12)));
- /* Step 3: the loop
-
- do
- {
- TEST_ADDR = TEST_ADDR + PROBE_INTERVAL
- probe at TEST_ADDR
- }
- while (TEST_ADDR != LAST_ADDR)
-
- probes at FIRST + N * PROBE_INTERVAL for values of N from 1
- until it is equal to ROUNDED_SIZE. */
-
- emit_insn (gen_probe_stack_range (Pmode, r13, r13, r12, r14));
+ emit_insn (gen_rtx_SET (r12,
+ gen_rtx_MINUS (Pmode,
+ stack_pointer_rtx,
+ r12)));
+
+ emit_insn (gen_probe_stack_range (Pmode, stack_pointer_rtx,
+ stack_pointer_rtx, r12, r14));
+ emit_insn (gen_blockage ());
+ dump_stack_clash_frame_info (PROBE_LOOP, size != rounded_size);
}
+ }
+ else
+ dump_stack_clash_frame_info (NO_PROBE_SMALL_FRAME, true);
+
- /* Step 4: probe at FIRST + SIZE if we cannot assert at compile-time
- that SIZE is equal to ROUNDED_SIZE. */
+ /* Step 4: probe at FIRST + SIZE if we cannot assert at compile-time
+ that SIZE is equal to ROUNDED_SIZE. */
- if (size != rounded_size)
+ if (size != rounded_size)
+ {
+ if (size - rounded_size >= 2048)
{
- if (TARGET_64BIT)
- emit_stack_probe (plus_constant (Pmode, r12, rounded_size - size));
- else
- {
- HOST_WIDE_INT i;
- for (i = 2048; i < (size - rounded_size); i += 2048)
- {
- emit_stack_probe (plus_constant (Pmode, r12, -i));
- emit_insn (gen_rtx_SET (r12,
- plus_constant (Pmode, r12, -2048)));
- }
- rtx r1 = plus_constant (Pmode, r12,
- -(size - rounded_size - i + 2048));
- emit_stack_probe (r1);
- }
+ emit_move_insn (r14, GEN_INT (size - rounded_size));
+ emit_insn (gen_rtx_SET (stack_pointer_rtx,
+ gen_rtx_MINUS (Pmode,
+ stack_pointer_rtx,
+ r14)));
}
+ else
+ emit_insn (gen_rtx_SET (stack_pointer_rtx,
+ gen_rtx_PLUS (Pmode,
+ stack_pointer_rtx,
+ GEN_INT (rounded_size - size))));
}
+ if (first)
+ {
+ emit_move_insn (r12, GEN_INT (first));
+ emit_insn (gen_rtx_SET (stack_pointer_rtx,
+ gen_rtx_PLUS (Pmode,
+ stack_pointer_rtx, r12)));
+ }
/* Make sure nothing is scheduled before we are done. */
emit_insn (gen_blockage ());
}
/* TEST_ADDR = TEST_ADDR + PROBE_INTERVAL. */
xops[0] = reg1;
- xops[1] = GEN_INT (-PROBE_INTERVAL);
xops[2] = reg3;
if (TARGET_64BIT)
output_asm_insn ("sub.d\t%0,%0,%2", xops);
{
struct loongarch_frame_info *frame = &cfun->machine->frame;
HOST_WIDE_INT size = frame->total_size;
- HOST_WIDE_INT tmp;
rtx insn;
if (flag_stack_usage_info)
current_function_static_stack_size = size;
- if (flag_stack_check == STATIC_BUILTIN_STACK_CHECK
- || flag_stack_clash_protection)
- {
- if (crtl->is_leaf && !cfun->calls_alloca)
- {
- if (size > PROBE_INTERVAL && size > get_stack_check_protect ())
- {
- tmp = size - get_stack_check_protect ();
- loongarch_emit_probe_stack_range (get_stack_check_protect (),
- tmp);
- }
- }
- else if (size > 0)
- loongarch_emit_probe_stack_range (get_stack_check_protect (), size);
- }
-
/* Save the registers. */
if ((frame->mask | frame->fmask) != 0)
{
loongarch_for_each_saved_reg (size, loongarch_save_reg);
}
-
/* Set up the frame pointer, if we're using one. */
if (frame_pointer_needed)
{
loongarch_emit_stack_tie ();
}
- /* Allocate the rest of the frame. */
+ if (flag_stack_check == STATIC_BUILTIN_STACK_CHECK
+ || flag_stack_clash_protection)
+ {
+ HOST_WIDE_INT first = get_stack_check_protect ();
+
+ if (frame->total_size == 0)
+ {
+ /* do nothing. */
+ dump_stack_clash_frame_info (NO_PROBE_NO_FRAME, false);
+ return;
+ }
+
+ if (crtl->is_leaf && !cfun->calls_alloca)
+ {
+ HOST_WIDE_INT interval;
+
+ if (flag_stack_clash_protection)
+ interval = STACK_CLASH_PROTECTION_GUARD_SIZE;
+ else
+ interval = PROBE_INTERVAL;
+
+ if (size > interval && size > first)
+ loongarch_emit_probe_stack_range (first, size - first);
+ else
+ loongarch_emit_probe_stack_range (first, size);
+ }
+ else
+ loongarch_emit_probe_stack_range (first, size);
+
+ if (size > 0)
+ {
+ /* Describe the effect of the previous instructions. */
+ insn = plus_constant (Pmode, stack_pointer_rtx, -size);
+ insn = gen_rtx_SET (stack_pointer_rtx, insn);
+ loongarch_set_frame_expr (insn);
+ }
+ return;
+ }
+
if (size > 0)
{
if (IMM12_OPERAND (-size))
}
else
{
- loongarch_emit_move (LARCH_PROLOGUE_TEMP (Pmode), GEN_INT (-size));
+ loongarch_emit_move (LARCH_PROLOGUE_TEMP (Pmode),
+ GEN_INT (-size));
emit_insn (gen_add3_insn (stack_pointer_rtx, stack_pointer_rtx,
LARCH_PROLOGUE_TEMP (Pmode)));
gcc_unreachable ();
}
+ /* Validate the guard size. */
+ int guard_size = param_stack_clash_protection_guard_size;
+
+ /* Enforce that the probe interval equals the guard size so the
+ middle-end does the right thing. */
+ SET_OPTION_IF_UNSET (opts, &global_options_set,
+ param_stack_clash_protection_probe_interval,
+ guard_size);
+
loongarch_init_print_operand_punct ();
/* Set up array to map GCC register number to debug register number.