Py_DECREF(tmp);
}
- // Intern non-string consants in the free-threaded build, but only if
+ // Intern non-string constants in the free-threaded build, but only if
// we are also immortalizing objects that use deferred reference
// counting.
PyThreadState *tstate = PyThreadState_GET();
* handles the case of no arguments and one positional argument, and calls
* complex_new(), implemented with Argument Clinic, to handle the remaining
* cases: 'real' and 'imag' arguments. This separation is well suited
- * for different constructor roles: convering a string or number to a complex
+ * for different constructor roles: converting a string or number to a complex
* number and constructing a complex number from real and imaginary parts.
*/
static PyObject *
#ifdef Py_GIL_DISABLED
// Grabs the key and/or value from the provided locations and if successful
-// returns them with an increased reference count. If either one is unsucessful
+// returns them with an increased reference count. If either one is unsuccessful
// nothing is incref'd and returns -1.
static int
acquire_key_value(PyObject **key_loc, PyObject *value, PyObject **value_loc,
/* In general, as things go on we've established that the slice starts
with a monotone run of n elements, starting at lo. */
- /* We're n elements into the slice, and the most recent neq+1 elments are
+ /* We're n elements into the slice, and the most recent neq+1 elements are
* all equal. This reverses them in-place, and resets neq for reuse.
*/
#define REVERSE_LAST_NEQ \
Py_ssize_t neq = 0;
for ( ; n < nremaining; ++n) {
IF_NEXT_SMALLER {
- /* This ends the most recent run of equal elments, but still in
+ /* This ends the most recent run of equal elements, but still in
* the "descending" direction.
*/
REVERSE_LAST_NEQ
/* ----------------------------------------------------------------------------
Concurrent bitmap that can set/reset sequences of bits atomically,
-represeted as an array of fields where each field is a machine word (`size_t`)
+represented as an array of fields where each field is a machine word (`size_t`)
There are two APIs; the standard one cannot have sequences that cross
between the bitmap fields (and a sequence must be <= MI_BITMAP_FIELD_BITS).
return false;
}
-// Like _mi_bitmap_try_find_from_claim but with an extra predicate that must be fullfilled
+// Like _mi_bitmap_try_find_from_claim but with an extra predicate that must be fulfilled
bool _mi_bitmap_try_find_from_claim_pred(mi_bitmap_t bitmap, const size_t bitmap_fields,
const size_t start_field_idx, const size_t count,
mi_bitmap_pred_fun_t pred_fun, void* pred_arg,
if (heap==NULL || !mi_heap_is_initialized(heap)) return;
if (!mi_heap_is_backing(heap)) {
- // tranfer still used pages to the backing heap
+ // transfer still used pages to the backing heap
mi_heap_absorb(heap->tld->heap_backing, heap);
}
else {
typedef struct _obmalloc_state OMState;
/* obmalloc state for main interpreter and shared by all interpreters without
- * their own obmalloc state. By not explicitly initalizing this structure, it
+ * their own obmalloc state. By not explicitly initializing this structure, it
* will be allocated in the BSS which is a small performance win. The radix
* tree arrays are fairly large but are sparsely used. */
static struct _obmalloc_state obmalloc_state_main;
ucnhash_capi = (_PyUnicode_Name_CAPI *)PyCapsule_Import(
PyUnicodeData_CAPSULE_NAME, 1);
- // It's fine if we overwite the value here. It's always the same value.
+ // It's fine if we overwrite the value here. It's always the same value.
_Py_atomic_store_ptr(&interp->unicode.ucnhash_capi, ucnhash_capi);
}
return ucnhash_capi;