if co_stacksize was > 32767 (the maximum value
which can be stored in 16 bits (signed)),
the PyCodeObject would be written wrong.
So the second import (reading the .pyc)
would cause a crash.
Since we can't change the PYC magic, we
go on (silently), but don't write the file.
This means everything will work, but
a .pyc will not be written and the file will need
to be parsed on each import.
I will backport.
#include <fcntl.h>
#endif
+/* True if int_value is too large to be stored in 16 bits (signed),
+ * i.e. greater than 32767.  The argument is parenthesized so the
+ * macro is safe to use with arbitrary expressions. */
+#define CANT_WRITE(int_value) ((int_value) > 32767)
+
extern time_t PyOS_GetLastModificationTime(char *, FILE *);
/* In getmtime.c */
{
FILE *fp;
+ if (CANT_WRITE(co->co_argcount) ||
+ CANT_WRITE(co->co_nlocals) ||
+ CANT_WRITE(co->co_stacksize) ||
+ CANT_WRITE(co->co_flags) ||
+ CANT_WRITE(co->co_firstlineno)) {
+ if (Py_VerboseFlag)
+ PySys_WriteStderr(
+ "# code too large: can't write %s\n",
+ cpathname);
+ return;
+ }
+
fp = open_exclusive(cpathname);
if (fp == NULL) {
if (Py_VerboseFlag)