From: Michael Tremer Date: Sat, 26 Nov 2016 21:38:30 +0000 (+0100) Subject: Migrate to Python 3 X-Git-Tag: 0.9.28~1285^2~1464 X-Git-Url: http://git.ipfire.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=964aa579ee1ad7e507e2d9b9824fe6bdcb4b2347;p=pakfire.git Migrate to Python 3 This will totally not run since some dependencies are missing (e.g. urlgrabber, gpgme). Signed-off-by: Michael Tremer --- diff --git a/.gitignore b/.gitignore index 293e999ee..9f8bba4d5 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,6 @@ /Makefile /build-aux +/intltool-* /libtool /missing /contrib/pakfire.nm diff --git a/Makefile.am b/Makefile.am index bf7e42fbd..4664632ba 100644 --- a/Makefile.am +++ b/Makefile.am @@ -110,7 +110,6 @@ pakfire_PYTHON = \ src/pakfire/i18n.py \ src/pakfire/keyring.py \ src/pakfire/logger.py \ - src/pakfire/lzma.py \ src/pakfire/progressbar.py \ src/pakfire/satsolver.py \ src/pakfire/server.py \ @@ -154,29 +153,6 @@ pakfire_repositorydir = $(pythondir)/pakfire/repository # ------------------------------------------------------------------------------ -pkgpyexec_LTLIBRARIES += \ - _lzma.la - -_lzma_la_SOURCES = \ - src/_lzma/_lzmamodule.c - -_lzma_la_CFLAGS = \ - $(AM_CFLAGS) \ - $(PYTHON_DEVEL_CFLAGS) \ - $(LZMA_CFLAGS) - -_lzma_la_LDFLAGS = \ - $(AM_LDFLAGS) \ - -shared \ - -module \ - -avoid-version - -_lzma_la_LIBADD = \ - $(PYTHON_DEVEL_LIBS) \ - $(LZMA_LIBS) - -# ------------------------------------------------------------------------------ - pkgpyexec_LTLIBRARIES += \ _pakfire.la diff --git a/configure.ac b/configure.ac index 1e188e751..441120b44 100644 --- a/configure.ac +++ b/configure.ac @@ -99,7 +99,7 @@ CC_CHECK_FLAGS_APPEND([with_ldflags], [LDFLAGS], [\ AC_SUBST([OUR_LDFLAGS], $with_ldflags) # Python -AM_PATH_PYTHON([2.7]) +AM_PATH_PYTHON([3.4]) save_LIBS="$LIBS" diff --git a/po/pakfire.pot b/po/pakfire.pot index 199c27b43..afd30e109 100644 --- a/po/pakfire.pot +++ b/po/pakfire.pot @@ -8,7 +8,7 @@ msgid "" msgstr "" "Project-Id-Version: PACKAGE VERSION\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2013-10-26 21:51+0200\n" +"POT-Creation-Date: 2016-11-26 17:51+0100\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME \n" "Language-Team: LANGUAGE \n" @@ -197,37 +197,37 @@ msgstr "" msgid "%s has got no valid signatures" msgstr "" -#: ../src/pakfire/actions.py:234 +#: ../src/pakfire/actions.py:235 msgid "Could not handle scriptlet of unknown type. Skipping." msgstr "" -#: ../src/pakfire/actions.py:237 +#: ../src/pakfire/actions.py:238 msgid "Executing scriptlet..." msgstr "" -#: ../src/pakfire/actions.py:243 +#: ../src/pakfire/actions.py:244 #, python-format msgid "Cannot run scriptlet because no interpreter is available: %s" msgstr "" -#: ../src/pakfire/actions.py:247 +#: ../src/pakfire/actions.py:248 #, python-format msgid "Cannot run scriptlet because the interpreter is not executable: %s" msgstr "" -#: ../src/pakfire/actions.py:286 +#: ../src/pakfire/actions.py:287 #, python-format msgid "" "The scriptlet returned an error:\n" "%s" msgstr "" -#: ../src/pakfire/actions.py:289 +#: ../src/pakfire/actions.py:290 #, python-format msgid "The scriptlet ran more than %s seconds and was killed." msgstr "" -#: ../src/pakfire/actions.py:293 +#: ../src/pakfire/actions.py:294 #, python-format msgid "" "The scriptlet returned with an unhandled error:\n" @@ -237,42 +237,42 @@ msgstr "" #. This functions creates a fork with then chroots into the #. pakfire root if necessary and then compiles the given scriptlet #. code and runs it. 
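
The build-system changes above drop the bundled _lzma extension (src/_lzma/_lzmamodule.c, a Python 2 port of CPython's _lzma module) together with src/pakfire/lzma.py, and configure.ac now requires Python 3.4 or later, where the liblzma bindings have shipped in the standard library since Python 3.3. A minimal sketch of the standard-library replacement (the payload used here is made up):

    import lzma

    data = b"pakfire" * 1024  # placeholder payload

    # One-shot XZ round trip using the stdlib bindings.
    blob = lzma.compress(data, format=lzma.FORMAT_XZ, preset=6)
    assert lzma.decompress(blob) == data

    # Incremental API, the counterpart of the LZMACompressor type that the
    # deleted _lzmamodule.c used to provide.
    c = lzma.LZMACompressor(format=lzma.FORMAT_XZ, check=lzma.CHECK_CRC64)
    parts = [c.compress(data), c.flush()]
    assert lzma.decompress(b"".join(parts)) == data
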
-#: ../src/pakfire/actions.py:307 +#: ../src/pakfire/actions.py:308 msgid "Executing python scriptlet..." msgstr "" -#: ../src/pakfire/actions.py:332 +#: ../src/pakfire/actions.py:333 #, python-format msgid "Exception occured: %s" msgstr "" -#: ../src/pakfire/actions.py:405 ../src/pakfire/actions.py:452 -#: ../src/pakfire/actions.py:462 ../src/pakfire/actions.py:483 +#: ../src/pakfire/actions.py:406 ../src/pakfire/actions.py:453 +#: ../src/pakfire/actions.py:463 ../src/pakfire/actions.py:484 #, python-format msgid "Running transaction test for %s" msgstr "" -#: ../src/pakfire/actions.py:415 +#: ../src/pakfire/actions.py:416 msgid "Reinstalling" msgstr "" -#: ../src/pakfire/actions.py:417 +#: ../src/pakfire/actions.py:418 msgid "Updating" msgstr "" -#: ../src/pakfire/actions.py:419 +#: ../src/pakfire/actions.py:420 msgid "Downgrading" msgstr "" -#: ../src/pakfire/actions.py:421 +#: ../src/pakfire/actions.py:422 msgid "Installing" msgstr "" -#: ../src/pakfire/actions.py:469 +#: ../src/pakfire/actions.py:470 msgid "Cleanup" msgstr "" -#: ../src/pakfire/actions.py:471 +#: ../src/pakfire/actions.py:472 msgid "Removing" msgstr "" @@ -354,71 +354,71 @@ msgstr "" msgid "Extracting" msgstr "" -#: ../src/pakfire/builder.py:781 +#: ../src/pakfire/builder.py:779 msgid "You cannot run a build when no package was given." msgstr "" -#: ../src/pakfire/builder.py:785 +#: ../src/pakfire/builder.py:783 #, python-format msgid "Could not find makefile in build root: %s" msgstr "" -#: ../src/pakfire/builder.py:815 +#: ../src/pakfire/builder.py:813 msgid "Build failed" msgstr "" -#: ../src/pakfire/builder.py:818 +#: ../src/pakfire/builder.py:816 msgid "Build interrupted" msgstr "" -#: ../src/pakfire/builder.py:824 +#: ../src/pakfire/builder.py:822 msgid "Build failed." msgstr "" #. End here in case of an error. -#: ../src/pakfire/builder.py:840 +#: ../src/pakfire/builder.py:838 msgid "The build command failed. See logfile for details." msgstr "" -#: ../src/pakfire/builder.py:843 +#: ../src/pakfire/builder.py:841 msgid "Running installation test..." msgstr "" -#: ../src/pakfire/builder.py:849 +#: ../src/pakfire/builder.py:847 msgid "Installation test succeeded." msgstr "" #. Create a progressbar. -#: ../src/pakfire/builder.py:892 +#: ../src/pakfire/builder.py:890 msgid "Signing packages..." msgstr "" -#: ../src/pakfire/builder.py:926 +#: ../src/pakfire/builder.py:924 msgid "Dumping package information:" msgstr "" #. Package the result. #. Make all these little package from the build environment. -#: ../src/pakfire/builder.py:1078 +#: ../src/pakfire/builder.py:1076 msgid "Creating packages:" msgstr "" #. Execute the buildscript of this stage. -#: ../src/pakfire/builder.py:1092 +#: ../src/pakfire/builder.py:1090 #, python-format msgid "Running stage %s:" msgstr "" -#: ../src/pakfire/builder.py:1110 +#: ../src/pakfire/builder.py:1108 #, python-format msgid "Could not remove static libraries: %s" msgstr "" -#: ../src/pakfire/builder.py:1116 +#: ../src/pakfire/builder.py:1114 msgid "Compressing man pages did not complete successfully." msgstr "" -#: ../src/pakfire/builder.py:1136 +#: ../src/pakfire/builder.py:1134 msgid "Extracting debuginfo did not complete with success. Aborting build." msgstr "" @@ -1338,12 +1338,12 @@ msgstr "" msgid "Not set" msgstr "" -#: ../src/pakfire/packages/base.py:570 +#: ../src/pakfire/packages/base.py:573 #, python-format msgid "Config file saved as %s." 
msgstr "" -#: ../src/pakfire/packages/base.py:575 +#: ../src/pakfire/packages/base.py:578 #, python-format msgid "Preserving datafile '/%s'" msgstr "" @@ -1553,7 +1553,7 @@ msgstr "" msgid " Solutions:" msgstr "" -#: ../src/pakfire/server.py:279 ../src/pakfire/system.py:149 +#: ../src/pakfire/server.py:279 ../src/pakfire/system.py:153 msgid "Could not be determined" msgstr "" diff --git a/src/_lzma/_lzmamodule.c b/src/_lzma/_lzmamodule.c deleted file mode 100644 index 3a99714cf..000000000 --- a/src/_lzma/_lzmamodule.c +++ /dev/null @@ -1,1098 +0,0 @@ -/* _lzma - Low-level Python interface to liblzma. - - Initial implementation by Per Øyvind Karlsen. - Rewritten by Nadeem Vawda. - -*/ - -#define PY_SSIZE_T_CLEAN - -#include "Python.h" -#include "structmember.h" -#ifdef WITH_THREAD -#include "pythread.h" -#endif - -#include -#include - -#include - - -#ifndef PY_LONG_LONG -#error "This module requires PY_LONG_LONG to be defined" -#endif - - -#ifdef WITH_THREAD -#define ACQUIRE_LOCK(obj) do { \ - if (!PyThread_acquire_lock((obj)->lock, 0)) { \ - Py_BEGIN_ALLOW_THREADS \ - PyThread_acquire_lock((obj)->lock, 1); \ - Py_END_ALLOW_THREADS \ - } } while (0) -#define RELEASE_LOCK(obj) PyThread_release_lock((obj)->lock) -#else -#define ACQUIRE_LOCK(obj) -#define RELEASE_LOCK(obj) -#endif - - -/* Container formats: */ -enum { - FORMAT_AUTO, - FORMAT_XZ, - FORMAT_ALONE, - FORMAT_RAW, -}; - -#define LZMA_CHECK_UNKNOWN (LZMA_CHECK_ID_MAX + 1) - - -typedef struct { - PyObject_HEAD - lzma_stream lzs; - int flushed; -#ifdef WITH_THREAD - PyThread_type_lock lock; -#endif -} Compressor; - -typedef struct { - PyObject_HEAD - lzma_stream lzs; - int check; - char eof; - PyObject *unused_data; -#ifdef WITH_THREAD - PyThread_type_lock lock; -#endif -} Decompressor; - -/* LZMAError class object. */ -static PyObject *Error; - -/* An empty tuple, used by the filter specifier parsing code. */ -static PyObject *empty_tuple; - - -/* Helper functions. */ - -static int -catch_lzma_error(lzma_ret lzret) -{ - switch (lzret) { - case LZMA_OK: - case LZMA_GET_CHECK: - case LZMA_NO_CHECK: - case LZMA_STREAM_END: - return 0; - case LZMA_UNSUPPORTED_CHECK: - PyErr_SetString(Error, "Unsupported integrity check"); - return 1; - case LZMA_MEM_ERROR: - PyErr_NoMemory(); - return 1; - case LZMA_MEMLIMIT_ERROR: - PyErr_SetString(Error, "Memory usage limit exceeded"); - return 1; - case LZMA_FORMAT_ERROR: - PyErr_SetString(Error, "Input format not supported by decoder"); - return 1; - case LZMA_OPTIONS_ERROR: - PyErr_SetString(Error, "Invalid or unsupported options"); - return 1; - case LZMA_DATA_ERROR: - PyErr_SetString(Error, "Corrupt input data"); - return 1; - case LZMA_BUF_ERROR: - PyErr_SetString(Error, "Insufficient buffer space"); - return 1; - case LZMA_PROG_ERROR: - PyErr_SetString(Error, "Internal error"); - return 1; - default: - PyErr_Format(Error, "Unrecognized error from liblzma: %d", lzret); - return 1; - } -} - -#if BUFSIZ < 8192 -#define INITIAL_BUFFER_SIZE 8192 -#else -#define INITIAL_BUFFER_SIZE BUFSIZ -#endif - -static int -grow_buffer(PyObject **buf) -{ - size_t size = PyBytes_GET_SIZE(*buf); - return _PyBytes_Resize(buf, size + (size >> 3) + 6); -} - - -/* Some custom type conversions for PyArg_ParseTupleAndKeywords(), - since the predefined conversion specifiers do not suit our needs: - - uint32_t - the "I" (unsigned int) specifier is the right size, but - silently ignores overflows on conversion. 
- - lzma_mode and lzma_match_finder - these are enumeration types, and - so the size of each is implementation-defined. Worse, different - enum types can be of different sizes within the same program, so - to be strictly correct, we need to define two separate converters. - */ - -#define INT_TYPE_CONVERTER_FUNC(TYPE, FUNCNAME) \ - static int \ - FUNCNAME(PyObject *obj, void *ptr) \ - { \ - unsigned long val; \ - \ - val = PyLong_AsUnsignedLong(obj); \ - if (PyErr_Occurred()) \ - return 0; \ - if ((unsigned long)(TYPE)val != val) { \ - PyErr_SetString(PyExc_OverflowError, \ - "Value too large for " #TYPE " type"); \ - return 0; \ - } \ - *(TYPE *)ptr = val; \ - return 1; \ - } - -INT_TYPE_CONVERTER_FUNC(uint32_t, uint32_converter) -INT_TYPE_CONVERTER_FUNC(lzma_mode, lzma_mode_converter) -INT_TYPE_CONVERTER_FUNC(lzma_match_finder, lzma_mf_converter) - -#undef INT_TYPE_CONVERTER_FUNC - - -/* Filter specifier parsing functions. */ - -static void * -parse_filter_spec_lzma(PyObject *spec) -{ - static char *optnames[] = {"id", "preset", "dict_size", "lc", "lp", - "pb", "mode", "nice_len", "mf", "depth", NULL}; - PyObject *id; - PyObject *preset_obj; - uint32_t preset = LZMA_PRESET_DEFAULT; - lzma_options_lzma *options; - - /* First, fill in default values for all the options using a preset. - Then, override the defaults with any values given by the caller. */ - - preset_obj = PyMapping_GetItemString(spec, "preset"); - if (preset_obj == NULL) { - if (PyErr_ExceptionMatches(PyExc_KeyError)) - PyErr_Clear(); - else - return NULL; - } else { - int ok = uint32_converter(preset_obj, &preset); - Py_DECREF(preset_obj); - if (!ok) - return NULL; - } - - options = (lzma_options_lzma *)PyMem_Malloc(sizeof *options); - if (options == NULL) - return PyErr_NoMemory(); - memset(options, 0, sizeof *options); - - if (lzma_lzma_preset(options, preset)) { - PyMem_Free(options); - PyErr_Format(Error, "lzma_lzma_preset() failed for preset %#x", preset); - return NULL; - } - - if (!PyArg_ParseTupleAndKeywords(empty_tuple, spec, - "|OOO&O&O&O&O&O&O&O&", optnames, - &id, &preset_obj, - uint32_converter, &options->dict_size, - uint32_converter, &options->lc, - uint32_converter, &options->lp, - uint32_converter, &options->pb, - lzma_mode_converter, &options->mode, - uint32_converter, &options->nice_len, - lzma_mf_converter, &options->mf, - uint32_converter, &options->depth)) { - PyErr_SetString(PyExc_ValueError, - "Invalid filter specifier for LZMA filter"); - PyMem_Free(options); - options = NULL; - } - return options; -} - -static void * -parse_filter_spec_delta(PyObject *spec) -{ - static char *optnames[] = {"id", "dist", NULL}; - PyObject *id; - uint32_t dist = 1; - lzma_options_delta *options; - - if (!PyArg_ParseTupleAndKeywords(empty_tuple, spec, "|OO&", optnames, - &id, uint32_converter, &dist)) { - PyErr_SetString(PyExc_ValueError, - "Invalid filter specifier for delta filter"); - return NULL; - } - - options = (lzma_options_delta *)PyMem_Malloc(sizeof *options); - if (options == NULL) - return PyErr_NoMemory(); - memset(options, 0, sizeof *options); - options->type = LZMA_DELTA_TYPE_BYTE; - options->dist = dist; - return options; -} - -static void * -parse_filter_spec_bcj(PyObject *spec) -{ - static char *optnames[] = {"id", "start_offset", NULL}; - PyObject *id; - uint32_t start_offset = 0; - lzma_options_bcj *options; - - if (!PyArg_ParseTupleAndKeywords(empty_tuple, spec, "|OO&", optnames, - &id, uint32_converter, &start_offset)) { - PyErr_SetString(PyExc_ValueError, - "Invalid filter specifier for BCJ 
filter"); - return NULL; - } - - options = (lzma_options_bcj *)PyMem_Malloc(sizeof *options); - if (options == NULL) - return PyErr_NoMemory(); - memset(options, 0, sizeof *options); - options->start_offset = start_offset; - return options; -} - -static void * -parse_filter_spec(lzma_filter *f, PyObject *spec) -{ - PyObject *id_obj; - - if (!PyMapping_Check(spec)) { - PyErr_SetString(PyExc_TypeError, - "Filter specifier must be a dict or dict-like object"); - return NULL; - } - id_obj = PyMapping_GetItemString(spec, "id"); - if (id_obj == NULL) { - if (PyErr_ExceptionMatches(PyExc_KeyError)) - PyErr_SetString(PyExc_ValueError, - "Filter specifier must have an \"id\" entry"); - return NULL; - } - f->id = PyLong_AsUnsignedLongLong(id_obj); - Py_DECREF(id_obj); - if (PyErr_Occurred()) - return NULL; - - switch (f->id) { - case LZMA_FILTER_LZMA1: - case LZMA_FILTER_LZMA2: - f->options = parse_filter_spec_lzma(spec); - return f->options; - case LZMA_FILTER_DELTA: - f->options = parse_filter_spec_delta(spec); - return f->options; - case LZMA_FILTER_X86: - case LZMA_FILTER_POWERPC: - case LZMA_FILTER_IA64: - case LZMA_FILTER_ARM: - case LZMA_FILTER_ARMTHUMB: - case LZMA_FILTER_SPARC: - f->options = parse_filter_spec_bcj(spec); - return f->options; - default: - PyErr_Format(PyExc_ValueError, "Invalid filter ID: %llu", f->id); - return NULL; - } -} - -static void -free_filter_chain(lzma_filter filters[]) -{ - int i; - - for (i = 0; filters[i].id != LZMA_VLI_UNKNOWN; i++) - PyMem_Free(filters[i].options); -} - -static int -parse_filter_chain_spec(lzma_filter filters[], PyObject *filterspecs) -{ - Py_ssize_t i, num_filters; - - num_filters = PySequence_Length(filterspecs); - if (num_filters == -1) - return -1; - if (num_filters > LZMA_FILTERS_MAX) { - PyErr_Format(PyExc_ValueError, - "Too many filters - liblzma supports a maximum of %d", - LZMA_FILTERS_MAX); - return -1; - } - - for (i = 0; i < num_filters; i++) { - int ok = 1; - PyObject *spec = PySequence_GetItem(filterspecs, i); - if (spec == NULL || parse_filter_spec(&filters[i], spec) == NULL) - ok = 0; - Py_XDECREF(spec); - if (!ok) { - filters[i].id = LZMA_VLI_UNKNOWN; - free_filter_chain(filters); - return -1; - } - } - filters[num_filters].id = LZMA_VLI_UNKNOWN; - return 0; -} - - -/* LZMACompressor class. 
*/ - -static PyObject * -compress(Compressor *c, uint8_t *data, size_t len, lzma_action action) -{ - size_t data_size = 0; - PyObject *result; - - result = PyBytes_FromStringAndSize(NULL, INITIAL_BUFFER_SIZE); - if (result == NULL) - return NULL; - c->lzs.next_in = data; - c->lzs.avail_in = len; - c->lzs.next_out = (uint8_t *)PyBytes_AS_STRING(result); - c->lzs.avail_out = PyBytes_GET_SIZE(result); - for (;;) { - lzma_ret lzret; - - Py_BEGIN_ALLOW_THREADS - lzret = lzma_code(&c->lzs, action); - data_size = (char *)c->lzs.next_out - PyBytes_AS_STRING(result); - Py_END_ALLOW_THREADS - if (catch_lzma_error(lzret)) - goto error; - if ((action == LZMA_RUN && c->lzs.avail_in == 0) || - (action == LZMA_FINISH && lzret == LZMA_STREAM_END)) { - break; - } else if (c->lzs.avail_out == 0) { - if (grow_buffer(&result) == -1) - goto error; - c->lzs.next_out = (uint8_t *)PyBytes_AS_STRING(result) + data_size; - c->lzs.avail_out = PyBytes_GET_SIZE(result) - data_size; - } - } - if (data_size != PyBytes_GET_SIZE(result)) - if (_PyBytes_Resize(&result, data_size) == -1) - goto error; - return result; - -error: - Py_XDECREF(result); - return NULL; -} - -PyDoc_STRVAR(Compressor_compress_doc, -"compress(data) -> bytes\n" -"\n" -"Provide data to the compressor object. Returns a chunk of\n" -"compressed data if possible, or b\"\" otherwise.\n" -"\n" -"When you have finished providing data to the compressor, call the\n" -"flush() method to finish the conversion process.\n"); - -static PyObject * -Compressor_compress(Compressor *self, PyObject *args) -{ - Py_buffer buffer; - PyObject *result = NULL; - - if (!PyArg_ParseTuple(args, "s*:compress", &buffer)) - return NULL; - - ACQUIRE_LOCK(self); - if (self->flushed) - PyErr_SetString(PyExc_ValueError, "Compressor has been flushed"); - else - result = compress(self, buffer.buf, buffer.len, LZMA_RUN); - RELEASE_LOCK(self); - PyBuffer_Release(&buffer); - return result; -} - -PyDoc_STRVAR(Compressor_flush_doc, -"flush() -> bytes\n" -"\n" -"Finish the compression process. 
Returns the compressed data left\n" -"in internal buffers.\n" -"\n" -"The compressor object cannot be used after this method is called.\n"); - -static PyObject * -Compressor_flush(Compressor *self, PyObject *noargs) -{ - PyObject *result = NULL; - - ACQUIRE_LOCK(self); - if (self->flushed) { - PyErr_SetString(PyExc_ValueError, "Repeated call to flush()"); - } else { - self->flushed = 1; - result = compress(self, NULL, 0, LZMA_FINISH); - } - RELEASE_LOCK(self); - return result; -} - -static int -Compressor_init_xz(lzma_stream *lzs, int check, uint32_t preset, - PyObject *filterspecs) -{ - lzma_ret lzret; - - if (filterspecs == Py_None) { - lzret = lzma_easy_encoder(lzs, preset, check); - } else { - lzma_filter filters[LZMA_FILTERS_MAX + 1]; - - if (parse_filter_chain_spec(filters, filterspecs) == -1) - return -1; - lzret = lzma_stream_encoder(lzs, filters, check); - free_filter_chain(filters); - } - if (catch_lzma_error(lzret)) - return -1; - else - return 0; -} - -static int -Compressor_init_alone(lzma_stream *lzs, uint32_t preset, PyObject *filterspecs) -{ - lzma_ret lzret; - - if (filterspecs == Py_None) { - lzma_options_lzma options; - - if (lzma_lzma_preset(&options, preset)) { - PyErr_Format(Error, "Invalid compression preset: %#x", preset); - return -1; - } - lzret = lzma_alone_encoder(lzs, &options); - } else { - lzma_filter filters[LZMA_FILTERS_MAX + 1]; - - if (parse_filter_chain_spec(filters, filterspecs) == -1) - return -1; - if (filters[0].id == LZMA_FILTER_LZMA1 && - filters[1].id == LZMA_VLI_UNKNOWN) { - lzret = lzma_alone_encoder(lzs, filters[0].options); - } else { - PyErr_SetString(PyExc_ValueError, - "Invalid filter chain for FORMAT_ALONE - " - "must be a single LZMA1 filter"); - lzret = LZMA_PROG_ERROR; - } - free_filter_chain(filters); - } - if (PyErr_Occurred() || catch_lzma_error(lzret)) - return -1; - else - return 0; -} - -static int -Compressor_init_raw(lzma_stream *lzs, PyObject *filterspecs) -{ - lzma_filter filters[LZMA_FILTERS_MAX + 1]; - lzma_ret lzret; - - if (filterspecs == Py_None) { - PyErr_SetString(PyExc_ValueError, - "Must specify filters for FORMAT_RAW"); - return -1; - } - if (parse_filter_chain_spec(filters, filterspecs) == -1) - return -1; - lzret = lzma_raw_encoder(lzs, filters); - free_filter_chain(filters); - if (catch_lzma_error(lzret)) - return -1; - else - return 0; -} - -static int -Compressor_init(Compressor *self, PyObject *args, PyObject *kwargs) -{ - static char *arg_names[] = {"format", "check", "preset", "filters", NULL}; - int format = FORMAT_XZ; - int check = -1; - uint32_t preset = LZMA_PRESET_DEFAULT; - PyObject *preset_obj = Py_None; - PyObject *filterspecs = Py_None; - - if (!PyArg_ParseTupleAndKeywords(args, kwargs, - "|iiOO:LZMACompressor", arg_names, - &format, &check, &preset_obj, - &filterspecs)) - return -1; - - if (format != FORMAT_XZ && check != -1 && check != LZMA_CHECK_NONE) { - PyErr_SetString(PyExc_ValueError, - "Integrity checks are only supported by FORMAT_XZ"); - return -1; - } - - if (preset_obj != Py_None && filterspecs != Py_None) { - PyErr_SetString(PyExc_ValueError, - "Cannot specify both preset and filter chain"); - return -1; - } - - if (preset_obj != Py_None) - if (!uint32_converter(preset_obj, &preset)) - return -1; - -#ifdef WITH_THREAD - self->lock = PyThread_allocate_lock(); - if (self->lock == NULL) { - PyErr_SetString(PyExc_MemoryError, "Unable to allocate lock"); - return -1; - } -#endif - - self->flushed = 0; - switch (format) { - case FORMAT_XZ: - if (check == -1) - check = LZMA_CHECK_CRC64; - if 
(Compressor_init_xz(&self->lzs, check, preset, filterspecs) != 0) - break; - return 0; - - case FORMAT_ALONE: - if (Compressor_init_alone(&self->lzs, preset, filterspecs) != 0) - break; - return 0; - - case FORMAT_RAW: - if (Compressor_init_raw(&self->lzs, filterspecs) != 0) - break; - return 0; - - default: - PyErr_Format(PyExc_ValueError, - "Invalid container format: %d", format); - break; - } - -#ifdef WITH_THREAD - PyThread_free_lock(self->lock); - self->lock = NULL; -#endif - return -1; -} - -static void -Compressor_dealloc(Compressor *self) -{ - lzma_end(&self->lzs); -#ifdef WITH_THREAD - if (self->lock != NULL) - PyThread_free_lock(self->lock); -#endif - Py_TYPE(self)->tp_free((PyObject *)self); -} - -static PyMethodDef Compressor_methods[] = { - {"compress", (PyCFunction)Compressor_compress, METH_VARARGS, - Compressor_compress_doc}, - {"flush", (PyCFunction)Compressor_flush, METH_NOARGS, - Compressor_flush_doc}, - {NULL} -}; - -PyDoc_STRVAR(Compressor_doc, -"LZMACompressor(format=FORMAT_XZ, check=-1, preset=None, filters=None)\n" -"\n" -"Create a compressor object for compressing data incrementally.\n" -"\n" -"format specifies the container format to use for the output. This can\n" -"be FORMAT_XZ (default), FORMAT_ALONE, or FORMAT_RAW.\n" -"\n" -"check specifies the integrity check to use. For FORMAT_XZ, the default\n" -"is CHECK_CRC64. FORMAT_ALONE and FORMAT_RAW do not suport integrity\n" -"checks; for these formats, check must be omitted, or be CHECK_NONE.\n" -"\n" -"The settings used by the compressor can be specified either as a\n" -"preset compression level (with the 'preset' argument), or in detail\n" -"as a custom filter chain (with the 'filters' argument). For FORMAT_XZ\n" -"and FORMAT_ALONE, the default is to use the PRESET_DEFAULT preset\n" -"level. For FORMAT_RAW, the caller must always specify a filter chain;\n" -"the raw compressor does not support preset compression levels.\n" -"\n" -"preset (if provided) should be an integer in the range 0-9, optionally\n" -"OR-ed with the constant PRESET_EXTREME.\n" -"\n" -"filters (if provided) should be a sequence of dicts. Each dict should\n" -"have an entry for \"id\" indicating the ID of the filter, plus\n" -"additional entries for options to the filter.\n" -"\n" -"For one-shot compression, use the compress() function instead.\n"); - -static PyTypeObject Compressor_type = { - PyVarObject_HEAD_INIT(NULL, 0) - "_lzma.LZMACompressor", /* tp_name */ - sizeof(Compressor), /* tp_basicsize */ - 0, /* tp_itemsize */ - (destructor)Compressor_dealloc, /* tp_dealloc */ - 0, /* tp_print */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_reserved */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - 0, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT, /* tp_flags */ - Compressor_doc, /* tp_doc */ - 0, /* tp_traverse */ - 0, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - Compressor_methods, /* tp_methods */ - 0, /* tp_members */ - 0, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* tp_dictoffset */ - (initproc)Compressor_init, /* tp_init */ - 0, /* tp_alloc */ - PyType_GenericNew, /* tp_new */ -}; - - -/* LZMADecompressor class. 
*/ - -static PyObject * -decompress(Decompressor *d, uint8_t *data, size_t len) -{ - size_t data_size = 0; - PyObject *result; - - result = PyBytes_FromStringAndSize(NULL, INITIAL_BUFFER_SIZE); - if (result == NULL) - return NULL; - d->lzs.next_in = data; - d->lzs.avail_in = len; - d->lzs.next_out = (uint8_t *)PyBytes_AS_STRING(result); - d->lzs.avail_out = PyBytes_GET_SIZE(result); - for (;;) { - lzma_ret lzret; - - Py_BEGIN_ALLOW_THREADS - lzret = lzma_code(&d->lzs, LZMA_RUN); - data_size = (char *)d->lzs.next_out - PyBytes_AS_STRING(result); - Py_END_ALLOW_THREADS - if (catch_lzma_error(lzret)) - goto error; - if (lzret == LZMA_GET_CHECK || lzret == LZMA_NO_CHECK) - d->check = lzma_get_check(&d->lzs); - if (lzret == LZMA_STREAM_END) { - d->eof = 1; - if (d->lzs.avail_in > 0) { - Py_CLEAR(d->unused_data); - d->unused_data = PyBytes_FromStringAndSize( - (char *)d->lzs.next_in, d->lzs.avail_in); - if (d->unused_data == NULL) - goto error; - } - break; - } else if (d->lzs.avail_in == 0) { - break; - } else if (d->lzs.avail_out == 0) { - if (grow_buffer(&result) == -1) - goto error; - d->lzs.next_out = (uint8_t *)PyBytes_AS_STRING(result) + data_size; - d->lzs.avail_out = PyBytes_GET_SIZE(result) - data_size; - } - } - if (data_size != PyBytes_GET_SIZE(result)) - if (_PyBytes_Resize(&result, data_size) == -1) - goto error; - return result; - -error: - Py_XDECREF(result); - return NULL; -} - -PyDoc_STRVAR(Decompressor_decompress_doc, -"decompress(data) -> bytes\n" -"\n" -"Provide data to the decompressor object. Returns a chunk of\n" -"decompressed data if possible, or b\"\" otherwise.\n" -"\n" -"Attempting to decompress data after the end of the stream is\n" -"reached raises an EOFError. Any data found after the end of the\n" -"stream is ignored, and saved in the unused_data attribute.\n"); - -static PyObject * -Decompressor_decompress(Decompressor *self, PyObject *args) -{ - Py_buffer buffer; - PyObject *result = NULL; - - if (!PyArg_ParseTuple(args, "s*:decompress", &buffer)) - return NULL; - - ACQUIRE_LOCK(self); - if (self->eof) - PyErr_SetString(PyExc_EOFError, "Already at end of stream"); - else - result = decompress(self, buffer.buf, buffer.len); - RELEASE_LOCK(self); - PyBuffer_Release(&buffer); - return result; -} - -static int -Decompressor_init_raw(lzma_stream *lzs, PyObject *filterspecs) -{ - lzma_filter filters[LZMA_FILTERS_MAX + 1]; - lzma_ret lzret; - - if (parse_filter_chain_spec(filters, filterspecs) == -1) - return -1; - lzret = lzma_raw_decoder(lzs, filters); - free_filter_chain(filters); - if (catch_lzma_error(lzret)) - return -1; - else - return 0; -} - -static int -Decompressor_init(Decompressor *self, PyObject *args, PyObject *kwargs) -{ - static char *arg_names[] = {"format", "memlimit", "filters", NULL}; - const uint32_t decoder_flags = LZMA_TELL_ANY_CHECK | LZMA_TELL_NO_CHECK; - int format = FORMAT_AUTO; - uint64_t memlimit = UINT64_MAX; - PyObject *memlimit_obj = Py_None; - PyObject *filterspecs = Py_None; - lzma_ret lzret; - - if (!PyArg_ParseTupleAndKeywords(args, kwargs, - "|iOO:LZMADecompressor", arg_names, - &format, &memlimit_obj, &filterspecs)) - return -1; - - if (memlimit_obj != Py_None) { - if (format == FORMAT_RAW) { - PyErr_SetString(PyExc_ValueError, - "Cannot specify memory limit with FORMAT_RAW"); - return -1; - } - memlimit = PyLong_AsUnsignedLongLong(memlimit_obj); - if (PyErr_Occurred()) - return -1; - } - - if (format == FORMAT_RAW && filterspecs == Py_None) { - PyErr_SetString(PyExc_ValueError, - "Must specify filters for FORMAT_RAW"); - return 
-1; - } else if (format != FORMAT_RAW && filterspecs != Py_None) { - PyErr_SetString(PyExc_ValueError, - "Cannot specify filters except with FORMAT_RAW"); - return -1; - } - -#ifdef WITH_THREAD - self->lock = PyThread_allocate_lock(); - if (self->lock == NULL) { - PyErr_SetString(PyExc_MemoryError, "Unable to allocate lock"); - return -1; - } -#endif - - self->check = LZMA_CHECK_UNKNOWN; - self->unused_data = PyBytes_FromStringAndSize(NULL, 0); - if (self->unused_data == NULL) - goto error; - - switch (format) { - case FORMAT_AUTO: - lzret = lzma_auto_decoder(&self->lzs, memlimit, decoder_flags); - if (catch_lzma_error(lzret)) - break; - return 0; - - case FORMAT_XZ: - lzret = lzma_stream_decoder(&self->lzs, memlimit, decoder_flags); - if (catch_lzma_error(lzret)) - break; - return 0; - - case FORMAT_ALONE: - self->check = LZMA_CHECK_NONE; - lzret = lzma_alone_decoder(&self->lzs, memlimit); - if (catch_lzma_error(lzret)) - break; - return 0; - - case FORMAT_RAW: - self->check = LZMA_CHECK_NONE; - if (Decompressor_init_raw(&self->lzs, filterspecs) == -1) - break; - return 0; - - default: - PyErr_Format(PyExc_ValueError, - "Invalid container format: %d", format); - break; - } - -error: - Py_CLEAR(self->unused_data); -#ifdef WITH_THREAD - PyThread_free_lock(self->lock); - self->lock = NULL; -#endif - return -1; -} - -static void -Decompressor_dealloc(Decompressor *self) -{ - lzma_end(&self->lzs); - Py_CLEAR(self->unused_data); -#ifdef WITH_THREAD - if (self->lock != NULL) - PyThread_free_lock(self->lock); -#endif - Py_TYPE(self)->tp_free((PyObject *)self); -} - -static PyMethodDef Decompressor_methods[] = { - {"decompress", (PyCFunction)Decompressor_decompress, METH_VARARGS, - Decompressor_decompress_doc}, - {NULL} -}; - -PyDoc_STRVAR(Decompressor_check_doc, -"ID of the integrity check used by the input stream."); - -PyDoc_STRVAR(Decompressor_eof_doc, -"True if the end-of-stream marker has been reached."); - -PyDoc_STRVAR(Decompressor_unused_data_doc, -"Data found after the end of the compressed stream."); - -static PyMemberDef Decompressor_members[] = { - {"check", T_INT, offsetof(Decompressor, check), READONLY, - Decompressor_check_doc}, - {"eof", T_BOOL, offsetof(Decompressor, eof), READONLY, - Decompressor_eof_doc}, - {"unused_data", T_OBJECT_EX, offsetof(Decompressor, unused_data), READONLY, - Decompressor_unused_data_doc}, - {NULL} -}; - -PyDoc_STRVAR(Decompressor_doc, -"LZMADecompressor(format=FORMAT_AUTO, memlimit=None, filters=None)\n" -"\n" -"Create a decompressor object for decompressing data incrementally.\n" -"\n" -"format specifies the container format of the input stream. If this is\n" -"FORMAT_AUTO (the default), the decompressor will automatically detect\n" -"whether the input is FORMAT_XZ or FORMAT_ALONE. Streams created with\n" -"FORMAT_RAW cannot be autodetected.\n" -"\n" -"memlimit can be specified to limit the amount of memory used by the\n" -"decompressor. This will cause decompression to fail if the input\n" -"cannot be decompressed within the given limit.\n" -"\n" -"filters specifies a custom filter chain. This argument is required for\n" -"FORMAT_RAW, and not accepted with any other format. 
When provided,\n" -"this should be a sequence of dicts, each indicating the ID and options\n" -"for a single filter.\n" -"\n" -"For one-shot decompression, use the decompress() function instead.\n"); - -static PyTypeObject Decompressor_type = { - PyVarObject_HEAD_INIT(NULL, 0) - "_lzma.LZMADecompressor", /* tp_name */ - sizeof(Decompressor), /* tp_basicsize */ - 0, /* tp_itemsize */ - (destructor)Decompressor_dealloc, /* tp_dealloc */ - 0, /* tp_print */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_reserved */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - 0, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT, /* tp_flags */ - Decompressor_doc, /* tp_doc */ - 0, /* tp_traverse */ - 0, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - Decompressor_methods, /* tp_methods */ - Decompressor_members, /* tp_members */ - 0, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* tp_dictoffset */ - (initproc)Decompressor_init, /* tp_init */ - 0, /* tp_alloc */ - PyType_GenericNew, /* tp_new */ -}; - - -/* Module-level functions. */ - -PyDoc_STRVAR(check_is_supported_doc, -"check_is_supported(check_id) -> bool\n" -"\n" -"Test whether the given integrity check is supported.\n" -"\n" -"Always returns True for CHECK_NONE and CHECK_CRC32.\n"); - -static PyObject * -check_is_supported(PyObject *self, PyObject *args) -{ - int check_id; - - if (!PyArg_ParseTuple(args, "i:check_is_supported", &check_id)) - return NULL; - - return PyBool_FromLong(lzma_check_is_supported(check_id)); -} - - -/* Module initialization. */ - -static PyMethodDef module_methods[] = { - {"check_is_supported", (PyCFunction)check_is_supported, - METH_VARARGS, check_is_supported_doc}, - {NULL} -}; - -/* Some of our constants are more than 32 bits wide, so PyModule_AddIntConstant - would not work correctly on platforms with 32-bit longs. 
*/ -static int -module_add_int_constant(PyObject *m, const char *name, PY_LONG_LONG value) -{ - PyObject *o = PyLong_FromLongLong(value); - if (o == NULL) - return -1; - if (PyModule_AddObject(m, name, o) == 0) - return 0; - Py_DECREF(o); - return -1; -} - -#define ADD_INT_PREFIX_MACRO(m, macro) \ - module_add_int_constant(m, #macro, LZMA_ ## macro) - -void init_lzma(void) -{ - PyObject *m; - - empty_tuple = PyTuple_New(0); - if (empty_tuple == NULL) - return; - - m = Py_InitModule("_lzma", module_methods); - if (m == NULL) - return; - - if (PyModule_AddIntMacro(m, FORMAT_AUTO) == -1 || - PyModule_AddIntMacro(m, FORMAT_XZ) == -1 || - PyModule_AddIntMacro(m, FORMAT_ALONE) == -1 || - PyModule_AddIntMacro(m, FORMAT_RAW) == -1 || - ADD_INT_PREFIX_MACRO(m, CHECK_NONE) == -1 || - ADD_INT_PREFIX_MACRO(m, CHECK_CRC32) == -1 || - ADD_INT_PREFIX_MACRO(m, CHECK_CRC64) == -1 || - ADD_INT_PREFIX_MACRO(m, CHECK_SHA256) == -1 || - ADD_INT_PREFIX_MACRO(m, CHECK_ID_MAX) == -1 || - ADD_INT_PREFIX_MACRO(m, CHECK_UNKNOWN) == -1 || - ADD_INT_PREFIX_MACRO(m, FILTER_LZMA1) == -1 || - ADD_INT_PREFIX_MACRO(m, FILTER_LZMA2) == -1 || - ADD_INT_PREFIX_MACRO(m, FILTER_DELTA) == -1 || - ADD_INT_PREFIX_MACRO(m, FILTER_X86) == -1 || - ADD_INT_PREFIX_MACRO(m, FILTER_IA64) == -1 || - ADD_INT_PREFIX_MACRO(m, FILTER_ARM) == -1 || - ADD_INT_PREFIX_MACRO(m, FILTER_ARMTHUMB) == -1 || - ADD_INT_PREFIX_MACRO(m, FILTER_SPARC) == -1 || - ADD_INT_PREFIX_MACRO(m, FILTER_POWERPC) == -1 || - ADD_INT_PREFIX_MACRO(m, MF_HC3) == -1 || - ADD_INT_PREFIX_MACRO(m, MF_HC4) == -1 || - ADD_INT_PREFIX_MACRO(m, MF_BT2) == -1 || - ADD_INT_PREFIX_MACRO(m, MF_BT3) == -1 || - ADD_INT_PREFIX_MACRO(m, MF_BT4) == -1 || - ADD_INT_PREFIX_MACRO(m, MODE_FAST) == -1 || - ADD_INT_PREFIX_MACRO(m, MODE_NORMAL) == -1 || - ADD_INT_PREFIX_MACRO(m, PRESET_DEFAULT) == -1 || - ADD_INT_PREFIX_MACRO(m, PRESET_EXTREME) == -1) - return; - - Error = PyErr_NewExceptionWithDoc( - "_lzma.LZMAError", "Call to liblzma failed.", NULL, NULL); - if (Error == NULL) - return; - Py_INCREF(Error); - if (PyModule_AddObject(m, "LZMAError", Error) == -1) - return; - - if (PyType_Ready(&Compressor_type) == -1) - return; - Py_INCREF(&Compressor_type); - if (PyModule_AddObject(m, "LZMACompressor", - (PyObject *)&Compressor_type) == -1) - return; - - if (PyType_Ready(&Decompressor_type) == -1) - return; - Py_INCREF(&Decompressor_type); - if (PyModule_AddObject(m, "LZMADecompressor", - (PyObject *)&Decompressor_type) == -1) - return; - - return m; -} diff --git a/src/_pakfire/_pakfiremodule.c b/src/_pakfire/_pakfiremodule.c index fc2d953c6..84702e5ba 100644 --- a/src/_pakfire/_pakfiremodule.c +++ b/src/_pakfire/_pakfiremodule.c @@ -191,89 +191,96 @@ static PyMethodDef Transaction_methods[] = { { NULL, NULL, 0, NULL } }; -void init_pakfire(void) { +static struct PyModuleDef moduledef = { + .m_base = PyModuleDef_HEAD_INIT, + .m_name = "_pakfire", + .m_size = -1, + .m_methods = pakfireModuleMethods, +}; + +PyMODINIT_FUNC PyInit__pakfire(void) { /* Initialize locale */ setlocale(LC_ALL, ""); bindtextdomain(PACKAGE_TARNAME, "/usr/share/locale"); textdomain(PACKAGE_TARNAME); - /* Load the python module */ - PyObject *m, *d; - - m = Py_InitModule("_pakfire", pakfireModuleMethods); + // Create the module + PyObject* module = PyModule_Create(&moduledef); + if (!module) + return NULL; // Pool PoolType.tp_methods = Pool_methods; if (PyType_Ready(&PoolType) < 0) - return; + return NULL; Py_INCREF(&PoolType); - PyModule_AddObject(m, "Pool", (PyObject *)&PoolType); + PyModule_AddObject(module, "Pool", 
(PyObject *)&PoolType); // Problem ProblemType.tp_methods = Problem_methods; if (PyType_Ready(&ProblemType) < 0) - return; + return NULL; Py_INCREF(&ProblemType); - PyModule_AddObject(m, "Problem", (PyObject *)&ProblemType); + PyModule_AddObject(module, "Problem", (PyObject *)&ProblemType); // Repo RepoType.tp_methods = Repo_methods; if (PyType_Ready(&RepoType) < 0) - return; + return NULL; Py_INCREF(&RepoType); - PyModule_AddObject(m, "Repo", (PyObject *)&RepoType); + PyModule_AddObject(module, "Repo", (PyObject *)&RepoType); // Solvable SolvableType.tp_methods = Solvable_methods; if (PyType_Ready(&SolvableType) < 0) - return; + return NULL; Py_INCREF(&SolvableType); - PyModule_AddObject(m, "Solvable", (PyObject *)&SolvableType); + PyModule_AddObject(module, "Solvable", (PyObject *)&SolvableType); // Relation RelationType.tp_methods = Relation_methods; if (PyType_Ready(&RelationType) < 0) - return; + return NULL; Py_INCREF(&RelationType); - PyModule_AddObject(m, "Relation", (PyObject *)&RelationType); + PyModule_AddObject(module, "Relation", (PyObject *)&RelationType); // Request RequestType.tp_methods = Request_methods; if (PyType_Ready(&RequestType) < 0) - return; + return NULL; Py_INCREF(&RequestType); - PyModule_AddObject(m, "Request", (PyObject *)&RequestType); + PyModule_AddObject(module, "Request", (PyObject *)&RequestType); // Solution SolutionType.tp_methods = Solution_methods; if (PyType_Ready(&SolutionType) < 0) - return; + return NULL; Py_INCREF(&SolutionType); - PyModule_AddObject(m, "Solution", (PyObject *)&SolutionType); + PyModule_AddObject(module, "Solution", (PyObject *)&SolutionType); // Solver SolverType.tp_methods = Solver_methods; if (PyType_Ready(&SolverType) < 0) - return; + return NULL; Py_INCREF(&SolverType); - PyModule_AddObject(m, "Solver", (PyObject *)&SolverType); + PyModule_AddObject(module, "Solver", (PyObject *)&SolverType); // Step StepType.tp_methods = Step_methods; if (PyType_Ready(&StepType) < 0) - return; + return NULL; Py_INCREF(&StepType); - PyModule_AddObject(m, "Step", (PyObject *)&StepType); + PyModule_AddObject(module, "Step", (PyObject *)&StepType); // Transaction TransactionType.tp_methods = Transaction_methods; if (PyType_Ready(&TransactionType) < 0) - return; + return NULL; Py_INCREF(&TransactionType); - PyModule_AddObject(m, "Transaction", (PyObject *)&TransactionType); + PyModule_AddObject(module, "Transaction", (PyObject *)&TransactionType); // Add constants - d = PyModule_GetDict(m); + PyObject* d = PyModule_GetDict(module); // Personalities PyDict_SetItemString(d, "PERSONALITY_LINUX", Py_BuildValue("i", PER_LINUX)); @@ -332,4 +339,6 @@ void init_pakfire(void) { PyDict_SetItemString(d, "SOLVER_FLAG_NO_UPDATEPROVIDE", Py_BuildValue("i", SOLVER_FLAG_NO_UPDATEPROVIDE)); PyDict_SetItemString(d, "SOLVER_FLAG_SPLITPROVIDES", Py_BuildValue("i", SOLVER_FLAG_SPLITPROVIDES)); PyDict_SetItemString(d, "SOLVER_FLAG_IGNORE_RECOMMENDED", Py_BuildValue("i", SOLVER_FLAG_IGNORE_RECOMMENDED)); + + return module; } diff --git a/src/_pakfire/pool.c b/src/_pakfire/pool.c index fa23fadb7..ed69a2cb7 100644 --- a/src/_pakfire/pool.c +++ b/src/_pakfire/pool.c @@ -30,7 +30,7 @@ #include "solvable.h" PyTypeObject PoolType = { - PyObject_HEAD_INIT(NULL) + PyVarObject_HEAD_INIT(NULL, 0) tp_name: "_pakfire.Pool", tp_basicsize: sizeof(PoolObject), tp_flags: Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, @@ -71,7 +71,7 @@ PyObject* Pool_new(PyTypeObject *type, PyObject *args, PyObject *kwds) { PyObject *Pool_dealloc(PoolObject *self) { pool_free(self->_pool); - 
self->ob_type->tp_free((PyObject *)self); + Py_TYPE(self)->tp_free((PyObject *)self); Py_RETURN_NONE; } diff --git a/src/_pakfire/problem.c b/src/_pakfire/problem.c index 6dbdf5cfc..3a15cee17 100644 --- a/src/_pakfire/problem.c +++ b/src/_pakfire/problem.c @@ -29,7 +29,7 @@ #include "solver.h" PyTypeObject ProblemType = { - PyObject_HEAD_INIT(NULL) + PyVarObject_HEAD_INIT(NULL, 0) tp_name: "_pakfire.Problem", tp_basicsize: sizeof(ProblemObject), tp_flags: Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, @@ -64,7 +64,7 @@ PyObject* Problem_new(PyTypeObject *type, PyObject *args, PyObject *kwds) { } PyObject *Problem_dealloc(ProblemObject *self) { - self->ob_type->tp_free((PyObject *)self); + Py_TYPE(self)->tp_free((PyObject *)self); Py_RETURN_NONE; } diff --git a/src/_pakfire/relation.c b/src/_pakfire/relation.c index ff77542ad..ec5167886 100644 --- a/src/_pakfire/relation.c +++ b/src/_pakfire/relation.c @@ -24,7 +24,7 @@ #define REL_NONE 0 PyTypeObject RelationType = { - PyObject_HEAD_INIT(NULL) + PyVarObject_HEAD_INIT(NULL, 0) tp_name: "_pakfire.Relation", tp_basicsize: sizeof(RelationObject), tp_flags: Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, @@ -64,7 +64,7 @@ PyObject* Relation_new(PyTypeObject *type, PyObject *args, PyObject *kwds) { } PyObject *Relation_dealloc(RelationObject *self) { - self->ob_type->tp_free((PyObject *)self); + Py_TYPE(self)->tp_free((PyObject *)self); Py_RETURN_NONE; } diff --git a/src/_pakfire/repo.c b/src/_pakfire/repo.c index 9e91bd370..ac317195e 100644 --- a/src/_pakfire/repo.c +++ b/src/_pakfire/repo.c @@ -31,7 +31,7 @@ #include "solvable.h" PyTypeObject RepoType = { - PyObject_HEAD_INIT(NULL) + PyVarObject_HEAD_INIT(NULL, 0) tp_name: "_pakfire.Repo", tp_basicsize: sizeof(RepoObject), tp_flags: Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, @@ -66,7 +66,7 @@ PyObject* Repo_new(PyTypeObject *type, PyObject *args, PyObject *kwds) { } PyObject *Repo_dealloc(RepoObject *self) { - self->ob_type->tp_free((PyObject *)self); + Py_TYPE(self)->tp_free((PyObject *)self); Py_RETURN_NONE; } diff --git a/src/_pakfire/request.c b/src/_pakfire/request.c index 7d7c5b200..a899b9ce7 100644 --- a/src/_pakfire/request.c +++ b/src/_pakfire/request.c @@ -26,7 +26,7 @@ #include PyTypeObject RequestType = { - PyObject_HEAD_INIT(NULL) + PyVarObject_HEAD_INIT(NULL, 0) tp_name: "_pakfire.Request", tp_basicsize: sizeof(RequestObject), tp_flags: Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, @@ -58,7 +58,7 @@ PyObject* Request_new(PyTypeObject *type, PyObject *args, PyObject *kwds) { } PyObject *Request_dealloc(RequestObject *self) { - self->ob_type->tp_free((PyObject *)self); + Py_TYPE(self)->tp_free((PyObject *)self); Py_RETURN_NONE; } diff --git a/src/_pakfire/solution.c b/src/_pakfire/solution.c index 36de8d832..22651e610 100644 --- a/src/_pakfire/solution.c +++ b/src/_pakfire/solution.c @@ -28,7 +28,7 @@ #include "solution.h" PyTypeObject SolutionType = { - PyObject_HEAD_INIT(NULL) + PyVarObject_HEAD_INIT(NULL, 0) tp_name: "_pakfire.Solution", tp_basicsize: sizeof(SolutionObject), tp_flags: Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, @@ -59,7 +59,7 @@ PyObject *Solution_new(PyTypeObject *type, PyObject *args, PyObject *kwds) { } PyObject *Solution_dealloc(SolutionObject *self) { - self->ob_type->tp_free((PyObject *)self); + Py_TYPE(self)->tp_free((PyObject *)self); Py_RETURN_NONE; } diff --git a/src/_pakfire/solvable.c b/src/_pakfire/solvable.c index 04c605d65..999ff4357 100644 --- a/src/_pakfire/solvable.c +++ b/src/_pakfire/solvable.c @@ -26,7 +26,7 @@ #include "solvable.h" PyTypeObject SolvableType 
= { - PyObject_HEAD_INIT(NULL) + PyVarObject_HEAD_INIT(NULL, 0) tp_name: "_pakfire.Solvable", tp_basicsize: sizeof(SolvableObject), tp_flags: Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, @@ -71,7 +71,7 @@ PyObject* Solvable_new(PyTypeObject *type, PyObject *args, PyObject *kwds) { } PyObject *Solvable_dealloc(SolvableObject *self) { - self->ob_type->tp_free((PyObject *)self); + Py_TYPE(self)->tp_free((PyObject *)self); Py_RETURN_NONE; } diff --git a/src/_pakfire/solver.c b/src/_pakfire/solver.c index 87704df7e..aaca762b8 100644 --- a/src/_pakfire/solver.c +++ b/src/_pakfire/solver.c @@ -26,7 +26,7 @@ #include PyTypeObject SolverType = { - PyObject_HEAD_INIT(NULL) + PyVarObject_HEAD_INIT(NULL, 0) tp_name: "_pakfire.Solver", tp_basicsize: sizeof(SolverObject), tp_flags: Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, @@ -64,7 +64,7 @@ PyObject* Solver_new(PyTypeObject *type, PyObject *args, PyObject *kwds) { PyObject *Solver_dealloc(SolverObject *self) { solver_free(self->_solver); - self->ob_type->tp_free((PyObject *)self); + Py_TYPE(self)->tp_free((PyObject *)self); Py_RETURN_NONE; } diff --git a/src/_pakfire/step.c b/src/_pakfire/step.c index 50e5ec060..32c6b68c2 100644 --- a/src/_pakfire/step.c +++ b/src/_pakfire/step.c @@ -23,7 +23,7 @@ #include "transaction.h" PyTypeObject StepType = { - PyObject_HEAD_INIT(NULL) + PyVarObject_HEAD_INIT(NULL, 0) tp_name: "_pakfire.Step", tp_basicsize: sizeof(StepObject), tp_flags: Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, @@ -57,7 +57,7 @@ PyObject* Step_new(PyTypeObject *type, PyObject *args, PyObject *kwds) { } PyObject *Step_dealloc(StepObject *self) { - self->ob_type->tp_free((PyObject *)self); + Py_TYPE(self)->tp_free((PyObject *)self); Py_RETURN_NONE; } diff --git a/src/_pakfire/transaction.c b/src/_pakfire/transaction.c index 611afe207..cbda77577 100644 --- a/src/_pakfire/transaction.c +++ b/src/_pakfire/transaction.c @@ -27,7 +27,7 @@ #include "transaction.h" PyTypeObject TransactionType = { - PyObject_HEAD_INIT(NULL) + PyVarObject_HEAD_INIT(NULL, 0) tp_name: "_pakfire.Transaction", tp_basicsize: sizeof(TransactionObject), tp_flags: Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, @@ -62,7 +62,7 @@ PyObject* Transaction_new(PyTypeObject *type, PyObject *args, PyObject *kwds) { PyObject *Transaction_dealloc(TransactionObject *self) { /* XXX need to free self->_transaction */ - self->ob_type->tp_free((PyObject *)self); + Py_TYPE(self)->tp_free((PyObject *)self); Py_RETURN_NONE; } diff --git a/src/pakfire/__init__.py b/src/pakfire/__init__.py index b1ab92274..43aaae1f6 100644 --- a/src/pakfire/__init__.py +++ b/src/pakfire/__init__.py @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/python3 ############################################################################### # # # Pakfire - The IPFire package management system # @@ -19,8 +19,11 @@ # # ############################################################################### -from base import Pakfire, PakfireBuilder, PakfireServer - -from constants import PAKFIRE_VERSION +from . 
import base +from .constants import PAKFIRE_VERSION __version__ = PAKFIRE_VERSION + +Pakfire = base.Pakfire +PakfireBuilder = base.PakfireBuilder +PakfireServer = base.PakfireServer diff --git a/src/pakfire/actions.py b/src/pakfire/actions.py index 727776169..4b09a43d1 100644 --- a/src/pakfire/actions.py +++ b/src/pakfire/actions.py @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/python3 ############################################################################### # # # Pakfire - The IPFire package management system # @@ -22,15 +22,15 @@ import os import sys -import packages -import shell -import util +from . import packages +from . import shell +from . import util import logging log = logging.getLogger("pakfire") -from constants import * -from i18n import _ +from .constants import * +from .i18n import _ class Action(object): def __init__(self, pakfire, pkg_solv, pkg_bin=None): @@ -66,13 +66,13 @@ class Action(object): # Check if there are any signatures at all. if not self.pkg.signatures: - raise SignatureError, _("%s has got no signatures") % self.pkg.friendly_name + raise SignatureError(_("%s has got no signatures") % self.pkg.friendly_name) # Run the verification process and save the result. sigs = self.pkg.verify() if not sigs: - raise SignatureError, _("%s has got no valid signatures") % self.pkg.friendly_name + raise SignatureError(_("%s has got no valid signatures") % self.pkg.friendly_name) @property def pkg(self): @@ -95,7 +95,7 @@ class Action(object): def _set_pkg_bin(self, pkg): if pkg and not self.pkg_solv.uuid == pkg.uuid: - raise RuntimeError, "Not the same package: %s != %s" % (self.pkg_solv, pkg) + raise RuntimeError("Not the same package: %s != %s" % (self.pkg_solv, pkg)) self._pkg_bin = pkg @@ -232,7 +232,7 @@ class ActionScript(Action): self.run_python() else: - raise ActionError, _("Could not handle scriptlet of unknown type. Skipping.") + raise ActionError(_("Could not handle scriptlet of unknown type. Skipping.")) def run_exec(self): log.debug(_("Executing scriptlet...")) @@ -241,12 +241,12 @@ class ActionScript(Action): if self.interpreter: interpreter = "%s/%s" % (self.pakfire.path, self.interpreter) if not os.path.exists(interpreter): - raise ActionError, _("Cannot run scriptlet because no interpreter is available: %s" \ - % self.interpreter) + raise ActionError(_("Cannot run scriptlet because no interpreter is available: %s" \ + % self.interpreter)) if not os.access(interpreter, os.X_OK): - raise ActionError, _("Cannot run scriptlet because the interpreter is not executable: %s" \ - % self.interpreter) + raise ActionError(_("Cannot run scriptlet because the interpreter is not executable: %s" \ + % self.interpreter)) # Create a name for the temporary script file. script_file_chroot = os.path.join("/", LOCAL_TMP_PATH, @@ -283,15 +283,15 @@ class ActionScript(Action): try: self.execute(command) - except ShellEnvironmentError, e: - raise ActionError, _("The scriptlet returned an error:\n%s" % e) + except ShellEnvironmentError as e: + raise ActionError(_("The scriptlet returned an error:\n%s" % e)) except commandTimeoutExpired: - raise ActionError, _("The scriptlet ran more than %s seconds and was killed." \ - % SCRIPTLET_TIMEOUT) + raise ActionError(_("The scriptlet ran more than %s seconds and was killed." 
\ + % SCRIPTLET_TIMEOUT)) - except Exception, e: - raise ActionError, _("The scriptlet returned with an unhandled error:\n%s" % e) + except Exception as e: + raise ActionError(_("The scriptlet returned with an unhandled error:\n%s" % e)) finally: # Remove the script file. @@ -329,8 +329,8 @@ class ActionScript(Action): obj = compile(self.scriptlet, "", "exec") eval(obj, _globals, {}) - except Exception, e: - print _("Exception occured: %s") % e + except Exception as e: + print(_("Exception occured: %s") % e) os._exit(1) # End the child process without cleaning up. diff --git a/src/pakfire/base.py b/src/pakfire/base.py index 6b53a496c..cb1a34092 100644 --- a/src/pakfire/base.py +++ b/src/pakfire/base.py @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/python3 ############################################################################### # # # Pakfire - The IPFire package management system # @@ -23,25 +23,25 @@ import os import random import string -import actions -import builder -import config -import distro -import filelist -import keyring -import logger -import packages -import repository -import satsolver -import transaction -import util +from . import actions +from . import builder +from . import config +from . import distro +from . import filelist +from . import keyring +from . import logger +from . import packages +from . import repository +from . import satsolver +from . import transaction +from . import util import logging log = logging.getLogger("pakfire") -from config import Config -from constants import * -from i18n import _ +from .config import Config +from .constants import * +from .i18n import _ class Pakfire(object): mode = None @@ -67,7 +67,7 @@ class Pakfire(object): self.config = self._load_config(configs) # Update configuration with additional arguments. - for section, settings in kwargs.items(): + for section, settings in list(kwargs.items()): self.config.update(section, settings) # Dump the configuration. @@ -125,7 +125,7 @@ class Pakfire(object): def check_root_user(self): if not os.getuid() == 0 or not os.getgid() == 0: - raise Exception, "You must run pakfire as the root user." + raise Exception("You must run pakfire as the root user.") def check_host_arch(self, arch): """ @@ -137,15 +137,15 @@ class Pakfire(object): return True if not system.host_supports_arch(arch): - raise BuildError, "Cannot build for the target architecture: %s" % arch + raise BuildError("Cannot build for the target architecture: %s" % arch) - raise BuildError, arch + raise BuildError(arch) def check_is_ipfire(self): ret = os.path.exists("/etc/ipfire-release") if not ret: - raise NotAnIPFireSystemError, "You can run pakfire only on an IPFire system" + raise NotAnIPFireSystemError("You can run pakfire only on an IPFire system") @property def builder(self): @@ -519,7 +519,7 @@ class Pakfire(object): # Check, if a package with the name is already in the resultset # and always replace older ones by more recent ones. - if pkgs.has_key(pkg.name): + if pkg.name in pkgs: if pkgs[pkg.name] < pkg: pkgs[pkg.name] = pkg else: @@ -616,7 +616,7 @@ class Pakfire(object): b.build(stages=stages) except Error: - raise BuildError, _("Build command has failed.") + raise BuildError(_("Build command has failed.")) else: # If the build was successful, cleanup all temporary files. 
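
The actions.py and base.py hunks above convert the Python 2 only "raise Error, message" and "except Error, e" forms into the call/"as" syntax that Python 3 accepts. A small self-contained sketch of the same pattern (the exception and function names here are made up):

    class ScriptletError(Exception):
        """Stand-in for errors such as ActionError in the patch."""

    def run_scriptlet(interpreter):
        if interpreter is None:
            # Python 3 spelling: the message is passed as a constructor argument.
            raise ScriptletError("no interpreter is available")

    try:
        run_scriptlet(None)
    except ScriptletError as e:   # "except ScriptletError, e" is a syntax error in Python 3
        print("scriptlet failed: %s" % e)
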
@@ -655,7 +655,7 @@ class PakfireBuilder(Pakfire): if not c.has_distro_conf(): log.error(_("You have not set the distribution for which you want to build.")) log.error(_("Please do so in builder.conf or on the CLI.")) - raise ConfigError, _("Distribution configuration is missing.") + raise ConfigError(_("Distribution configuration is missing.")) return c diff --git a/src/pakfire/builder.py b/src/pakfire/builder.py index b961f8e0a..67404ee97 100644 --- a/src/pakfire/builder.py +++ b/src/pakfire/builder.py @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/python3 ############################################################################### # # # Pakfire - The IPFire package management system # @@ -31,25 +31,22 @@ import tempfile import time import uuid -import base -import cgroup -import logger -import packages -import packages.file -import packages.packager -import repository -import shell -import util -import _pakfire +from . import _pakfire +from . import cgroup +from . import logger +from . import packages +from . import repository +from . import shell +from . import util import logging log = logging.getLogger("pakfire") -from config import ConfigBuilder -from system import system -from constants import * -from i18n import _ -from errors import BuildError, BuildRootLocked, Error +from .config import ConfigBuilder +from .system import system +from .constants import * +from .i18n import _ +from .errors import BuildError, BuildRootLocked, Error BUILD_LOG_HEADER = """ @@ -72,12 +69,9 @@ class BuildEnviron(object): def __init__(self, pakfire, filename=None, distro_name=None, build_id=None, logfile=None, release_build=True, **kwargs): self.pakfire = pakfire - # Check if the given pakfire instance is of the correct type. - assert isinstance(self.pakfire, base.PakfireBuilder) - # Check if this host can build the requested architecture. if not system.host_supports_arch(self.arch): - raise BuildError, _("Cannot build for %s on this host.") % self.arch + raise BuildError(_("Cannot build for %s on this host.") % self.arch) # Save the build id and generate one if no build id was provided. if not build_id: @@ -146,7 +140,7 @@ class BuildEnviron(object): # Log the package information. self.log.info(_("Package information:")) - for line in self.pkg.dump(long=True).splitlines(): + for line in self.pkg.dump(int=True).splitlines(): self.log.info(" %s" % line) self.log.info("") @@ -173,15 +167,15 @@ class BuildEnviron(object): # we try to fall back to just set CLONE_NEWNS. try: _pakfire.unshare(_pakfire.SCHED_CLONE_NEWNS|_pakfire.SCHED_CLONE_NEWIPC|_pakfire.SCHED_CLONE_NEWUTS) - except RuntimeError, e: + except RuntimeError as e: _pakfire.unshare(_pakfire.SCHED_CLONE_NEWNS) # Mount the directories. try: self._mountall() - except OSError, e: + except OSError as e: if e.errno == 30: # Read-only FS - raise BuildError, "Buildroot is read-only: %s" % self.pakfire.path + raise BuildError("Buildroot is read-only: %s" % self.pakfire.path) # Raise all other errors. 
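
The _mountall() hunk above keeps comparing e.errno against the raw value 30 (read-only filesystem) inside the new "except ... as" block. A short sketch of the same kind of check written with the errno module's named constants; the function and path here are made up, and on Linux errno.EROFS is 30 and errno.EBUSY is 16:

    import errno
    import os

    def remove_build_dir(path):
        try:
            os.rmdir(path)
        except OSError as e:
            if e.errno == errno.ENOENT:                # already gone, nothing to do
                return
            if e.errno in (errno.EROFS, errno.EBUSY):  # read-only or busy build root
                raise RuntimeError("build root not removable: %s" % path)
            raise                                      # re-raise anything unexpected

    remove_build_dir("/tmp/does-not-exist-pakfire-build")
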
raise @@ -295,13 +289,13 @@ class BuildEnviron(object): try: self._lock = open(filename, "a+") - except IOError, e: + except IOError as e: return 0 try: fcntl.lockf(self._lock.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB) - except IOError, e: - raise BuildRootLocked, "Buildroot is locked" + except IOError as e: + raise BuildRootLocked("Buildroot is locked") return 1 @@ -464,7 +458,7 @@ class BuildEnviron(object): "logger" : self.log, }) - if not kwargs.has_key("allow_downgrade"): + if "allow_downgrade" not in kwargs: kwargs["allow_downgrade"] = True # Install everything. @@ -738,7 +732,7 @@ class BuildEnviron(object): # Environment variables env = self.environ - if kwargs.has_key("env"): + if "env" in kwargs: env.update(kwargs.pop("env")) self.log.debug("Environment:") @@ -776,11 +770,11 @@ class BuildEnviron(object): def build(self, install_test=True, prepare=False): if not self.pkg: - raise BuildError, _("You cannot run a build when no package was given.") + raise BuildError(_("You cannot run a build when no package was given.")) # Search for the package file in build_dir and raise BuildError if it is not present. if not os.path.exists(self.pkg_makefile): - raise BuildError, _("Could not find makefile in build root: %s") % self.pkg_makefile + raise BuildError(_("Could not find makefile in build root: %s") % self.pkg_makefile) # Write pakfire configuration into the chroot. self.write_config() @@ -835,7 +829,7 @@ class BuildEnviron(object): return # End here in case of an error. - raise BuildError, _("The build command failed. See logfile for details.") + raise BuildError(_("The build command failed. See logfile for details.")) def install_test(self): self.log.info(_("Running installation test...")) @@ -863,7 +857,7 @@ class BuildEnviron(object): if self.pakfire.distro.personality: command = "%s %s" % (self.pakfire.distro.personality, command) - for key, val in self.environ.items(): + for key, val in list(self.environ.items()): command = "%s=\"%s\" " % (key, val) + command # Empty the environment @@ -887,7 +881,7 @@ class BuildEnviron(object): files = self.find_result_packages() # Create a progressbar. - print _("Signing packages...") + print(_("Signing packages...")) p = util.make_progress(keyfp, len(files)) i = 0 @@ -906,7 +900,7 @@ class BuildEnviron(object): # Close progressbar. if p: p.finish() - print "" # Print an empty line. + print("") # Print an empty line. def dump(self): pkgs = [] @@ -923,7 +917,7 @@ class BuildEnviron(object): self.log.info(_("Dumping package information:")) for pkg in pkgs: - dump = pkg.dump(long=True) + dump = pkg.dump(int=True) for line in dump.splitlines(): self.log.info(" %s" % line) @@ -1104,13 +1098,13 @@ class Builder(object): try: self.execute("%s/remove-static-libs %s %s" % \ (SCRIPT_DIR, self.buildroot, " ".join(keep_libs))) - except ShellEnvironmentError, e: + except ShellEnvironmentError as e: log.warning(_("Could not remove static libraries: %s") % e) def post_compress_man_pages(self): try: self.execute("%s/compress-man-pages %s" % (SCRIPT_DIR, self.buildroot)) - except ShellEnvironmentError, e: + except ShellEnvironmentError as e: log.warning(_("Compressing man pages did not complete successfully.")) def post_extract_debuginfo(self): @@ -1130,7 +1124,7 @@ class Builder(object): try: self.execute("%s/extract-debuginfo %s %s" % (SCRIPT_DIR, " ".join(args), self.pkg.buildroot)) - except ShellEnvironmentError, e: + except ShellEnvironmentError as e: log.error(_("Extracting debuginfo did not complete with success. 
Aborting build.")) raise diff --git a/src/pakfire/cgroup.py b/src/pakfire/cgroup.py index 207f27044..a2f932840 100644 --- a/src/pakfire/cgroup.py +++ b/src/pakfire/cgroup.py @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/python3 import os import shutil @@ -102,7 +102,7 @@ class CGroup(object): # Remove the file tree. try: os.rmdir(self.path) - except OSError, e: + except OSError as e: # Ignore "Device or resource busy". if e.errno == 16: return @@ -270,7 +270,7 @@ class CGroup(object): try: os.kill(proc, sig) - except OSError, e: + except OSError as e: # Skip "No such process" error if e.errno == 3: pass diff --git a/src/pakfire/cli.py b/src/pakfire/cli.py index 01e0f0246..ccea3ae79 100644 --- a/src/pakfire/cli.py +++ b/src/pakfire/cli.py @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/python3 ############################################################################### # # # Pakfire - The IPFire package management system # @@ -26,20 +26,20 @@ import shutil import sys import tempfile -import base -import client -import config -import daemon -import logger -import packages -import repository -import server -import transaction -import util - -from system import system -from constants import * -from i18n import _ +from . import base +from . import client +from . import config +from . import daemon +from . import logger +from . import packages +from . import repository +from . import server +from . import transaction +from . import util + +from .system import system +from .constants import * +from .i18n import _ # Initialize a very simple logging that is removed when a Pakfire instance # is started. @@ -321,7 +321,7 @@ class Cli(object): try: func = self.action2func[action] except KeyError: - raise Exception, "Unhandled action: %s" % action + raise Exception("Unhandled action: %s" % action) return func() @@ -329,13 +329,13 @@ class Cli(object): p = self.create_pakfire() for pkg in p.info(self.args.package): - print pkg.dump(long=long) + print(pkg.dump(int=int)) def handle_search(self): p = self.create_pakfire() for pkg in p.search(self.args.pattern): - print pkg.dump(short=True) + print(pkg.dump(short=True)) def handle_update(self, **args): p = self.create_pakfire() @@ -381,13 +381,13 @@ class Cli(object): p = self.create_pakfire() for pkg in p.provides(self.args.pattern): - print pkg.dump(long=long) + print(pkg.dump(int=int)) def handle_grouplist(self): p = self.create_pakfire() for pkg in p.grouplist(self.args.group[0]): - print " * %s" % pkg + print(" * %s" % pkg) def handle_groupinstall(self): p = self.create_pakfire() @@ -401,14 +401,14 @@ class Cli(object): FORMAT = " %-20s %8s %12s %12s " title = FORMAT % (_("Repository"), _("Enabled"), _("Priority"), _("Packages")) - print title - print "=" * len(title) # spacing line + print(title) + print("=" * len(title)) # spacing line for repo in repos: - print FORMAT % (repo.name, repo.enabled, repo.priority, len(repo)) + print(FORMAT % (repo.name, repo.enabled, repo.priority, len(repo))) def handle_clean_all(self): - print _("Cleaning up everything...") + print(_("Cleaning up everything...")) p = self.create_pakfire() p.clean_all() @@ -444,16 +444,16 @@ class Cli(object): source_packages = any([p.type == "source" for p in pkgs]) if binary_packages and source_packages: - raise Error, _("Cannot extract mixed package types") + raise Error(_("Cannot extract mixed package types")) if binary_packages and not target_prefix: - raise Error, _("You must provide an install directory with --target=...") + raise Error(_("You must provide an install 
directory with --target=...")) elif source_packages and not target_prefix: target_prefix = "/usr/src/packages/" if target_prefix == "/": - raise Error, _("Cannot extract to /.") + raise Error(_("Cannot extract to /.")) for pkg in pkgs: if pkg.type == "binary": @@ -471,7 +471,7 @@ class CliBuilder(Cli): # Check if we are already running in a pakfire container. In that # case, we cannot start another pakfire-builder. if os.environ.get("container", None) == "pakfire-builder": - raise PakfireContainerError, _("You cannot run pakfire-builder in a pakfire chroot.") + raise PakfireContainerError(_("You cannot run pakfire-builder in a pakfire chroot.")) self.parser = argparse.ArgumentParser( description = _("Pakfire builder command line interface."), @@ -588,7 +588,7 @@ class CliBuilder(Cli): help=_("Path were the output files should be copied to.")) def handle_info(self): - Cli.handle_info(self, long=True) + Cli.handle_info(self, int=True) def handle_build(self): # Get the package descriptor from the command line options @@ -599,7 +599,7 @@ class CliBuilder(Cli): pkg = os.path.abspath(pkg) else: - raise FileNotFoundError, pkg + raise FileNotFoundError(pkg) # Build argument list. kwargs = { @@ -633,7 +633,7 @@ class CliBuilder(Cli): pkg = os.path.abspath(pkg) else: - raise FileNotFoundError, pkg + raise FileNotFoundError(pkg) if self.args.mode == "release": release_build = True @@ -663,7 +663,7 @@ class CliBuilder(Cli): pkgs.append(pkg) else: - raise FileNotFoundError, pkg + raise FileNotFoundError(pkg) # Put packages to where the user said or our # current working directory. @@ -674,7 +674,7 @@ class CliBuilder(Cli): p.dist(pkg, resultdir=resultdir) def handle_provides(self): - Cli.handle_provides(self, long=True) + Cli.handle_provides(self, int=True) class CliServer(Cli): @@ -792,7 +792,7 @@ class CliServer(Cli): for file in os.listdir(tmpdir): file = os.path.join(tmpdir, file) - print file + print(file) finally: if os.path.exists(tmpdir): @@ -810,7 +810,7 @@ class CliServer(Cli): def handle_info(self): info = self.server.info() - print "\n".join(info) + print("\n".join(info)) class CliBuilderIntern(Cli): @@ -860,7 +860,7 @@ class CliBuilderIntern(Cli): if os.path.exists(pkg): pkg = os.path.abspath(pkg) else: - raise FileNotFoundError, pkg + raise FileNotFoundError(pkg) # Create pakfire instance. c = config.ConfigBuilder() @@ -1015,7 +1015,7 @@ class CliClient(Cli): pass else: - raise Exception, "Unknown filetype: %s" % package + raise Exception("Unknown filetype: %s" % package) # Format arches. if self.args.arch: @@ -1066,7 +1066,7 @@ class CliClient(Cli): ret.append("") for line in ret: - print line + print(line) def handle_connection_check(self): ret = [] @@ -1096,25 +1096,25 @@ class CliClient(Cli): ret.append(_("You could not be authenticated to the build service.")) for line in ret: - print line + print(line) def _print_jobs(self, jobs, heading=None): if heading: - print "%s:" % heading - print + print("%s:" % heading) + print() for job in jobs: line = " [%(type)8s] %(name)-30s: %(state)s" - print line % job + print(line % job) - print # Empty line at the end. + print() # Empty line at the end. 
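The cli.py hunks are dominated by the print-statement to print()-function conversion; the format strings themselves stay the same. A short sketch of the converted idioms with made-up repository data:

FORMAT = " %-20s %8s %12s %12s "
repos = [("base", True, 50, 1024), ("updates", True, 10, 256)]

title = FORMAT % ("Repository", "Enabled", "Priority", "Packages")
print(title)
print("=" * len(title))  # spacing line

for name, enabled, priority, packages in repos:
    print(FORMAT % (name, enabled, priority, packages))

print()  # a bare print() now produces the empty line that "print" alone used to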
def handle_jobs_active(self): jobs = self.client.get_active_jobs() if not jobs: - print _("No ongoing jobs found.") + print(_("No ongoing jobs found.")) return self._print_jobs(jobs, _("Active build jobs")) @@ -1123,7 +1123,7 @@ class CliClient(Cli): jobs = self.client.get_latest_jobs() if not jobs: - print _("No jobs found.") + print(_("No jobs found.")) return self._print_jobs(jobs, _("Recently processed build jobs")) @@ -1133,10 +1133,10 @@ class CliClient(Cli): build = self.client.get_build(build_id) if not build: - print _("A build with ID %s could not be found.") % build_id + print(_("A build with ID %s could not be found.") % build_id) return - print _("Build: %(name)s") % build + print(_("Build: %(name)s") % build) fmt = "%-14s: %s" lines = [ @@ -1149,22 +1149,22 @@ class CliClient(Cli): lines.append(" * [%(uuid)s] %(name)-30s: %(state)s" % job) for line in lines: - print " ", line - print + print(" ", line) + print() def handle_jobs_show(self): (job_id,) = self.args.job_id job = self.client.get_job(job_id) if not job: - print _("A job with ID %s could not be found.") % job_id + print(_("A job with ID %s could not be found.") % job_id) return builder = None if job["builder_id"]: builder = self.client.get_builder(job["builder_id"]) - print _("Job: %(name)s") % job + print(_("Job: %(name)s") % job) fmt = "%-14s: %s" lines = [ @@ -1198,8 +1198,8 @@ class CliClient(Cli): lines += [" %s" % line for line in pkg_lines] for line in lines: - print " ", line - print # New line. + print(" ", line) + print() # New line. def handle_test(self): error_code = self.args.error_code[0] @@ -1210,15 +1210,15 @@ class CliClient(Cli): error_code = 0 if error_code < 100 or error_code > 999: - raise Error, _("Invalid error code given.") + raise Error(_("Invalid error code given.")) res = self.client.test_code(error_code) - print _("Reponse from the server: %s") % res + print(_("Reponse from the server: %s") % res) def watch_build(self, build_id): - print self.client.build_get(build_id) + print(self.client.build_get(build_id)) # XXX TODO - print build_id + print(build_id) class CliDaemon(Cli): @@ -1354,8 +1354,8 @@ class CliKey(Cli): realname = self.args.realname[0] email = self.args.email[0] - print _("Generating the key may take a moment...") - print + print(_("Generating the key may take a moment...")) + print() # Generate the key. p = self.create_pakfire() @@ -1384,7 +1384,7 @@ class CliKey(Cli): def handle_list(self): p = self.create_pakfire() for line in p.keyring.list_keys(): - print line + print(line) def handle_sign(self): # Get the files from the command line options @@ -1397,7 +1397,7 @@ class CliKey(Cli): files.append(file) else: - raise FileNotFoundError, file + raise FileNotFoundError(file) key = self.args.key[0] @@ -1408,7 +1408,7 @@ class CliKey(Cli): # Open the package. pkg = packages.open(p, None, file) - print _("Signing %s...") % pkg.friendly_name + print(_("Signing %s...") % pkg.friendly_name) pkg.sign(key) def handle_verify(self): @@ -1428,7 +1428,7 @@ class CliKey(Cli): # Open the package. 
pkg = packages.open(p, None, file) - print _("Verifying %s...") % pkg.friendly_name + print(_("Verifying %s...") % pkg.friendly_name) sigs = pkg.verify() for sig in sigs: @@ -1436,19 +1436,19 @@ class CliKey(Cli): if key: subkey = key.subkeys[0] - print " %s %s" % (subkey.fpr[-16:], key.uids[0].uid) + print(" %s %s" % (subkey.fpr[-16:], key.uids[0].uid)) if sig.validity: - print " %s" % _("This signature is valid.") + print(" %s" % _("This signature is valid.")) else: - print " %s <%s>" % (sig.fpr, _("Unknown key")) - print " %s" % _("Could not check if this signature is valid.") + print(" %s <%s>" % (sig.fpr, _("Unknown key"))) + print(" %s" % _("Could not check if this signature is valid.")) created = datetime.datetime.fromtimestamp(sig.timestamp) - print " %s" % _("Created: %s") % created + print(" %s" % _("Created: %s") % created) if sig.exp_timestamp: expires = datetime.datetime.fromtimestamp(sig.exp_timestamp) - print " %s" % _("Expires: %s") % expires + print(" %s" % _("Expires: %s") % expires) - print # Empty line + print() # Empty line diff --git a/src/pakfire/client.py b/src/pakfire/client.py index c1f7ba65c..39e658bc8 100644 --- a/src/pakfire/client.py +++ b/src/pakfire/client.py @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/python3 ############################################################################### # # # Pakfire - The IPFire package management system # @@ -19,7 +19,7 @@ # # ############################################################################### -import transport +from . import transport from pakfire.constants import * from pakfire.i18n import _ diff --git a/src/pakfire/compress.py b/src/pakfire/compress.py index e07715d0b..df6cf1c7e 100644 --- a/src/pakfire/compress.py +++ b/src/pakfire/compress.py @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/python3 ############################################################################### # # # Pakfire - The IPFire package management system # @@ -19,10 +19,10 @@ # # ############################################################################### -import pakfire.lzma as lzma +import lzma -from constants import * -from i18n import _ +from .constants import * +from .i18n import _ ALGO_DEFAULT = "xz" @@ -63,7 +63,7 @@ def guess_algo(name=None, fileobj=None): # Iterate over all algoriths and their magic values # and check for a match. 
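compress.py now imports the lzma module from the Python 3 standard library instead of the bundled pakfire.lzma backport (which is deleted further down). A rough sketch of the stdlib calls this makes available; the file name is made up:

import lzma

# One-shot (de)compression in memory.
blob = lzma.compress(b"pakfire payload", preset=9)
assert lzma.decompress(blob) == b"pakfire payload"

# Stream interface comparable to the old LZMAFile wrapper.
with lzma.open("/tmp/payload.xz", "wb") as f:
    f.write(b"pakfire payload")

with lzma.open("/tmp/payload.xz", "rb") as f:
    data = f.read()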
- for algo, magic in MAGICS.items(): + for algo, magic in list(MAGICS.items()): fileobj.seek(0) start_sequence = fileobj.read(len(magic)) @@ -82,7 +82,7 @@ def guess_algo(name=None, fileobj=None): def decompressobj(name=None, fileobj=None, algo=ALGO_DEFAULT): f_cls = FILES.get(algo, None) if not f_cls: - raise CompressionError, _("Given algorithm '%s' is not supported.") + raise CompressionError(_("Given algorithm '%s' is not supported.")) f = f_cls(name, fileobj=fileobj, mode="r") @@ -92,7 +92,7 @@ def decompressobj(name=None, fileobj=None, algo=ALGO_DEFAULT): def compressobj(name=None, fileobj=None, algo=ALGO_DEFAULT): f_cls = FILES.get(algo, None) if not f_cls: - raise CompressionError, _("Given algorithm '%s' is not supported.") + raise CompressionError(_("Given algorithm '%s' is not supported.")) f = f_cls(name, fileobj=fileobj, mode="w") diff --git a/src/pakfire/config.py b/src/pakfire/config.py index 1b30391ee..c0baf40ae 100644 --- a/src/pakfire/config.py +++ b/src/pakfire/config.py @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/python3 ############################################################################### # # # Pakfire - The IPFire package management system # @@ -19,20 +19,19 @@ # # ############################################################################### +import configparser import io import os import socket -from ConfigParser import ConfigParser - import logging log = logging.getLogger("pakfire") -import logger -from system import system +from . import logger +from .system import system -from constants import * -from i18n import _ +from .constants import * +from .i18n import _ class _Config(object): files = [] @@ -77,7 +76,7 @@ class _Config(object): def get_repos(self): repos = [] - for name, settings in self._config.items(): + for name, settings in list(self._config.items()): if not name.startswith("repo:"): continue @@ -118,20 +117,14 @@ class _Config(object): # Parse the file. with open(file) as f: - self.parse(f.read()) + self.parse(f) # Save the filename to the list of read files. self._files.append(file) - def parse(self, s): - if not s: - return - - s = str(s) - buf = io.BytesIO(s) - - config = ConfigParser() - config.readfp(buf) + def parse(self, f): + config = configparser.ConfigParser(interpolation=configparser.ExtendedInterpolation()) + config.readfp(f) # Read all data from the configuration file in the _config dict. for section in config.sections(): @@ -203,10 +196,10 @@ class _Config(object): (Only in debugging mode.) 
""" log.debug(_("Configuration:")) - for section, settings in self._config.items(): + for section, settings in list(self._config.items()): log.debug(" " + _("Section: %s") % section) - for k, v in settings.items(): + for k, v in list(settings.items()): log.debug(" %-20s: %s" % (k, v)) else: log.debug(" " + _("No settings in this section.")) @@ -216,7 +209,7 @@ class _Config(object): log.debug(" %s" % f) def has_distro_conf(self): - return self._config.has_key("distro") + return "distro" in self._config def get_distro_conf(self): return self.get_section("distro") diff --git a/src/pakfire/constants.py b/src/pakfire/constants.py index 237d8ed29..3f4b18339 100644 --- a/src/pakfire/constants.py +++ b/src/pakfire/constants.py @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/python3 ############################################################################### # # # Pakfire - The IPFire package management system # @@ -21,9 +21,9 @@ import os.path -from errors import * +from .errors import * -from __version__ import PAKFIRE_VERSION +from .__version__ import PAKFIRE_VERSION PAKFIRE_LEAST_COMPATIBLE_VERSION = PAKFIRE_VERSION @@ -94,7 +94,7 @@ MINIMAL_ENVIRONMENT = { "HOME" : "/root", "LANG" : "C", "PATH" : "/usr/bin:/bin:/usr/sbin:/sbin", - "PS1" : "\u:\w\$ ", + "PS1" : "\\u:\w\$ ", "TERM" : "vt100", } @@ -120,7 +120,7 @@ for i in ORPHAN_DIRECTORIES: ORPHAN_DIRECTORIES.append(i) -ORPHAN_DIRECTORIES.sort(cmp=lambda x,y: cmp(len(x), len(y)), reverse=True) +ORPHAN_DIRECTORIES.sort(key=lambda x: len(x), reverse=True) PACKAGE_INFO = """\ # Pakfire %(pakfire_version)s diff --git a/src/pakfire/daemon.py b/src/pakfire/daemon.py index cfd5350b9..be59a2a3e 100644 --- a/src/pakfire/daemon.py +++ b/src/pakfire/daemon.py @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/python3 import hashlib import json @@ -17,8 +17,8 @@ import pakfire.system import pakfire.util from pakfire.system import system -import base -import transport +from . import base +from . import transport from pakfire.constants import * from pakfire.i18n import _ @@ -36,7 +36,7 @@ class BuildJob(dict): try: return self[key] except KeyError: - raise AttributeError, key + raise AttributeError(key) class PakfireDaemon(object): @@ -506,7 +506,7 @@ class PakfireWorker(multiprocessing.Process): f.close() if not job.source_hash_sha512 == h.hexdigest(): - raise DownloadError, "Hash check did not succeed." + raise DownloadError("Hash check did not succeed.") # Create a new instance of a build environment. build = pakfire.builder.BuildEnviron(p, tmpfile, @@ -544,12 +544,12 @@ class PakfireWorker(multiprocessing.Process): self.upload_file(job, file, "package") - except DependencyError, e: + except DependencyError as e: message = "%s: %s" % (e.__class__.__name__, e) self.update_state(job, "dependency_error", message) raise - except DownloadError, e: + except DownloadError as e: message = "%s: %s" % (e.__class__.__name__, e) self.update_state(job, "download_error", message) raise @@ -576,7 +576,7 @@ class PakfireWorker(multiprocessing.Process): except (KeyboardInterrupt, SystemExit): self.update_state(job, "aborted") - except Exception, e: + except Exception as e: # Format the exception and send it to the server. 
message = "%s: %s" % (e.__class__.__name__, e) diff --git a/src/pakfire/distro.py b/src/pakfire/distro.py index a6ff59891..bceba0cb5 100644 --- a/src/pakfire/distro.py +++ b/src/pakfire/distro.py @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/python3 ############################################################################### # # # Pakfire - The IPFire package management system # @@ -25,7 +25,7 @@ import re import logging log = logging.getLogger("pakfire") -import system +from . import system class Distribution(object): def __init__(self, data=None): @@ -100,7 +100,7 @@ class Distribution(object): return # Exceptional handling for arch. - if config.has_key("arch"): + if "arch" in config: self.arch = config["arch"] del config["arch"] @@ -223,7 +223,7 @@ class Distribution(object): def info(self): info = {} - for k, v in self.environ.items(): + for k, v in list(self.environ.items()): info[k.lower()] = v return info diff --git a/src/pakfire/downloader.py b/src/pakfire/downloader.py index 0b5b5b6a4..aac0b2826 100644 --- a/src/pakfire/downloader.py +++ b/src/pakfire/downloader.py @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/python3 ############################################################################### # # # Pakfire - The IPFire package management system # @@ -21,89 +21,30 @@ import json import os -import pycurl import random +import urllib import logging log = logging.getLogger("pakfire") -from config import _Config - -import urlgrabber.grabber -from urlgrabber.grabber import URLGrabber, URLGrabError -from urlgrabber.mirror import MirrorGroup -from urlgrabber.progress import TextMeter +from . import progressbar from pakfire.constants import * from pakfire.i18n import _ -class PakfireGrabber(URLGrabber): - """ - Class to make some modifications on the urlgrabber configuration. - """ - def __init__(self, pakfire, *args, **kwargs): - kwargs.update({ - "quote" : 0, - "user_agent" : "pakfire/%s" % PAKFIRE_VERSION, - - "ssl_verify_host" : False, - "ssl_verify_peer" : False, - }) - - if isinstance(pakfire, _Config): - config = pakfire - else: - config = pakfire.config - self.config = config - - # Set throttle setting. - bandwidth_throttle = config.get("downloader", "bandwidth_throttle") - if bandwidth_throttle: - try: - bandwidth_throttle = int(bandwidth_throttle) - except ValueError: - log.error("Configuration value for bandwidth_throttle is invalid.") - bandwidth_throttle = 0 - - kwargs.update({ "throttle" : bandwidth_throttle }) +class PakfireDownloader(object): + def __init__(self): + pass - # Configure HTTP proxy. - http_proxy = config.get("downloader", "http_proxy") - if http_proxy: - kwargs.update({ "proxies" : { "http" : http_proxy, "https" : http_proxy }}) - - URLGrabber.__init__(self, *args, **kwargs) - - def check_offline_mode(self): - offline = self.config.get("downloader", "offline") - if not offline: - return - - raise OfflineModeError - - def urlread(self, filename, *args, **kwargs): - self.check_offline_mode() - - # This is for older versions of urlgrabber which are packaged in Debian - # and Ubuntu and cannot handle filenames as a normal Python string but need - # a unicode string. - return URLGrabber.urlread(self, filename.encode("utf-8"), *args, **kwargs) - - def urlopen(self, filename, *args, **kwargs): - self.check_offline_mode() - - # This is for older versions of urlgrabber which are packaged in Debian - # and Ubuntu and cannot handle filenames as a normal Python string but need - # a unicode string. 
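downloader.py loses the urlgrabber-based PakfireGrabber and gains only a PakfireDownloader stub for now. Purely as an illustration of what such a stub might later wrap, and not code from this commit, a plain urllib.request download that sends the same User-Agent string the stub exposes could look like this:

import urllib.request

PAKFIRE_VERSION = "0.9"  # stand-in; the real value comes from __version__

def fetch(url, timeout=30):
    # Hypothetical helper: fetch a URL with pakfire's User-Agent header.
    req = urllib.request.Request(url, headers={
        "User-Agent": "pakfire/%s" % PAKFIRE_VERSION,
    })
    with urllib.request.urlopen(req, timeout=timeout) as resp:
        return resp.read()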
- return URLGrabber.urlopen(self, filename.encode("utf-8"), *args, **kwargs) + @property + def user_agent(self): + return "pakfire/%s" % PAKFIRE_VERSION - def urlgrab(self, url, *args, **kwargs): - self.check_offline_mode() + def set_proxy(self, *args, **kwargs): + pass - # This is for older versions of urlgrabber which are packaged in Debian - # and Ubuntu and cannot handle filenames as a normal Python string but need - # a unicode string. - return URLGrabber.urlgrab(self, url.encode("utf-8"), *args, **kwargs) + def use_mirrors(self, mirrors): + pass class PackageDownloader(PakfireGrabber): @@ -165,7 +106,7 @@ class SourceDownloader(object): log.info(_("Downloading source files:")) if self.pakfire.offline: - raise OfflineModeError, _("Cannot download source code in offline mode.") + raise OfflineModeError(_("Cannot download source code in offline mode.")) # Create source download directory. if not os.path.exists(SOURCE_CACHE_DIR): @@ -174,22 +115,22 @@ class SourceDownloader(object): for filename in download_files: try: self.grabber.urlgrab(os.path.basename(filename), filename=filename) - except URLGrabError, e: + except URLGrabError as e: # Remove partly downloaded file. try: os.unlink(filename) except OSError: pass - raise DownloadError, "%s %s" % (os.path.basename(filename), e) + raise DownloadError("%s %s" % (os.path.basename(filename), e)) # Check if the downloaded file was empty. if os.path.getsize(filename) == 0: # Remove the file and raise an error. os.unlink(filename) - raise DownloadError, _("Downloaded empty file: %s") \ - % os.path.basename(filename) + raise DownloadError(_("Downloaded empty file: %s") \ + % os.path.basename(filename)) log.info("") @@ -266,7 +207,7 @@ class MirrorList(object): try: mirrordata = g.urlread(self.mirrorlist, limit=MIRRORLIST_MAXSIZE) - except URLGrabError, e: + except URLGrabError as e: log.warning("Could not update the mirrorlist for repo '%s': %s" % (self.repo.name, e)) return diff --git a/src/pakfire/errors.py b/src/pakfire/errors.py index 2da382273..e8236f7e6 100644 --- a/src/pakfire/errors.py +++ b/src/pakfire/errors.py @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/python3 ############################################################################### # # # Pakfire - The IPFire package management system # @@ -19,7 +19,7 @@ # # ############################################################################### -from i18n import _ +from .i18n import _ class commandTimeoutExpired(Exception): pass # XXX cannot be as is diff --git a/src/pakfire/filelist.py b/src/pakfire/filelist.py index a0af1e9a2..43627cab2 100644 --- a/src/pakfire/filelist.py +++ b/src/pakfire/filelist.py @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/python3 ############################################################################### # # # Pakfire - The IPFire package management system # diff --git a/src/pakfire/i18n.py b/src/pakfire/i18n.py index 43df6abb8..d524e2cb4 100644 --- a/src/pakfire/i18n.py +++ b/src/pakfire/i18n.py @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/python3 ############################################################################### # # # Pakfire - The IPFire package management system # @@ -52,6 +52,6 @@ def list(parts): if len(parts) == 0: return "" if len(parts) == 1: return parts[0] return _("%(commas)s and %(last)s") % { - "commas": u", ".join(parts[:-1]), + "commas": ", ".join(parts[:-1]), "last": parts[len(parts) - 1], } diff --git a/src/pakfire/keyring.py b/src/pakfire/keyring.py index 7bd54fb9f..473c35bbe 100644 --- a/src/pakfire/keyring.py +++ 
b/src/pakfire/keyring.py @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/python3 ############################################################################### # # # Pakfire - The IPFire package management system # @@ -27,9 +27,9 @@ import os import logging log = logging.getLogger("pakfire") -from constants import * -from i18n import _ -from system import system +from .constants import * +from .i18n import _ +from .system import system class Keyring(object): def __init__(self, pakfire): diff --git a/src/pakfire/logger.py b/src/pakfire/logger.py index 9a40cddb1..d453cb226 100644 --- a/src/pakfire/logger.py +++ b/src/pakfire/logger.py @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/python3 ############################################################################### # # # Pakfire - The IPFire package management system # diff --git a/src/pakfire/lzma.py b/src/pakfire/lzma.py deleted file mode 100644 index 2c3806461..000000000 --- a/src/pakfire/lzma.py +++ /dev/null @@ -1,398 +0,0 @@ -"""Interface to the liblzma compression library. - -This module provides a class for reading and writing compressed files, -classes for incremental (de)compression, and convenience functions for -one-shot (de)compression. - -These classes and functions support both the XZ and legacy LZMA -container formats, as well as raw compressed data streams. -""" - -__all__ = [ - "CHECK_NONE", "CHECK_CRC32", "CHECK_CRC64", "CHECK_SHA256", - "CHECK_ID_MAX", "CHECK_UNKNOWN", - "FILTER_LZMA1", "FILTER_LZMA2", "FILTER_DELTA", "FILTER_X86", "FILTER_IA64", - "FILTER_ARM", "FILTER_ARMTHUMB", "FILTER_POWERPC", "FILTER_SPARC", - "FORMAT_AUTO", "FORMAT_XZ", "FORMAT_ALONE", "FORMAT_RAW", - "MF_HC3", "MF_HC4", "MF_BT2", "MF_BT3", "MF_BT4", - "MODE_FAST", "MODE_NORMAL", "PRESET_DEFAULT", "PRESET_EXTREME", - - "LZMACompressor", "LZMADecompressor", "LZMAFile", "LZMAError", - "compress", "decompress", "check_is_supported", -] - -import io -from _lzma import * - - -_MODE_CLOSED = 0 -_MODE_READ = 1 -_MODE_READ_EOF = 2 -_MODE_WRITE = 3 - -_BUFFER_SIZE = 8192 - - -class LZMAFile(io.BufferedIOBase): - - """A file object providing transparent LZMA (de)compression. - - An LZMAFile can act as a wrapper for an existing file object, or - refer directly to a named file on disk. - - Note that LZMAFile provides a *binary* file interface - data read - is returned as bytes, and data to be written must be given as bytes. - """ - - def __init__(self, filename=None, mode="r", - fileobj=None, format=None, check=-1, - preset=None, filters=None): - """Open an LZMA-compressed file. - - If filename is given, open the named file. Otherwise, operate on - the file object given by fileobj. Exactly one of these two - parameters should be provided. - - mode can be "r" for reading (default), "w" for (over)writing, or - "a" for appending. - - format specifies the container format to use for the file. - If mode is "r", this defaults to FORMAT_AUTO. Otherwise, the - default is FORMAT_XZ. - - check specifies the integrity check to use. This argument can - only be used when opening a file for writing. For FORMAT_XZ, - the default is CHECK_CRC64. FORMAT_ALONE and FORMAT_RAW do not - support integrity checks - for these formats, check must be - omitted, or be CHECK_NONE. - - When opening a file for reading, the *preset* argument is not - meaningful, and should be omitted. The *filters* argument should - also be omitted, except when format is FORMAT_RAW (in which case - it is required). 
- - When opening a file for writing, the settings used by the - compressor can be specified either as a preset compression - level (with the *preset* argument), or in detail as a custom - filter chain (with the *filters* argument). For FORMAT_XZ and - FORMAT_ALONE, the default is to use the PRESET_DEFAULT preset - level. For FORMAT_RAW, the caller must always specify a filter - chain; the raw compressor does not support preset compression - levels. - - preset (if provided) should be an integer in the range 0-9, - optionally OR-ed with the constant PRESET_EXTREME. - - filters (if provided) should be a sequence of dicts. Each dict - should have an entry for "id" indicating ID of the filter, plus - additional entries for options to the filter. - """ - self._fp = None - self._closefp = False - self._mode = _MODE_CLOSED - self._pos = 0 - self._size = -1 - - if mode == "r": - if check != -1: - raise ValueError("Cannot specify an integrity check " - "when opening a file for reading") - if preset is not None: - raise ValueError("Cannot specify a preset compression " - "level when opening a file for reading") - if format is None: - format = FORMAT_AUTO - mode_code = _MODE_READ - # Save the args to pass to the LZMADecompressor initializer. - # If the file contains multiple compressed streams, each - # stream will need a separate decompressor object. - self._init_args = {"format":format, "filters":filters} - self._decompressor = LZMADecompressor(**self._init_args) - self._buffer = None - elif mode in ("w", "a"): - if format is None: - format = FORMAT_XZ - mode_code = _MODE_WRITE - self._compressor = LZMACompressor(format=format, check=check, - preset=preset, filters=filters) - else: - raise ValueError("Invalid mode: {!r}".format(mode)) - - if filename is not None and fileobj is None: - mode += "b" - self._fp = open(filename, mode) - self._closefp = True - self._mode = mode_code - elif fileobj is not None and filename is None: - self._fp = fileobj - self._mode = mode_code - else: - raise ValueError("Must give exactly one of filename and fileobj") - - def close(self): - """Flush and close the file. - - May be called more than once without error. Once the file is - closed, any other operation on it will raise a ValueError. - """ - if self._mode == _MODE_CLOSED: - return - try: - if self._mode in (_MODE_READ, _MODE_READ_EOF): - self._decompressor = None - self._buffer = None - elif self._mode == _MODE_WRITE: - self._fp.write(self._compressor.flush()) - self._compressor = None - finally: - try: - if self._closefp: - self._fp.close() - finally: - self._fp = None - self._closefp = False - self._mode = _MODE_CLOSED - - @property - def closed(self): - """True if this file is closed.""" - return self._mode == _MODE_CLOSED - - def fileno(self): - """Return the file descriptor for the underlying file.""" - self._check_not_closed() - return self._fp.fileno() - - def seekable(self): - """Return whether the file supports seeking.""" - return self.readable() - - def readable(self): - """Return whether the file was opened for reading.""" - self._check_not_closed() - return self._mode in (_MODE_READ, _MODE_READ_EOF) - - def writable(self): - """Return whether the file was opened for writing.""" - self._check_not_closed() - return self._mode == _MODE_WRITE - - # Mode-checking helper functions. 
- - def _check_not_closed(self): - if self.closed: - raise ValueError("I/O operation on closed file") - - def _check_can_read(self): - if not self.readable(): - raise io.UnsupportedOperation("File not open for reading") - - def _check_can_write(self): - if not self.writable(): - raise io.UnsupportedOperation("File not open for writing") - - def _check_can_seek(self): - if not self.seekable(): - raise io.UnsupportedOperation("Seeking is only supported " - "on files open for reading") - - # Fill the readahead buffer if it is empty. Returns False on EOF. - def _fill_buffer(self): - if self._buffer: - return True - - if self._decompressor.unused_data: - rawblock = self._decompressor.unused_data - else: - rawblock = self._fp.read(_BUFFER_SIZE) - - if not rawblock: - if self._decompressor.eof: - self._mode = _MODE_READ_EOF - self._size = self._pos - return False - else: - raise EOFError("Compressed file ended before the " - "end-of-stream marker was reached") - - # Continue to next stream. - if self._decompressor.eof: - self._decompressor = LZMADecompressor(**self._init_args) - - self._buffer = self._decompressor.decompress(rawblock) - return True - - # Read data until EOF. - # If return_data is false, consume the data without returning it. - def _read_all(self, return_data=True): - blocks = [] - while self._fill_buffer(): - if return_data: - blocks.append(self._buffer) - self._pos += len(self._buffer) - self._buffer = None - if return_data: - return b"".join(blocks) - - # Read a block of up to n bytes. - # If return_data is false, consume the data without returning it. - def _read_block(self, n, return_data=True): - blocks = [] - while n > 0 and self._fill_buffer(): - if n < len(self._buffer): - data = self._buffer[:n] - self._buffer = self._buffer[n:] - else: - data = self._buffer - self._buffer = None - if return_data: - blocks.append(data) - self._pos += len(data) - n -= len(data) - if return_data: - return b"".join(blocks) - - def peek(self, size=-1): - """Return buffered data without advancing the file position. - - Always returns at least one byte of data, unless at EOF. - The exact number of bytes returned is unspecified. - """ - self._check_can_read() - if self._mode == _MODE_READ_EOF or not self._fill_buffer(): - return b"" - return self._buffer - - def read(self, size=-1): - """Read up to size uncompressed bytes from the file. - - If size is negative or omitted, read until EOF is reached. - Returns b"" if the file is already at EOF. - """ - self._check_can_read() - if self._mode == _MODE_READ_EOF or size == 0: - return b"" - elif size < 0: - return self._read_all() - else: - return self._read_block(size) - - def read1(self, size=-1): - """Read up to size uncompressed bytes with at most one read - from the underlying stream. - - Returns b"" if the file is at EOF. - """ - self._check_can_read() - if (size == 0 or self._mode == _MODE_READ_EOF or - not self._fill_buffer()): - return b"" - if 0 < size < len(self._buffer): - data = self._buffer[:size] - self._buffer = self._buffer[size:] - else: - data = self._buffer - self._buffer = None - self._pos += len(data) - return data - - def write(self, data): - """Write a bytes object to the file. - - Returns the number of uncompressed bytes written, which is - always len(data). Note that due to buffering, the file on disk - may not reflect the data written until close() is called. 
- """ - self._check_can_write() - compressed = self._compressor.compress(data) - self._fp.write(compressed) - self._pos += len(data) - return len(data) - - # Rewind the file to the beginning of the data stream. - def _rewind(self): - self._fp.seek(0, 0) - self._mode = _MODE_READ - self._pos = 0 - self._decompressor = LZMADecompressor(**self._init_args) - self._buffer = None - - def seek(self, offset, whence=0): - """Change the file position. - - The new position is specified by offset, relative to the - position indicated by whence. Possible values for whence are: - - 0: start of stream (default): offset must not be negative - 1: current stream position - 2: end of stream; offset must not be positive - - Returns the new file position. - - Note that seeking is emulated, sp depending on the parameters, - this operation may be extremely slow. - """ - self._check_can_seek() - - # Recalculate offset as an absolute file position. - if whence == 0: - pass - elif whence == 1: - offset = self._pos + offset - elif whence == 2: - # Seeking relative to EOF - we need to know the file's size. - if self._size < 0: - self._read_all(return_data=False) - offset = self._size + offset - else: - raise ValueError("Invalid value for whence: {}".format(whence)) - - # Make it so that offset is the number of bytes to skip forward. - if offset < self._pos: - self._rewind() - else: - offset -= self._pos - - # Read and discard data until we reach the desired position. - if self._mode != _MODE_READ_EOF: - self._read_block(offset, return_data=False) - - return self._pos - - def tell(self): - """Return the current file position.""" - self._check_not_closed() - return self._pos - - -def compress(data, format=FORMAT_XZ, check=-1, preset=None, filters=None): - """Compress a block of data. - - Refer to LZMACompressor's docstring for a description of the - optional arguments *format*, *check*, *preset* and *filters*. - - For incremental compression, use an LZMACompressor object instead. - """ - comp = LZMACompressor(format, check, preset, filters) - return comp.compress(data) + comp.flush() - - -def decompress(data, format=FORMAT_AUTO, memlimit=None, filters=None): - """Decompress a block of data. - - Refer to LZMADecompressor's docstring for a description of the - optional arguments *format*, *check* and *filters*. - - For incremental decompression, use a LZMADecompressor object instead. - """ - results = [] - while True: - decomp = LZMADecompressor(format, memlimit, filters) - results.append(decomp.decompress(data)) - if not decomp.eof: - raise LZMAError("Compressed data ended before the " - "end-of-stream marker was reached") - if not decomp.unused_data: - return b"".join(results) - # There is unused data left over. Proceed to next stream. - data = decomp.unused_data diff --git a/src/pakfire/packages/__init__.py b/src/pakfire/packages/__init__.py index 414599e87..2b34143f0 100644 --- a/src/pakfire/packages/__init__.py +++ b/src/pakfire/packages/__init__.py @@ -21,12 +21,13 @@ import tarfile -from base import Package -from file import BinaryPackage, FilePackage, SourcePackage -from installed import DatabasePackage, InstalledPackage -from solv import SolvPackage +from . import file -from make import Makefile +from .base import Package +from .installed import DatabasePackage, InstalledPackage +from .solv import SolvPackage + +from .make import Makefile from pakfire.constants import * @@ -42,9 +43,9 @@ def open(pakfire, repo, filename): # Simply check if the given file is a tarfile. 
if tarfile.is_tarfile(filename): if filename.endswith(".src.%s" % PACKAGE_EXTENSION): - return SourcePackage(pakfire, repo, filename) + return file.SourcePackage(pakfire, repo, filename) - return BinaryPackage(pakfire, repo, filename) + return file.BinaryPackage(pakfire, repo, filename) elif filename.endswith(".%s" % MAKEFILE_EXTENSION): return Makefile(pakfire, filename) diff --git a/src/pakfire/packages/base.py b/src/pakfire/packages/base.py index 36758e318..de2b9b621 100644 --- a/src/pakfire/packages/base.py +++ b/src/pakfire/packages/base.py @@ -265,7 +265,7 @@ class Package(object): @property def metadata(self): - raise NotImplementedError, self + raise NotImplementedError(self) @property def friendly_name(self): @@ -499,7 +499,7 @@ class Package(object): return [] def extract(self, path, prefix=None): - raise NotImplementedError, "%s" % repr(self) + raise NotImplementedError("%s" % repr(self)) def remove(self, message=None, prefix=None): # Make two filelists. One contains all binary files that need to be @@ -565,8 +565,8 @@ class Package(object): try: shutil.move(file, file_save) - except shutil.Error, e: - print e + except shutil.Error as e: + print(e) if prefix: file_save = os.path.relpath(file_save, prefix) diff --git a/src/pakfire/packages/file.py b/src/pakfire/packages/file.py index 7cf3630c3..8c5c40582 100644 --- a/src/pakfire/packages/file.py +++ b/src/pakfire/packages/file.py @@ -20,6 +20,7 @@ ############################################################################### import hashlib +import lzma import os import re import shutil @@ -29,17 +30,16 @@ import tempfile import logging log = logging.getLogger("pakfire") -import pakfire.filelist -import pakfire.lzma as lzma -import pakfire.util as util -import pakfire.compress as compress -from pakfire.constants import * -from pakfire.i18n import _ +from ..constants import * +from ..i18n import _ -import base -import lexer -import make -import tar +from .. import compress +from .. import filelist + +from . import base +from . import lexer +from . import make +from . import tar class FilePackage(base.Package): """ @@ -79,7 +79,7 @@ class FilePackage(base.Package): pass else: - raise PackageFormatUnsupportedError, _("Filename: %s") % self.filename + raise PackageFormatUnsupportedError(_("Filename: %s") % self.filename) def check(self): """ @@ -87,7 +87,7 @@ class FilePackage(base.Package): can be opened. """ if not tarfile.is_tarfile(self.filename): - raise FileError, "Given file is not of correct format: %s" % self.filename + raise FileError("Given file is not of correct format: %s" % self.filename) assert self.format in PACKAGE_FORMATS_SUPPORTED, self.format @@ -134,8 +134,8 @@ class FilePackage(base.Package): payload_archive = tar.InnerTarFile.open(fileobj=payload) else: - raise Exception, "Unhandled payload compression type: %s" % \ - self.payload_compression + raise Exception("Unhandled payload compression type: %s" % \ + self.payload_compression) return payload_archive @@ -152,6 +152,8 @@ class FilePackage(base.Package): pb = None if message: message = "%-10s : %s" % (message, self.friendly_name) + + from . import util pb = util.make_progress(message, len(self.filelist), eta=False) # Collect messages with errors and warnings, that are passed to @@ -169,7 +171,7 @@ class FilePackage(base.Package): i = 0 while True: - member = payload_archive.next() + member = next(payload_archive) if not member: break @@ -286,7 +288,7 @@ class FilePackage(base.Package): # Search for filename. 
while True: - member = payload_archive.next() + member = next(payload_archive) if not member: break @@ -655,7 +657,7 @@ class FilePackage(base.Package): a.close() sigs = [] - for signature in self.signatures.values(): + for signature in list(self.signatures.values()): sigs += self.pakfire.keyring.verify(signature, chksums) # Open the archive to access all files we will need. @@ -670,7 +672,7 @@ class FilePackage(base.Package): f.close() a.close() - for filename, chksum in chksums.items(): + for filename, chksum in list(chksums.items()): ret = self.check_chksum(filename, chksum) if ret: @@ -679,7 +681,7 @@ class FilePackage(base.Package): else: log.debug("Checksum of %s does not match." % filename) - raise Exception, "Checksum does not match: %s" % filename + raise Exception("Checksum does not match: %s" % filename) return sigs @@ -705,6 +707,7 @@ class FilePackage(base.Package): """ Calculate the hash1 of this package. """ + from . import util return util.calc_hash1(self.filename) @property diff --git a/src/pakfire/packages/installed.py b/src/pakfire/packages/installed.py index b358f6404..577a21b57 100644 --- a/src/pakfire/packages/installed.py +++ b/src/pakfire/packages/installed.py @@ -24,8 +24,8 @@ import os import pakfire.downloader import pakfire.filelist -from base import Package -from file import BinaryPackage +from .base import Package +from .file import BinaryPackage import pakfire.util as util from pakfire.constants import * @@ -41,7 +41,7 @@ class DatabasePackage(Package): self._data = {} self._filelist = None - for key in data.keys(): + for key in list(data.keys()): self._data[key] = data[key] def __repr__(self): @@ -325,7 +325,7 @@ class DatabasePackage(Package): # Verify if the download was okay. if not cache.verify(cache_filename, self.hash1): - raise Exception, "XXX this should never happen..." + raise Exception("XXX this should never happen...") filename = os.path.join(cache.path, cache_filename) return BinaryPackage(self.pakfire, self.repo, filename) diff --git a/src/pakfire/packages/lexer.py b/src/pakfire/packages/lexer.py index c31ffd69b..38bd52211 100644 --- a/src/pakfire/packages/lexer.py +++ b/src/pakfire/packages/lexer.py @@ -308,7 +308,7 @@ class Lexer(object): break if not found: - raise LexerUnhandledLine, "%d: %s" % (self.lineno, line) + raise LexerUnhandledLine("%d: %s" % (self.lineno, line)) def read_block(self, pattern_start=None, pattern_line=None, pattern_end=None, raw=False): @@ -349,10 +349,10 @@ class Lexer(object): continue if not line.startswith(LEXER_BLOCK_LINE_INDENT): - raise LexerError, "Line has not the right indentation: %d: %s" \ - % (self.lineno, line) + raise LexerError("Line has not the right indentation: %d: %s" \ + % (self.lineno, line)) - raise LexerUnhandledLine, "%d: %s" % (self.lineno, line) + raise LexerUnhandledLine("%d: %s" % (self.lineno, line)) return (groups, lines) @@ -366,7 +366,7 @@ class Lexer(object): if not line: return - raise LexerUnhandledLine, "%d: %s" % (self.lineno, line) + raise LexerUnhandledLine("%d: %s" % (self.lineno, line)) DEP_DEFINITIONS = ("prerequires", "requires", "provides", "conflicts", "obsoletes", "recommends", "suggests") @@ -375,7 +375,7 @@ class Lexer(object): m = re.match(pattern, line) if not m: - raise LexerError, "Not a definition: %s" % line + raise LexerError("Not a definition: %s" % line) # Line was correctly parsed, can go on. 
self._lineno += 1 @@ -411,7 +411,7 @@ class Lexer(object): m = re.match(LEXER_DEFINE_BEGIN, line) if not m: - raise Exception, "XXX not a define" + raise Exception("XXX not a define") # Check content of next line. found = None @@ -435,7 +435,7 @@ class Lexer(object): if found is None: line = self.get_line(self._lineno) - raise LexerUnhandledLine, "%d: %s" % (self.lineno, line) + raise LexerUnhandledLine("%d: %s" % (self.lineno, line)) # Go in to next line. self._lineno += 1 @@ -464,7 +464,7 @@ class Lexer(object): value.append("") continue - raise LexerError, "Unhandled line: %s" % line + raise LexerError("Unhandled line: %s" % line) self._definitions[key] = "\n".join(value) @@ -487,8 +487,8 @@ class Lexer(object): found = True if not found: - raise LexerError, "No valid begin of if statement: %d: %s" \ - % (self.lineno, line) + raise LexerError("No valid begin of if statement: %d: %s" \ + % (self.lineno, line)) self._lineno += 1 clause = m.groups() @@ -519,10 +519,10 @@ class Lexer(object): lines.append("") continue - raise LexerUnhandledLine, "%d: %s" % (self.lineno, line) + raise LexerUnhandledLine("%d: %s" % (self.lineno, line)) if not block_closed: - raise LexerError, "Unclosed if block" + raise LexerError("Unclosed if block") return (clause, lines) @@ -549,7 +549,7 @@ class Lexer(object): line = self.get_line(self._lineno) m = re.match(LEXER_IF_END, line) if not m: - raise LexerError, "Unclosed if clause" + raise LexerError("Unclosed if clause") self._lineno += 1 @@ -572,7 +572,7 @@ class Lexer(object): elif op == "!=": val = not a == b else: - raise LexerError, "Unknown operator: %s" % op + raise LexerError("Unknown operator: %s" % op) else: # Else is always true. @@ -654,15 +654,15 @@ class TemplateLexer(DefaultLexer): m = re.match(LEXER_SCRIPTLET_BEGIN, line) if not m: - raise Exception, "Not a scriptlet" + raise Exception("Not a scriptlet") self._lineno += 1 name = m.group(1) # check if scriptlet was already defined. - if self.scriptlets.has_key(name): - raise Exception, "Scriptlet %s is already defined" % name + if name in self.scriptlets: + raise Exception("Scriptlet %s is already defined" % name) lang = m.group(2) or "shell" lines = [ @@ -689,7 +689,7 @@ class TemplateLexer(DefaultLexer): self._lineno += 1 continue - raise LexerUnhandledLine, "%d: %s" % (self.lineno, line) + raise LexerUnhandledLine("%d: %s" % (self.lineno, line)) self.scriptlets[name] = { "lang" : lang, @@ -745,7 +745,7 @@ class PackageLexer(TemplateLexer): m = re.match(LEXER_PACKAGE_INHERIT, line) if not m: - raise LexerError, "Not a template inheritance: %s" % line + raise LexerError("Not a template inheritance: %s" % line) self._lineno += 1 @@ -847,7 +847,7 @@ class RootLexer(ExportLexer): # Import all environment variables. if environ: - for k, v in environ.items(): + for k, v in list(environ.items()): self._definitions[k] = v self.exports.append(k) @@ -919,7 +919,7 @@ class RootLexer(ExportLexer): m = re.match(LEXER_BUILD_BEGIN, line) if not m: - raise LexerError, "Not a build statement: %s" % line + raise LexerError("Not a build statement: %s" % line) self._lineno += 1 @@ -953,7 +953,7 @@ class RootLexer(ExportLexer): m = re.match(LEXER_INCLUDE, line) if not m: - raise LexerError, "Not an include statement: %s" % line + raise LexerError("Not an include statement: %s" % line) # Get the filename from the line. file = m.group(1) @@ -1012,7 +1012,7 @@ class PackagesLexer(DefaultLexer): # Copy all templates and packages but make sure # to update the parent lexer (for accessing each other). 
- for name, template in other.templates.items(): + for name, template in list(other.templates.items()): template.parent = self self._templates[name] = template @@ -1034,7 +1034,7 @@ class PackagesLexer(DefaultLexer): m = re.match(LEXER_TEMPLATE_BEGIN, line) if not m: - raise Exception, "Not a template" + raise Exception("Not a template") # Line was correctly parsed, can go on. self._lineno += 1 @@ -1069,7 +1069,7 @@ class PackagesLexer(DefaultLexer): m = re.match(LEXER_PACKAGE_BEGIN, line) if not m: - raise Exception, "Not a package: %s" %line + raise Exception("Not a package: %s" %line) self._lineno += 1 @@ -1078,7 +1078,7 @@ class PackagesLexer(DefaultLexer): m = re.match(LEXER_VALID_PACKAGE_NAME, name) if not m: - raise LexerError, "Invalid package name: %s" % name + raise LexerError("Invalid package name: %s" % name) lines = ["_name = %s" % name] @@ -1108,14 +1108,14 @@ class PackagesLexer(DefaultLexer): # If there is an unhandled line in a block, we raise an error. if opened: - raise Exception, "XXX unhandled line in package block: %s" % line + raise Exception("XXX unhandled line in package block: %s" % line) # If the block was never opened, we just go on. else: break if opened: - raise LexerError, "Unclosed package block '%s'." % name + raise LexerError("Unclosed package block '%s'." % name) package = PackageLexer(lines, parent=self) self.packages.append(package) diff --git a/src/pakfire/packages/make.py b/src/pakfire/packages/make.py index 4a9c4708c..95062cce4 100644 --- a/src/pakfire/packages/make.py +++ b/src/pakfire/packages/make.py @@ -27,23 +27,20 @@ import tarfile import tempfile import uuid -from urlgrabber.grabber import URLGrabber, URLGrabError -from urlgrabber.progress import TextMeter - -import lexer -import packager +from . import lexer +from . import packager import logging log = logging.getLogger("pakfire") -import pakfire.downloader as downloader -import pakfire.util as util +from ..constants import * +from ..i18n import _ -from base import Package +from .. import downloader +from .. import system +from .. import util -from pakfire.constants import * -from pakfire.i18n import _ -from pakfire.system import system +from .base import Package class MakefileBase(Package): def __init__(self, pakfire, filename=None, lines=None): @@ -52,7 +49,7 @@ class MakefileBase(Package): # Update environment. environ = self.pakfire.distro.environ environ.update({ - "PARALLELISMFLAGS" : "-j%d" % system.parallelism, + "PARALLELISMFLAGS" : "-j%d" % system.system.parallelism, }) if filename: diff --git a/src/pakfire/packages/packager.py b/src/pakfire/packages/packager.py index 8ee97c478..80d5319fb 100644 --- a/src/pakfire/packages/packager.py +++ b/src/pakfire/packages/packager.py @@ -36,14 +36,12 @@ import zlib import logging log = logging.getLogger("pakfire") -import pakfire.lzma as lzma import pakfire.util as util from pakfire.constants import * from pakfire.i18n import _ -import file -import tar +from . 
import tar class Packager(object): payload_compression = None @@ -148,7 +146,7 @@ class Packager(object): datafile = tar.InnerTarFile.open(datafile) while True: - m = datafile.next() + m = next(datafile) if not m: break @@ -206,7 +204,7 @@ class Packager(object): t = tar.InnerTarFile.open(datafile) while True: - m = t.next() + m = next(t) if not m: break @@ -481,7 +479,7 @@ class BinaryPackager(Packager): try: f = open(path, "b") except OSError: - raise Exception, "Cannot open script file: %s" % lang["path"] + raise Exception("Cannot open script file: %s" % lang["path"]) s = open(scriptlet_file, "wb") @@ -517,7 +515,7 @@ class BinaryPackager(Packager): s.close() else: - raise Exception, "Unknown scriptlet language: %s" % scriptlet["lang"] + raise Exception("Unknown scriptlet language: %s" % scriptlet["lang"]) scriptlets.append((scriptlet_name, scriptlet_file)) @@ -634,6 +632,9 @@ class BinaryPackager(Packager): except OSError: shutil.copy2(tempfile, resultfile) + # XXX to resolve a cyclic dependency + from . import file + return file.BinaryPackage(self.pakfire, self.pakfire.repos.dummy, resultfile) diff --git a/src/pakfire/packages/solv.py b/src/pakfire/packages/solv.py index e8975028c..90e672ec6 100644 --- a/src/pakfire/packages/solv.py +++ b/src/pakfire/packages/solv.py @@ -22,8 +22,8 @@ import os import re -import base -import file +from . import base +from . import file class SolvPackage(base.Package): def __init__(self, pakfire, solvable, repo=None): diff --git a/src/pakfire/packages/tar.py b/src/pakfire/packages/tar.py index 2cdc1b0c9..cd4306e05 100644 --- a/src/pakfire/packages/tar.py +++ b/src/pakfire/packages/tar.py @@ -19,13 +19,13 @@ # # ############################################################################### +import lzma import os import tarfile import logging log = logging.getLogger("pakfire") -import pakfire.lzma as lzma import pakfire.util as util from pakfire.constants import * from pakfire.i18n import _ @@ -83,7 +83,7 @@ class InnerTarFile(tarfile.TarFile): # Extract file the normal way... try: tarfile.TarFile.extract(self, member, path) - except OSError, e: + except OSError as e: log.warning(_("Could not extract file: /%(src)s - %(dst)s") \ % { "src" : member.name, "dst" : e, }) diff --git a/src/pakfire/progressbar.py b/src/pakfire/progressbar.py index 434676822..b05a53756 100644 --- a/src/pakfire/progressbar.py +++ b/src/pakfire/progressbar.py @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/python3 ############################################################################### # # # Pakfire - The IPFire package management system # @@ -19,7 +19,7 @@ # # ############################################################################### -from __future__ import division + import datetime import fcntl @@ -30,9 +30,9 @@ import sys import termios import time -import util +from . import util -from i18n import _ +from .i18n import _ DEFAULT_VALUE_MAX = 100 DEFAULT_TERM_WIDTH = 80 diff --git a/src/pakfire/repository/__init__.py b/src/pakfire/repository/__init__.py index e242b655b..b0c1a653f 100644 --- a/src/pakfire/repository/__init__.py +++ b/src/pakfire/repository/__init__.py @@ -24,14 +24,14 @@ import re import logging log = logging.getLogger("pakfire") -import pakfire.packages as packages +from .. 
import packages -from pakfire.i18n import _ +from .base import RepositoryDummy +from .local import RepositoryDir, RepositoryBuild +from .remote import RepositoryRemote +from .system import RepositorySystem -from base import RepositoryDummy -from local import RepositoryDir, RepositoryBuild -from remote import RepositoryRemote -from system import RepositorySystem +from ..i18n import _ class Repositories(object): """ @@ -47,7 +47,8 @@ class Repositories(object): self.__repos = {} # Create a dummy repository - self.dummy = RepositoryDummy(self.pakfire) + from . import base + self.dummy = base.RepositoryDummy(self.pakfire) # Create the local repository. self.local = RepositorySystem(self.pakfire) @@ -63,7 +64,7 @@ class Repositories(object): self._parse(repo_name, repo_args) def __iter__(self): - repositories = self.__repos.values() + repositories = list(self.__repos.values()) repositories.sort() return iter(repositories) @@ -131,7 +132,7 @@ class Repositories(object): "arch" : self.distro.arch, } - for k, v in _args.items(): + for k, v in list(_args.items()): # Skip all non-strings. if not type(v) == type("a"): continue @@ -154,8 +155,8 @@ class Repositories(object): self.add_repo(repo) def add_repo(self, repo): - if self.__repos.has_key(repo.name): - raise Exception, "Repository with that name does already exist: %s" % repo.name + if repo.name in self.__repos: + raise Exception("Repository with that name does already exist: %s" % repo.name) self.__repos[repo.name] = repo diff --git a/src/pakfire/repository/base.py b/src/pakfire/repository/base.py index c16ca37c8..53a2b928f 100644 --- a/src/pakfire/repository/base.py +++ b/src/pakfire/repository/base.py @@ -22,10 +22,10 @@ import logging log = logging.getLogger("pakfire") -import index +from .. import satsolver -import pakfire.packages as packages -import pakfire.satsolver as satsolver +from . import index +from . import packages class RepositoryFactory(object): def __init__(self, pakfire, name, description): @@ -133,7 +133,7 @@ class RepositoryFactory(object): dumps = [] # Dump all package information of the packages in this repository. for pkg in self: - dump = pkg.dump(long=long, filelist=filelist) + dump = pkg.dump(int=int, filelist=filelist) dumps.append(dump) return "\n\n".join(dumps) diff --git a/src/pakfire/repository/database.py b/src/pakfire/repository/database.py index 6780aa217..8dfdde1f0 100644 --- a/src/pakfire/repository/database.py +++ b/src/pakfire/repository/database.py @@ -117,7 +117,7 @@ class DatabaseLocal(Database): # Check if we actually can open the database. if not self.format in DATABASE_FORMATS_SUPPORTED: - raise DatabaseFormatError, _("The format of the database is not supported by this version of pakfire.") + raise DatabaseFormatError(_("The format of the database is not supported by this version of pakfire.")) def __len__(self): count = 0 @@ -226,7 +226,7 @@ class DatabaseLocal(Database): # Check if database version is supported. 
if self.format > DATABASE_FORMAT: - raise DatabaseError, _("Cannot use database with version greater than %s.") % DATABASE_FORMAT + raise DatabaseError(_("Cannot use database with version greater than %s.") % DATABASE_FORMAT) log.info(_("Migrating database from format %(old)s to %(new)s.") % \ { "old" : self.format, "new" : DATABASE_FORMAT }) diff --git a/src/pakfire/repository/index.py b/src/pakfire/repository/index.py index 383e35be8..850a23086 100644 --- a/src/pakfire/repository/index.py +++ b/src/pakfire/repository/index.py @@ -24,8 +24,7 @@ import os import logging log = logging.getLogger("pakfire") -import pakfire.packages as packages -import pakfire.satsolver as satsolver +from .. import packages class Index(object): """ @@ -66,6 +65,7 @@ class Index(object): def add_package(self, pkg): log.debug("Adding package to index %s: %s" % (self, pkg)) + from .. import satsolver solvable = satsolver.Solvable(self.solver_repo, pkg.name, pkg.friendly_version, pkg.arch) diff --git a/src/pakfire/repository/local.py b/src/pakfire/repository/local.py index 99def7a44..5dda9b906 100644 --- a/src/pakfire/repository/local.py +++ b/src/pakfire/repository/local.py @@ -22,13 +22,12 @@ import os import shutil import tempfile -import urlgrabber import logging log = logging.getLogger("pakfire") -import base -import metadata +from . import base +from . import metadata import pakfire.compress as compress import pakfire.downloader as downloader diff --git a/src/pakfire/repository/remote.py b/src/pakfire/repository/remote.py index 410750acb..223c5cf49 100644 --- a/src/pakfire/repository/remote.py +++ b/src/pakfire/repository/remote.py @@ -20,14 +20,13 @@ ############################################################################### import os -import urlgrabber import logging log = logging.getLogger("pakfire") -import base -import cache -import metadata +from . import base +from . import cache +from . import metadata import pakfire.compress as compress import pakfire.downloader as downloader @@ -94,7 +93,7 @@ class RepositoryRemote(base.RepositoryFactory): "http://" : 75, } - for url, prio in url2priority.items(): + for url, prio in list(url2priority.items()): if self.baseurl.startswith(url): priority = prio break @@ -148,8 +147,8 @@ class RepositoryRemote(base.RepositoryFactory): exists = self.cache.exists(cache_filename) if not exists and offline: - raise OfflineModeError, _("No metadata available for repository %s. Cannot download any.") \ - % self.name + raise OfflineModeError(_("No metadata available for repository %s. Cannot download any.") \ + % self.name) elif exists and offline: # Repository metadata exists. We cannot update anything because of the offline mode. @@ -171,9 +170,9 @@ class RepositoryRemote(base.RepositoryFactory): while True: try: data = grabber.urlread(filename, limit=METADATA_DOWNLOAD_LIMIT) - except urlgrabber.grabber.URLGrabError, e: + except urlgrabber.grabber.URLGrabError as e: if e.errno == 256: - raise DownloadError, _("Could not update metadata for %s from any mirror server") % self.name + raise DownloadError(_("Could not update metadata for %s from any mirror server") % self.name) grabber.increment_mirror(grabber) continue @@ -221,7 +220,7 @@ class RepositoryRemote(base.RepositoryFactory): # Raise an exception when we are running in offline mode but an update is required. 
if force and offline: - raise OfflineModeError, _("Cannot download package database for %s in offline mode.") % self.name + raise OfflineModeError(_("Cannot download package database for %s in offline mode.") % self.name) elif not force: return @@ -306,13 +305,13 @@ class RepositoryRemote(base.RepositoryFactory): # If we are in offline mode, we cannot download any files. if self.pakfire.offline and not self.baseurl.startswith("file://"): - raise OfflineModeError, _("Cannot download this file in offline mode: %s") \ - % filename + raise OfflineModeError(_("Cannot download this file in offline mode: %s") \ + % filename) try: i = grabber.urlopen(filename) - except urlgrabber.grabber.URLGrabError, e: - raise DownloadError, _("Could not download %s: %s") % (filename, e) + except urlgrabber.grabber.URLGrabError as e: + raise DownloadError(_("Could not download %s: %s") % (filename, e)) # Open input and output files and download the file. o = self.cache.open(cache_filename, "w") diff --git a/src/pakfire/repository/system.py b/src/pakfire/repository/system.py index 6a02fbe27..e84a231e0 100644 --- a/src/pakfire/repository/system.py +++ b/src/pakfire/repository/system.py @@ -21,8 +21,8 @@ import os -import base -import database +from . import base +from . import database import pakfire.packages as packages import pakfire.util as util diff --git a/src/pakfire/satsolver.py b/src/pakfire/satsolver.py index 5b692b8f4..7e9949fe7 100644 --- a/src/pakfire/satsolver.py +++ b/src/pakfire/satsolver.py @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/python3 ############################################################################### # # # Pakfire - The IPFire package management system # @@ -25,14 +25,14 @@ import time import logging log = logging.getLogger("pakfire") -import filelist -import packages -import transaction -import util -import _pakfire +from . import _pakfire +from . import filelist +from . import packages +from . import transaction +from . import util -from constants import * -from i18n import _ +from .constants import * +from .i18n import _ # Put some variables into our own namespace, to make them easily accessible # for code, that imports the satsolver module. @@ -157,7 +157,7 @@ class Pool(_pakfire.Pool): if solver.status: return solver - raise DependencyError, solver.get_problem_string() + raise DependencyError(solver.get_problem_string()) def solve(self, request, interactive=False, logger=None, force_best=False, **kwargs): # XXX implement interactive @@ -169,7 +169,7 @@ class Pool(_pakfire.Pool): solver = Solver(self, request, logger=logger) # Apply configuration to solver. - for key, val in kwargs.items(): + for key, val in list(kwargs.items()): solver.set(key, val) # Do the solving. 
@@ -201,7 +201,7 @@ class Request(_pakfire.Request): self.install_name(what) return - raise Exception, "Unknown type" + raise Exception("Unknown type") def remove(self, what): if isinstance(what, Solvable): @@ -216,7 +216,7 @@ class Request(_pakfire.Request): self.remove_name(what) return - raise Exception, "Unknown type" + raise Exception("Unknown type") def update(self, what): if isinstance(what, Solvable): @@ -231,7 +231,7 @@ class Request(_pakfire.Request): self.update_name(what) return - raise Exception, "Unknown type" + raise Exception("Unknown type") def lock(self, what): if isinstance(what, Solvable): @@ -246,7 +246,7 @@ class Request(_pakfire.Request): self.lock_name(what) return - raise Exception, "Unknown type" + raise Exception("Unknown type") def noobsoletes(self, what): if isinstance(what, Solvable): @@ -261,7 +261,7 @@ class Request(_pakfire.Request): self.noobsoletes_name(what) return - raise Exception, "Unknown type" + raise Exception("Unknown type") class Solver(object): @@ -305,14 +305,14 @@ class Solver(object): try: flag = self.option2flag[option] except KeyError: - raise Exception, "Unknown configuration setting: %s" % option + raise Exception("Unknown configuration setting: %s" % option) self.solver.set_flag(flag, value) def get(self, option): try: flag = self.option2flag[option] except KeyError: - raise Exception, "Unknown configuration setting: %s" % option + raise Exception("Unknown configuration setting: %s" % option) return self.solver.get_flag(flag) def solve(self, force_best=False): @@ -328,7 +328,7 @@ class Solver(object): if self.status: self.logger.info(_("Dependency solving finished in %.2f ms") % (self.time * 1000)) else: - raise DependencyError, self.get_problem_string() + raise DependencyError(self.get_problem_string()) @property def problems(self): @@ -386,17 +386,17 @@ class Solver(object): if not util.ask_user(_("Do you want to manually alter the request?")): return False - print _("You can now try to satisfy the solver by modifying your request.") + print(_("You can now try to satisfy the solver by modifying your request.")) altered = False while True: if len(problems) > 1: - print _("Which problem to you want to resolve?") + print(_("Which problem to you want to resolve?")) if altered: - print _("Press enter to try to re-solve the request.") - print "[1-%s]:" % len(problems), + print(_("Press enter to try to re-solve the request.")) + print("[1-%s]:" % len(problems), end=' ') - answer = raw_input() + answer = input() # If the user did not enter anything, we abort immediately. 
if not answer: @@ -424,22 +424,22 @@ class Solver(object): if len(solutions) == 1: solution = solutions[0] - print _(" Solution: %s") % solution - print + print(_(" Solution: %s") % solution) + print() if util.ask_user("Do you accept the solution above?"): altered = True - print "XXX do something" + print("XXX do something") continue else: - print _(" Solutions:") + print(_(" Solutions:")) i = 0 for solution in solutions: i += 1 - print " #%d: %s" % (i, solution) + print(" #%d: %s" % (i, solution)) - print + print() if not altered: return False diff --git a/src/pakfire/server.py b/src/pakfire/server.py index edc070fd8..70f58edcf 100644 --- a/src/pakfire/server.py +++ b/src/pakfire/server.py @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/python3 ############################################################################### # # # Pakfire - The IPFire package management system # @@ -26,7 +26,7 @@ import socket import subprocess import tempfile import time -import xmlrpclib +import xmlrpc.client import logging log = logging.getLogger("pakfire") @@ -140,7 +140,7 @@ class Source(object): pakfire.api.dist(pkgs, resultdirs=[tmpdir,], **pakfire_args) # Create a kind of dummy repository to link the packages against it. - if pakfire_args.has_key("build_id"): + if "build_id" in pakfire_args: del pakfire_args["build_id"] pakfire_args["mode"] = "server" @@ -161,7 +161,7 @@ class Source(object): return self.update_files(_files) -class XMLRPCTransport(xmlrpclib.Transport): +class XMLRPCTransport(xmlrpc.client.Transport): user_agent = "pakfire/%s" % PAKFIRE_VERSION def single_request(self, *args, **kwargs): @@ -172,9 +172,9 @@ class XMLRPCTransport(xmlrpclib.Transport): while tries: try: - ret = xmlrpclib.Transport.single_request(self, *args, **kwargs) + ret = xmlrpc.client.Transport.single_request(self, *args, **kwargs) - except socket.error, e: + except socket.error as e: # These kinds of errors are not fatal, but they can happen on # a bad internet connection or whatever. # 32 Broken pipe @@ -183,12 +183,12 @@ class XMLRPCTransport(xmlrpclib.Transport): if not e.errno in (32, 110, 111,): raise - except xmlrpclib.ProtocolError, e: + except xmlrpc.client.ProtocolError as e: # Log all XMLRPC protocol errors. log.error("XMLRPC protocol error:") log.error(" URL: %s" % e.url) log.error(" HTTP headers:") - for header in e.headers.items(): + for header in list(e.headers.items()): log.error(" %s: %s" % header) log.error(" Error code: %s" % e.errcode) log.error(" Error message: %s" % e.errmsg) @@ -207,21 +207,21 @@ class XMLRPCTransport(xmlrpclib.Transport): else: log.error("Maximum number of tries was reached. Giving up.") # XXX need better exception here. - raise Exception, "Could not fulfill request." + raise Exception("Could not fulfill request.") return ret -class ServerProxy(xmlrpclib.ServerProxy): +class ServerProxy(xmlrpc.client.ServerProxy): def __init__(self, server, *args, **kwargs): # Some default settings. - if not kwargs.has_key("transport"): + if "transport" not in kwargs: kwargs["transport"] = XMLRPCTransport() kwargs["allow_none"] = True - xmlrpclib.ServerProxy.__init__(self, server, *args, **kwargs) + xmlrpc.client.ServerProxy.__init__(self, server, *args, **kwargs) class Server(object): @@ -353,7 +353,7 @@ class Server(object): log.info("Uploading chunk %s/%s of %s." % (chunk, chunks, os.path.basename(filename))) - data = xmlrpclib.Binary(data) + data = xmlrpc.client.Binary(data) self.conn.upload_chunk(upload_id, data) # Tell the server, that we finished the upload. 
@@ -362,7 +362,7 @@ class Server(object): # If the server sends false, something happened with the upload that # could not be recovered. if not ret: - raise Exception, "Upload failed." + raise Exception("Upload failed.") def update_build_status(self, build_id, status, message=""): ret = self.conn.update_build_state(build_id, status, message) @@ -370,7 +370,7 @@ class Server(object): # If the server returns False, then it did not acknowledge our status # update and the build has to be aborted. if not ret: - raise BuildAbortedException, "The build was aborted by the master server." + raise BuildAbortedException("The build was aborted by the master server.") def build_job(self, type=None): build = self.conn.build_job() # XXX type=None @@ -390,7 +390,7 @@ class Server(object): try: func = job_types[build_type] except KeyError: - raise Exception, "Build type not supported: %s" % type + raise Exception("Build type not supported: %s" % type) # Call the function that processes the build and try to catch general # exceptions and report them to the server. @@ -402,7 +402,7 @@ class Server(object): # This has already been reported by func. raise - except Exception, e: + except Exception as e: # Format the exception and send it to the server. message = "%s: %s" % (e.__class__.__name__, e) @@ -432,9 +432,9 @@ class Server(object): # Check if the download checksum matches. if pakfire.util.calc_hash1(tmpfile) == hash1: - print "Checksum matches: %s" % hash1 + print("Checksum matches: %s" % hash1) else: - raise DownloadError, "Download was corrupted" + raise DownloadError("Download was corrupted") # Update the build status on the server. self.update_build_status(build_id, "running") @@ -454,7 +454,7 @@ class Server(object): self.upload_file(file, build_id) - except DependencyError, e: + except DependencyError as e: message = "%s: %s" % (e.__class__.__name__, e) self.update_build_status(build_id, "dependency_error", message) raise diff --git a/src/pakfire/shell.py b/src/pakfire/shell.py index e1f7600a8..fcdf76829 100644 --- a/src/pakfire/shell.py +++ b/src/pakfire/shell.py @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/python3 ############################################################################### # # # Pakfire - The IPFire package management system # @@ -25,11 +25,11 @@ import select import subprocess import time -from _pakfire import PERSONALITY_LINUX, PERSONALITY_LINUX32 +from ._pakfire import PERSONALITY_LINUX, PERSONALITY_LINUX32 from pakfire.i18n import _ import pakfire.util as util -from errors import * +from .errors import * class ShellExecuteEnvironment(object): def __init__(self, command, cwd=None, chroot_path=None, personality=None, shell=False, timeout=0, env=None, @@ -120,7 +120,7 @@ class ShellExecuteEnvironment(object): os.killpg(child.pid, 9) if not nice_exit: - raise commandTimeoutExpired, (_("Command exceeded timeout (%(timeout)d): %(command)s") % (self.timeout, self.command)) + raise commandTimeoutExpired(_("Command exceeded timeout (%(timeout)d): %(command)s") % (self.timeout, self.command)) # Save exitcode. 
self.exitcode = child.returncode @@ -129,7 +129,7 @@ class ShellExecuteEnvironment(object): self.logger.debug(_("Child returncode was: %s") % self.exitcode) if self.exitcode and self.log_errors: - raise ShellEnvironmentError, (_("Command failed: %s") % self.command, self.exitcode) + raise ShellEnvironmentError(_("Command failed: %s") % self.command, self.exitcode) return self.exitcode diff --git a/src/pakfire/system.py b/src/pakfire/system.py index 758a8d7ad..a4aa25521 100644 --- a/src/pakfire/system.py +++ b/src/pakfire/system.py @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/python3 ############################################################################### # # # Pakfire - The IPFire package management system # @@ -19,19 +19,16 @@ # # ############################################################################### -from __future__ import division - import multiprocessing import os import socket import tempfile -import distro -import shell +from . import shell from . import _pakfire -from i18n import _ +from .i18n import _ class System(object): """ @@ -51,6 +48,7 @@ class System(object): @property def distro(self): if not hasattr(self, "_distro"): + from . import distro self._distro = distro.Distribution() return self._distro @@ -153,8 +151,8 @@ class System(object): return ret or _("Could not be determined") @property - def cpu_bogomips(self): - return _pakfire.performance_index() + def cpu_bogomips(self): + return _pakfire.performance_index() def get_loadavg(self): return os.getloadavg() @@ -417,7 +415,7 @@ class Mountpoint(object): try: handle, path = tempfile.mkstemp(prefix="ro-test-", dir=self.fullpath) - except OSError, e: + except OSError as e: # Read-only file system. if e.errno == 30: return True @@ -442,15 +440,15 @@ class Mountpoint(object): shell=False, ) shellenv.execute() - except ShellEnvironmentError, e: + except ShellEnvironmentError as e: raise OSError if __name__ == "__main__": - print "Hostname", system.hostname - print "Arch", system.arch - print "Supported arches", system.supported_arches + print("Hostname", system.hostname) + print("Arch", system.arch) + print("Supported arches", system.supported_arches) - print "CPU Model", system.cpu_model - print "CPU count", system.cpu_count - print "Memory", system.memory + print("CPU Model", system.cpu_model) + print("CPU count", system.cpu_count) + print("Memory", system.memory) diff --git a/src/pakfire/transaction.py b/src/pakfire/transaction.py index 035598b05..0bf867a90 100644 --- a/src/pakfire/transaction.py +++ b/src/pakfire/transaction.py @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/python3 ############################################################################### # # # Pakfire - The IPFire package management system # @@ -20,25 +20,24 @@ ############################################################################### import os -import progressbar import sys import time -import i18n -import packages -import satsolver -import system -import util -import _pakfire +from . import _pakfire +from . import i18n +from . import packages +from . import progressbar +from . import system +from . import util import logging log = logging.getLogger("pakfire") -from constants import * -from i18n import _ +from .constants import * +from .i18n import _ # Import all actions directly. 
-from actions import * +from .actions import * class TransactionCheck(object): def __init__(self, pakfire, transaction): @@ -58,7 +57,7 @@ class TransactionCheck(object): def error_files(self): ret = [] - for name, count in self.filelist.items(): + for name, count in list(self.filelist.items()): if count > 1: ret.append(name) @@ -254,7 +253,7 @@ class Transaction(object): self._steps = [] self.installsizechange = 0 - def __nonzero__(self): + def __bool__(self): if self.steps: return True @@ -352,8 +351,8 @@ class Transaction(object): path_stat = os.statvfs(path) if self.download_size >= path_stat.f_bavail * path_stat.f_bsize: - raise DownloadError, _("Not enough space to download %s of packages.") \ - % util.format_size(self.download_size) + raise DownloadError(_("Not enough space to download %s of packages.") \ + % util.format_size(self.download_size)) logger.info(_("Downloading packages:")) time_start = time.time() @@ -496,7 +495,7 @@ class Transaction(object): for action in actions: try: action.check(check) - except ActionError, e: + except ActionError as e: raise if check.successful: @@ -507,7 +506,7 @@ class Transaction(object): # and raise TransactionCheckError. check.print_errors(logger=logger) - raise TransactionCheckError, _("Transaction test was not successful") + raise TransactionCheckError(_("Transaction test was not successful")) def verify_signatures(self, mode=None, logger=None): """ @@ -554,7 +553,7 @@ class Transaction(object): try: step.pkg.verify() - except SignatureError, e: + except SignatureError as e: errors.append("%s" % e) finally: if p: p.finish() @@ -566,7 +565,7 @@ class Transaction(object): # Raise a SignatureError in strict mode. if mode == "strict": - raise SignatureError, "\n".join(errors) + raise SignatureError("\n".join(errors)) elif mode == "permissive": logger.warning(_("Found %s signature error(s)!") % len(errors)) @@ -621,7 +620,7 @@ class Transaction(object): try: action.run() - except ActionError, e: + except ActionError as e: logger.error("Action finished with an error: %s - %s" % (action, e)) #except Exception, e: # logger.error(_("An unforeseen error occoured: %s") % e) diff --git a/src/pakfire/transport.py b/src/pakfire/transport.py index 7783aace1..add417c8b 100644 --- a/src/pakfire/transport.py +++ b/src/pakfire/transport.py @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/python3 ############################################################################### # # # Pakfire - The IPFire package management system # @@ -19,16 +19,13 @@ # # ############################################################################### -from __future__ import division - import base64 import hashlib import json import os import time -import urlgrabber -import urllib -import urlparse +import urllib.request, urllib.parse, urllib.error +import urllib.parse import pakfire.downloader import pakfire.util @@ -39,7 +36,6 @@ from pakfire.i18n import _ import logging log = logging.getLogger("pakfire.transport") - class PakfireHubTransportUploader(object): """ Handles the upload of a single file to the hub. @@ -211,7 +207,7 @@ class PakfireHubTransport(object): server, username, password = self.config.get_hub_credentials() # Parse the given URL. - url = urlparse.urlparse(server) + url = urllib.parse.urlparse(server) assert url.scheme in ("http", "https") # Build new URL. 
@@ -230,40 +226,40 @@ class PakfireHubTransport(object): try: return self.grabber.urlread(url, **kwargs) - except urlgrabber.grabber.URLGrabError, e: + except urlgrabber.grabber.URLGrabError as e: # Timeout if e.errno == 12: - raise TransportConnectionTimeoutError, e + raise TransportConnectionTimeoutError(e) # Handle common HTTP errors elif e.errno == 14: # Connection errors if e.code == 5: - raise TransportConnectionProxyError, url + raise TransportConnectionProxyError(url) elif e.code == 6: - raise TransportConnectionDNSError, url + raise TransportConnectionDNSError(url) elif e.code == 7: - raise TransportConnectionResetError, url + raise TransportConnectionResetError(url) elif e.code == 23: - raise TransportConnectionWriteError, url + raise TransportConnectionWriteError(url) elif e.code == 26: - raise TransportConnectionReadError, url + raise TransportConnectionReadError(url) # SSL errors elif e.code == 52: - raise TransportSSLCertificateExpiredError, url + raise TransportSSLCertificateExpiredError(url) # HTTP error codes elif e.code == 403: - raise TransportForbiddenError, url + raise TransportForbiddenError(url) elif e.code == 404: - raise TransportNotFoundError, url + raise TransportNotFoundError(url) elif e.code == 500: - raise TransportInternalServerError, url + raise TransportInternalServerError(url) elif e.code in (502, 503): - raise TransportBadGatewayError, url + raise TransportBadGatewayError(url) elif e.code == 504: - raise TransportConnectionTimeoutError, url + raise TransportConnectionTimeoutError(url) # All other exceptions... raise @@ -279,14 +275,14 @@ class PakfireHubTransport(object): return self.one_request(url, **kwargs) # 500 - Internal Server Error, 502 + 503 Bad Gateway Error - except (TransportInternalServerError, TransportBadGatewayError), e: + except (TransportInternalServerError, TransportBadGatewayError) as e: log.exception("%s" % e.__class__.__name__) # Wait a minute before trying again. time.sleep(60) # Retry on connection problems. - except TransportConnectionError, e: + except TransportConnectionError as e: log.exception("%s" % e.__class__.__name__) # Wait for 10 seconds. 
@@ -298,7 +294,7 @@ class PakfireHubTransport(object): raise TransportMaxTriesExceededError def escape_args(self, **kwargs): - return urllib.urlencode(kwargs) + return urllib.parse.urlencode(kwargs) def get(self, url, data={}, **kwargs): """ diff --git a/src/pakfire/util.py b/src/pakfire/util.py index ec6c847db..fcb81220c 100644 --- a/src/pakfire/util.py +++ b/src/pakfire/util.py @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/python3 ############################################################################### # # # Pakfire - The IPFire package management system # @@ -19,13 +19,10 @@ # # ############################################################################### -from __future__ import division - import fcntl import hashlib import math import os -import progressbar import random import shutil import signal @@ -38,11 +35,11 @@ import time import logging log = logging.getLogger("pakfire") -from constants import * -from i18n import _ +from .constants import * +from .i18n import _ # Import binary version of version_compare and capability functions -from _pakfire import version_compare, get_capabilities, set_capabilities, personality +from ._pakfire import version_compare, get_capabilities, set_capabilities, personality def cli_is_interactive(): """ @@ -65,9 +62,9 @@ def ask_user(question): if not cli_is_interactive(): return True - print _("%s [y/N]") % question, - ret = raw_input() - print # Just an empty line. + print(_("%s [y/N]") % question, end=' ') + ret = input() + print() # Just an empty line. return ret in ("y", "Y", "z", "Z", "j", "J") @@ -75,11 +72,15 @@ def random_string(length=20): s = "" for i in range(length): - s += random.choice(string.letters) + s += random.choice(string.ascii_letters) return s def make_progress(message, maxval, eta=True, speed=False): + # XXX delay importing the progressbar module + # (because of a circular dependency) + from . import progressbar + # Return nothing if stdout is not a terminal. if not sys.stdout.isatty(): return @@ -117,7 +118,7 @@ def rm(path, *args, **kargs): tryAgain = 0 try: shutil.rmtree(path, *args, **kargs) - except OSError, e: + except OSError as e: if e.errno == 2: # no such file or directory pass elif e.errno==1 or e.errno==13: @@ -252,7 +253,7 @@ def orphans_kill(root, killsig=signal.SIGTERM): pid = int(fn, 10) os.kill(pid, killsig) os.waitpid(pid, 0) - except OSError, e: + except OSError as e: pass # If something was killed, wait a couple of seconds to make sure all file descriptors diff --git a/src/scripts/pakfire b/src/scripts/pakfire index 9e95dbe88..aed9e5ad8 100755 --- a/src/scripts/pakfire +++ b/src/scripts/pakfire @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/python3 import os import sys @@ -10,9 +10,10 @@ try: from pakfire.cli import * from pakfire.i18n import _ -except ImportError, e: +except ImportError as e: # Catch ImportError and show a more user-friendly message about what # went wrong. + raise # Try to load at least the i18n support, but when this fails as well we can # go with an English error message. @@ -22,13 +23,13 @@ except ImportError, e: _ = lambda x: x # XXX Maybe we can make a more beautiful message here?! 
- print _("There has been an error when trying to import one or more of the" - " modules, that are required to run Pakfire.") - print _("Please check your installation of Pakfire.") - print - print _("The error that lead to this:") - print " ", e - print + print(_("There has been an error when trying to import one or more of the" + " modules, that are required to run Pakfire.")) + print(_("Please check your installation of Pakfire.")) + print() + print(_("The error that lead to this:")) + print(" ", e) + print() # Exit immediately. sys.exit(1) @@ -48,7 +49,7 @@ basename = os.path.basename(sys.argv[0]) # Check if the program was called with a weird basename. # If so, we exit immediately. -if not basename2cls.has_key(basename): +if basename not in basename2cls: sys.exit(127) # Return code for the shell. @@ -65,7 +66,7 @@ except KeyboardInterrupt: ret = 1 # Catch all errors and show a user-friendly error message. -except Error, e: +except Error as e: log.critical("") log.critical(_("An error has occured when running Pakfire.")) log.error("")
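
A few notes on the recurring conversion patterns in this patch. Most of the hunks above repeat the same handful of mechanical Python 3 changes: raise statements become constructor calls, "except Cls, e" becomes "except Cls as e", print becomes a function, dict.has_key() becomes the "in" operator, dict views get wrapped in list() where a list is really needed, and __nonzero__ becomes __bool__. A minimal standalone sketch of those idioms (illustrative only; the Repos and DownloadError names are made up here, not pakfire classes):

class DownloadError(Exception):
    pass

class Repos:
    def __init__(self):
        self._repos = {}

    def add_repo(self, name, repo):
        # dict.has_key() is gone in Python 3; membership tests use "in".
        if name in self._repos:
            # raise takes a constructed exception, not "raise Cls, msg".
            raise DownloadError("Repository does already exist: %s" % name)
        self._repos[name] = repo

    def __iter__(self):
        # values() returns a view object; materialize it before reuse.
        return iter(list(self._repos.values()))

    def __bool__(self):
        # Python 2 spelled this hook __nonzero__.
        return bool(self._repos)

try:
    repos = Repos()
    repos.add_repo("base", object())
    repos.add_repo("base", object())
except DownloadError as e:
    # "except DownloadError, e" is a syntax error in Python 3.
    print("Error:", e)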
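
The import changes follow one pattern as well: Python 3 drops implicit relative imports, so "import base" inside a package becomes "from . import base" (or "from .. import packages" for a sibling package), and a few imports are pushed down into functions, as in packager.py, index.py and util.py above, to break circular module dependencies. A rough two-module sketch under a hypothetical package "pkg" (not pakfire's layout):

# pkg/writer.py -- hypothetical module, for illustration only
def write(payload):
    # Deferred import: pkg.reader also imports pkg.writer at module load
    # time, so importing it here, at call time, avoids loading pkg.reader
    # while pkg.writer is still being initialized.
    from . import reader
    return reader.verify(payload.encode("utf-8"))

# pkg/reader.py
from . import writer  # explicit relative import in the other direction

def verify(blob):
    assert isinstance(blob, bytes)
    return blob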
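
With the "import pakfire.lzma as lzma" lines dropped, tar.py and packager.py above rely on the standard library's lzma module for the XZ-compressed payload. A small self-contained sketch of reading an xz-compressed tar stream that way (illustrative only, not the pakfire packager format); note that tarfile.TarFile is iterable but is not itself an iterator, so looping over it, or calling its next() method, is the usual way to walk the members:

import io
import lzma
import tarfile

# Build a tiny tar archive in memory and compress it with XZ...
raw = io.BytesIO()
with tarfile.open(fileobj=raw, mode="w") as t:
    data = b"hello world\n"
    info = tarfile.TarInfo("hello.txt")
    info.size = len(data)
    t.addfile(info, io.BytesIO(data))
xz_blob = lzma.compress(raw.getvalue())

# ...then read it back as a stream through the stdlib lzma module.
with lzma.LZMAFile(io.BytesIO(xz_blob)) as f:
    with tarfile.open(fileobj=f, mode="r|") as t:
        for member in t:
            print(member.name, member.size)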
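
Finally, server.py above swaps xmlrpclib for its Python 3 name, xmlrpc.client; the Transport subclass and ServerProxy wiring otherwise stay the same. A bare-bones sketch of that pattern (the URL, retry count and user agent are invented for illustration and are not pakfire's hub settings):

import socket
import xmlrpc.client

class RetryTransport(xmlrpc.client.Transport):
    user_agent = "example-client/1.0"

    def single_request(self, *args, **kwargs):
        # Retry transient connection errors a few times, then give up.
        for attempt in range(3):
            try:
                return xmlrpc.client.Transport.single_request(self, *args, **kwargs)
            except socket.error:
                if attempt == 2:
                    raise

proxy = xmlrpc.client.ServerProxy("http://localhost:8080/RPC2",
                                  transport=RetryTransport(), allow_none=True)
# proxy.some_method(...) would now go through the retrying transport.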