-#!/usr/bin/env python
+#!/usr/bin/env python3
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
from bb import cookerdata
from bb.main import bitbake_main, BitBakeConfigParameters, BBMainException
+if sys.getfilesystemencoding() != "utf-8":
+ sys.exit("Please use a locale setting which supports utf-8.\nPython can't change the filesystem locale after loading so we need a utf-8 locale when Python starts or things won't work.")
+
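Note: the guard above is needed because CPython fixes the filesystem encoding from the locale once, at interpreter startup; it cannot be changed afterwards, so BitBake can only refuse to run. A minimal illustration of the failure mode (not part of the patch):

    # Under a non-UTF-8 locale, non-ASCII bytes in file names surface as
    # surrogate escapes, which break naive str handling later on.
    import sys
    print(sys.getfilesystemencoding())  # 'utf-8' only under a UTF-8 locale
    print(repr(b'caf\xc3\xa9'.decode('ascii', 'surrogateescape')))  # 'caf\udcc3\udca9'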
__version__ = "1.31.0"
if __name__ == "__main__":
-#!/usr/bin/env python
+#!/usr/bin/env python3
# bitbake-diffsigs
# BitBake task signature data comparison utility
import fnmatch
import optparse
import logging
+import pickle
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
if len(args) == 1:
parser.print_help()
else:
- import cPickle
try:
if len(args) == 2:
output = bb.siggen.dump_sigfile(sys.argv[1])
-#!/usr/bin/env python
+#!/usr/bin/env python3
# bitbake-dumpsig
# BitBake task signature dump utility
import warnings
import optparse
import logging
+import pickle
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
if len(args) == 1:
parser.print_help()
else:
- import cPickle
try:
output = bb.siggen.dump_sigfile(args[1])
except IOError as e:
-#!/usr/bin/env python
+#!/usr/bin/env python3
# This script has subcommands which operate against your bitbake layers, either
# displaying useful information, or acting against them.
logger.setLevel(logging.INFO)
return logger
-
def logger_setup_color(logger, color='auto'):
from bb.msg import BBLogFormatter
console = logging.StreamHandler(sys.stdout)
logger = logger_create('bitbake-layers', sys.stdout)
-
def main():
parser = argparse.ArgumentParser(
description="BitBake layers utility",
parser.add_argument('-h', '--help', action='help', default=argparse.SUPPRESS,
help='show this help message and exit')
subparsers = parser.add_subparsers(title='subcommands', metavar='<subcommand>')
+ subparsers.required = True
if global_args.debug:
logger.setLevel(logging.DEBUG)
-#!/usr/bin/env python
+#!/usr/bin/env python3
import os
import sys,logging
import optparse
-#!/usr/bin/env python
+#!/usr/bin/env python3
#
# Copyright (C) 2012 Richard Purdie
#
-#!/usr/bin/env python
+#!/usr/bin/env python3
import os
import sys
import select
import errno
import signal
+import pickle
from multiprocessing import Lock
+if sys.getfilesystemencoding() != "utf-8":
+ sys.exit("Please use a locale setting which supports utf-8.\nPython can't change the filesystem locale after loading so we need a utf-8 locale when Python starts or things won't work.")
+
# Users shouldn't be running this code directly
if len(sys.argv) != 2 or not sys.argv[1].startswith("decafbad"):
print("bitbake-worker is meant for internal execution by bitbake itself, please don't use it standalone.")
# updates to log files for use with tail
try:
if sys.stdout.name == '<stdout>':
- sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
+ import fcntl
+ fl = fcntl.fcntl(sys.stdout.fileno(), fcntl.F_GETFL)
+ fl |= os.O_SYNC
+ fcntl.fcntl(sys.stdout.fileno(), fcntl.F_SETFL, fl)
+ #sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
except:
pass
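Note: Python 3 only allows unbuffered I/O on binary streams, so the py2 idiom of reopening stdout with buffering 0 is replaced by setting O_SYNC on the existing descriptor. A sketch of both halves, assuming a POSIX system:

    # The old py2 reopen now fails outright:
    import fcntl
    import os
    import sys
    try:
        os.fdopen(sys.stdout.fileno(), 'w', 0)
    except ValueError as e:
        print(e)  # can't have unbuffered text I/O
    # The replacement used above: synchronous writes on the same fd.
    fl = fcntl.fcntl(sys.stdout.fileno(), fcntl.F_GETFL)
    fcntl.fcntl(sys.stdout.fileno(), fcntl.F_SETFL, fl | os.O_SYNC)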
logger = logging.getLogger("BitBake")
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
- bb.msg.note(1, bb.msg.domain.Cache, "Importing cPickle failed. Falling back to a very slow implementation.")
-
-
worker_pipe = sys.stdout.fileno()
bb.utils.nonblockingfd(worker_pipe)
# Need to guard against multiprocessing being used in child processes
consolelog.setFormatter(conlogformat)
logger.addHandler(consolelog)
-worker_queue = ""
+worker_queue = b""
def worker_fire(event, d):
- data = "<event>" + pickle.dumps(event) + "</event>"
+ data = b"<event>" + pickle.dumps(event) + b"</event>"
worker_fire_prepickled(data)
def worker_fire_prepickled(event):
global worker_pipe
global worker_pipe_lock
try:
worker_pipe_lock.acquire()
worker_pipe.write(event)
bb.utils.process_profilelog(profname)
os._exit(ret)
else:
- for key, value in envbackup.iteritems():
+ for key, value in iter(envbackup.items()):
if value is None:
del os.environ[key]
else:
if pipeout:
pipeout.close()
bb.utils.nonblockingfd(self.input)
- self.queue = ""
+ self.queue = b""
def read(self):
start = len(self.queue)
try:
- self.queue = self.queue + self.input.read(102400)
+ self.queue = self.queue + (self.input.read(102400) or b"")
except (OSError, IOError) as e:
if e.errno != errno.EAGAIN:
raise
end = len(self.queue)
- index = self.queue.find("</event>")
+ index = self.queue.find(b"</event>")
while index != -1:
worker_fire_prepickled(self.queue[:index+8])
self.queue = self.queue[index+8:]
- index = self.queue.find("</event>")
+ index = self.queue.find(b"</event>")
return (end > start)
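Note: read() above deframes `<event>...</event>` records from a byte stream, which is why every delimiter became a bytes literal. A self-contained round-trip of the same framing convention:

    # Round-trip sketch of the tag-delimited pickle framing used here.
    import pickle

    def frame(obj):
        return b"<event>" + pickle.dumps(obj) + b"</event>"

    def deframe(queue):
        events = []
        index = queue.find(b"</event>")
        while index != -1:
            events.append(pickle.loads(queue[7:index]))  # 7 == len(b"<event>")
            queue = queue[index + 8:]                     # 8 == len(b"</event>")
            index = queue.find(b"</event>")
        return events, queue

    buf = frame({"task": "do_compile"}) + frame({"task": "do_install"})
    events, rest = deframe(buf)
    assert events == [{"task": "do_compile"}, {"task": "do_install"}] and rest == b""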
def close(self):
def __init__(self, din):
self.input = din
bb.utils.nonblockingfd(self.input)
- self.queue = ""
+ self.queue = b""
self.cookercfg = None
self.databuilder = None
self.data = None
except (OSError, IOError):
pass
if len(self.queue):
- self.handle_item("cookerconfig", self.handle_cookercfg)
- self.handle_item("workerdata", self.handle_workerdata)
- self.handle_item("runtask", self.handle_runtask)
- self.handle_item("finishnow", self.handle_finishnow)
- self.handle_item("ping", self.handle_ping)
- self.handle_item("quit", self.handle_quit)
+ self.handle_item(b"cookerconfig", self.handle_cookercfg)
+ self.handle_item(b"workerdata", self.handle_workerdata)
+ self.handle_item(b"runtask", self.handle_runtask)
+ self.handle_item(b"finishnow", self.handle_finishnow)
+ self.handle_item(b"ping", self.handle_ping)
+ self.handle_item(b"quit", self.handle_quit)
for pipe in self.build_pipes:
self.build_pipes[pipe].read()
def handle_item(self, item, func):
- if self.queue.startswith("<" + item + ">"):
- index = self.queue.find("</" + item + ">")
+ if self.queue.startswith(b"<" + item + b">"):
+ index = self.queue.find(b"</" + item + b">")
while index != -1:
func(self.queue[(len(item) + 2):index])
self.queue = self.queue[(index + len(item) + 3):]
- index = self.queue.find("</" + item + ">")
+ index = self.queue.find(b"</" + item + b">")
def handle_cookercfg(self, data):
self.cookercfg = pickle.loads(data)
self.build_pipes[pid].close()
del self.build_pipes[pid]
- worker_fire_prepickled("<exitcode>" + pickle.dumps((task, status)) + "</exitcode>")
+ worker_fire_prepickled(b"<exitcode>" + pickle.dumps((task, status)) + b"</exitcode>")
def handle_finishnow(self, _):
if self.build_pids:
logger.info("Sending SIGTERM to remaining %s tasks", len(self.build_pids))
- for k, v in self.build_pids.iteritems():
+ for k, v in iter(self.build_pids.items()):
try:
os.kill(-k, signal.SIGTERM)
os.waitpid(-1, 0)
self.build_pipes[pipe].read()
try:
+ sys.stdin = sys.stdin.detach()
worker = BitbakeWorker(sys.stdin)
if not profiling:
worker.serve()
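Note: sys.stdin.detach() strips the text wrapper so the worker reads raw bytes for the pickle framing; sys.stdin.buffer would expose the same stream without discarding the wrapper. Sketch:

    # detach() returns the binary BufferedReader beneath py3's text-mode stdin.
    import io
    import sys
    binary_stdin = sys.stdin.detach()
    assert isinstance(binary_stdin, io.BufferedReader)
    # binary_stdin.read() now yields bytes, not str.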
-#!/usr/bin/env python
+#!/usr/bin/env python3
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright (c) 2012 Wind River Systems, Inc.
#
except RuntimeError as exc:
sys.exit(str(exc))
+from gi import pygtkcompat
+
+pygtkcompat.enable()
+pygtkcompat.enable_gtk(version='3.0')
+
import gtk
import optparse
-import pygtk
from bb.ui.crumbs.hobwidget import HobAltButton, HobButton
from bb.ui.crumbs.hig.crumbsmessagedialog import CrumbsMessageDialog
-#!/usr/bin/env python
+#!/usr/bin/env python3
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
sys.path.insert(0, os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])), '../lib'))
from bb.cache import CoreRecipeInfo
-import cPickle as pickle
+import pickle
def main(argv=None):
"""
# Assign a file to __warn__ to get warnings about slow operations.
#
-from __future__ import print_function
+
import copy
import types
ImmutableTypes = (
- types.NoneType,
bool,
complex,
float,
int,
- long,
tuple,
frozenset,
- basestring
+ str
)
MUTABLE = "__mutable__"
__call__ = cow
def __setitem__(cls, key, value):
- if not isinstance(value, ImmutableTypes):
+ if value is not None and not isinstance(value, ImmutableTypes):
if not isinstance(value, COWMeta):
cls.__hasmutable__ = True
key += MUTABLE
cls.__setitem__(key, cls.__marker__)
def __revertitem__(cls, key):
- if not cls.__dict__.has_key(key):
+ if key not in cls.__dict__:
key += MUTABLE
delattr(cls, key)
COWDictMeta.__delitem__(cls, repr(hash(value)))
def __in__(cls, value):
- return COWDictMeta.has_key(repr(hash(value)))
+ return repr(hash(value)) in cls
def iterkeys(cls):
raise TypeError("sets don't have keys")
raise TypeError("sets don't have 'items'")
# These are the actual classes you use!
-class COWDictBase(object):
- __metaclass__ = COWDictMeta
+class COWDictBase(object, metaclass=COWDictMeta):
__count__ = 0
-class COWSetBase(object):
- __metaclass__ = COWSetMeta
+class COWSetBase(object, metaclass=COWSetMeta):
__count__ = 0
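Note: Python 3 ignores the `__metaclass__` class attribute; the metaclass moves into the class header as a keyword argument, as in the two rewrites above. A toy equivalent:

    # Only the keyword form takes effect on Python 3.
    class Meta(type):
        def __repr__(cls):
            return "<class %s via Meta>" % cls.__name__

    class Base(object, metaclass=Meta):
        pass

    print(repr(Base))  # <class Base via Meta>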
if __name__ == "__main__":
print()
print("a", a)
- for x in a.iteritems():
+ for x in a.items():
print(x)
print("--")
print("b", b)
- for x in b.iteritems():
+ for x in b.items():
print(x)
print()
b['a'] = 'c'
print("a", a)
- for x in a.iteritems():
+ for x in a.items():
print(x)
print("--")
print("b", b)
- for x in b.iteritems():
+ for x in b.items():
print(x)
print()
a['set'].add("o2")
print("a", a)
- for x in a['set'].itervalues():
+ for x in a['set'].values():
print(x)
print("--")
print("b", b)
- for x in b['set'].itervalues():
+ for x in b['set'].values():
print(x)
print()
b['set'].add('o3')
print("a", a)
- for x in a['set'].itervalues():
+ for x in a['set'].values():
print(x)
print("--")
print("b", b)
- for x in b['set'].itervalues():
+ for x in b['set'].values():
print(x)
print()
a['set2'].add("o2")
print("a", a)
- for x in a.iteritems():
+ for x in a.items():
print(x)
print("--")
print("b", b)
except KeyError:
print("Yay! deleted key raises error")
- if b.has_key('b'):
+ if 'b' in b:
print("Boo!")
else:
print("Yay - has_key with delete works!")
print("a", a)
- for x in a.iteritems():
+ for x in a.items():
print(x)
print("--")
print("b", b)
b.__revertitem__('b')
print("a", a)
- for x in a.iteritems():
+ for x in a.items():
print(x)
print("--")
print("b", b)
b.__revertitem__('dict')
print("a", a)
- for x in a.iteritems():
+ for x in a.items():
print(x)
print("--")
print("b", b)
mainlogger.plain(''.join(args))
def debug(lvl, *args):
- if isinstance(lvl, basestring):
+ if isinstance(lvl, str):
mainlogger.warning("Passed invalid debug level '%s' to bb.debug", lvl)
args = (lvl,) + args
lvl = 1
import bb
import bb.msg
import bb.process
-from contextlib import nested
-from bb import event, utils
+from bb import data, event, utils
bblogger = logging.getLogger('BitBake')
logger = logging.getLogger('BitBake.Build')
exit $ret
''')
- os.chmod(runfile, 0775)
+ os.chmod(runfile, 0o775)
cmd = runfile
if d.getVarFlag(func, 'fakeroot', False):
logfile = sys.stdout
def readfifo(data):
- lines = data.split('\0')
+ lines = data.split(b'\0')
for line in lines:
- splitval = line.split(' ', 1)
+ splitval = line.split(b' ', 1)
- cmd = splitval[0]
+ cmd = splitval[0].decode("utf-8")
if len(splitval) > 1:
- value = splitval[1]
+ value = splitval[1].decode("utf-8")
else:
value = ''
if cmd == 'bbplain':
if os.path.exists(fifopath):
os.unlink(fifopath)
os.mkfifo(fifopath)
- with open(fifopath, 'r+') as fifo:
+ with open(fifopath, 'r+b', buffering=0) as fifo:
try:
bb.debug(2, "Executing shell function %s" % func)
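Note: the fifo must be opened 'r+b' with buffering=0 because unbuffered I/O is binary-only on Python 3; accordingly readfifo() above splits on b'\0' and b' ' and decodes just the value. A standalone sketch of that reader under the same conventions:

    # NUL-delimited command protocol over a fifo, binary and unbuffered.
    # 'r+' keeps a writer open so reads don't hit EOF immediately.
    import os
    import tempfile

    fifopath = os.path.join(tempfile.mkdtemp(), "bb_fifo")
    os.mkfifo(fifopath)
    with open(fifopath, 'r+b', buffering=0) as fifo:
        fifo.write(b"bbplain hello\0")
        for line in fifo.read(64).split(b'\0'):
            if line:
                cmd, _, value = line.partition(b' ')
                print(cmd.decode("utf-8"), value.decode("utf-8"))
    os.unlink(fifopath)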
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
import os
+import sys
import logging
+import pickle
from collections import defaultdict
import bb.utils
logger = logging.getLogger("BitBake.Cache")
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
- logger.info("Importing cPickle failed. "
- "Falling back to a very slow implementation.")
-
__cache_version__ = "150"
def getCacheFile(path, filename, data_hash):
out_dict = dict((var, metadata.getVarFlag(var, flag, True))
for var in varlist)
if squash:
- return dict((k,v) for (k,v) in out_dict.iteritems() if v)
+ return dict((k,v) for (k,v) in out_dict.items() if v)
else:
return out_dict
cachedata.universe_target.append(self.pn)
cachedata.hashfn[fn] = self.hashfilename
- for task, taskhash in self.basetaskhashes.iteritems():
+ for task, taskhash in self.basetaskhashes.items():
identifier = '%s.%s' % (fn, task)
cachedata.basetaskhash[identifier] = taskhash
infos = []
datastores = cls.load_bbfile(filename, appends, configdata)
depends = []
- for variant, data in sorted(datastores.iteritems(),
+ for variant, data in sorted(datastores.items(),
key=lambda i: i[0],
reverse=True):
virtualfn = cls.realfn2virtual(filename, variant)
pickler_dict['CoreRecipeInfo'].dump(bb.__version__)
try:
- for key, info_array in self.depends_cache.iteritems():
+ for key, info_array in self.depends_cache.items():
for info in info_array:
if isinstance(info, RecipeInfoCommon):
cache_class_name = info.__class__.__name__
import operator
import os
import stat
+import pickle
import bb.utils
import logging
from bb.cache import MultiProcessCache
logger = logging.getLogger("BitBake.Cache")
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
- logger.info("Importing cPickle failed. "
- "Falling back to a very slow implementation.")
-
-
# mtime cache (non-persistent)
# based upon the assumption that files do not change during bitbake run
class FileMtimeCache(object):
import ast
+import sys
import codegen
import logging
+import pickle
+import bb.pysh as pysh
import os.path
import bb.utils, bb.data
from itertools import chain
-from pysh import pyshyacc, pyshlex, sherrors
+from bb.pysh import pyshyacc, pyshlex, sherrors
from bb.cache import MultiProcessCache
-
logger = logging.getLogger('BitBake.CodeParser')
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
- logger.info('Importing cPickle failed. Falling back to a very slow implementation.')
-
-
def check_indent(codestr):
"""If the code is indented, add a top level piece of code to 'remove' the indentation"""
new = []
for i in items:
- new.append(intern(i))
+ new.append(sys.intern(i))
s = frozenset(new)
if hash(s) in self.setcache:
return self.setcache[hash(s)]
return False
except SystemExit as exc:
arg = exc.args[0]
- if isinstance(arg, basestring):
+ if isinstance(arg, str):
self.finishAsyncCommand(arg)
else:
self.finishAsyncCommand("Exited with %s" % arg)
import multiprocessing
import sre_constants
import threading
-from cStringIO import StringIO
+from io import StringIO
from contextlib import closing
from functools import wraps
from collections import defaultdict
import bb, bb.exceptions, bb.command
from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
-import Queue
+import queue
import signal
import subprocess
import errno
"""
class state:
- initial, parsing, running, shutdown, forceshutdown, stopped, error = range(7)
+ initial, parsing, running, shutdown, forceshutdown, stopped, error = list(range(7))
@classmethod
def get_name(cls, code):
class CookerFeatures(object):
- _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = range(3)
+ _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = list(range(3))
def __init__(self):
self._features=set()
def __iter__(self):
return self._features.__iter__()
- def next(self):
- return self._features.next()
+ def __next__(self):
+ return next(self._features)
#============================================================================#
depend_tree['providermap'] = {}
depend_tree["layer-priorities"] = self.recipecache.bbfile_config_priorities
- for name, fn in taskdata.get_providermap().iteritems():
+ for name, fn in list(taskdata.get_providermap().items()):
pn = self.recipecache.pkg_fn[fn]
if name != pn:
version = "%s:%s-%s" % self.recipecache.pkg_pepvpr[fn]
depend_tree['providermap'][name] = (pn, version)
- for task in xrange(len(rq.rqdata.runq_fnid)):
+ for task in range(len(rq.rqdata.runq_fnid)):
taskname = rq.rqdata.runq_task[task]
fnid = rq.rqdata.runq_fnid[task]
fn = taskdata.fn_index[fnid]
_, taskdata = self.prepareTreeData(pkgs_to_build, task)
tasks_fnid = []
if len(taskdata.tasks_name) != 0:
- for task in xrange(len(taskdata.tasks_name)):
+ for task in range(len(taskdata.tasks_name)):
tasks_fnid.append(taskdata.tasks_fnid[task])
seen_fnids = []
cachefields = getattr(cache_class, 'cachefields', [])
extra_info = extra_info + cachefields
- for task in xrange(len(tasks_fnid)):
+ for task in range(len(tasks_fnid)):
fnid = tasks_fnid[task]
fn = taskdata.fn_index[fnid]
pn = self.recipecache.pkg_fn[fn]
# Determine which bbappends haven't been applied
# First get list of recipes, including skipped
- recipefns = self.recipecache.pkg_fn.keys()
+ recipefns = list(self.recipecache.pkg_fn.keys())
recipefns.extend(self.skiplist.keys())
# Work out list of bbappends that have been applied
deplist = bb.utils.explode_dep_versions2(deps)
except bb.utils.VersionStringException as vse:
bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
- for dep, oplist in deplist.iteritems():
+ for dep, oplist in list(deplist.items()):
if dep in collection_list:
for opstr in oplist:
layerver = self.data.getVar("LAYERVERSION_%s" % dep, True)
while True:
try:
quit = self.quit.get_nowait()
- except Queue.Empty:
+ except queue.Empty:
pass
else:
if quit == 'cancel':
try:
self.to_parsers.put(job, timeout=0.5)
- except Queue.Full:
+ except queue.Full:
self.jobs.insert(0, job)
continue
while True:
try:
self.quit.get_nowait()
- except Queue.Empty:
+ except queue.Empty:
pass
else:
self.results.cancel_join_thread()
else:
try:
job = self.jobs.get(timeout=0.25)
- except Queue.Empty:
+ except queue.Empty:
continue
if job is None:
try:
self.results.put(result, timeout=0.25)
- except Queue.Full:
+ except queue.Full:
pending.append(result)
def parse(self, filename, appends, caches_array):
try:
result = self.result_queue.get(timeout=0.25)
- except Queue.Empty:
+ except queue.Empty:
pass
else:
value = result[1]
result = []
parsed = None
try:
- parsed, result = self.results.next()
+ parsed, result = next(self.results)
except StopIteration:
self.shutdown()
return False
def expandWithRefs(self, s, varname):
- if not isinstance(s, basestring): # sanity check
+ if not isinstance(s, str): # sanity check
return VariableParse(varname, self, s)
if varname and varname in self.expand_cache:
data.update({i:value})
data_str = str([(k, data[k]) for k in sorted(data.keys())])
- return hashlib.md5(data_str).hexdigest()
+ return hashlib.md5(data_str.encode("utf-8")).hexdigest()
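Note: every hashlib call site in this patch gains an explicit encode, because Python 3 digests accept only bytes:

    import hashlib
    data_str = "PN=foo PV=1.0"
    print(hashlib.md5(data_str.encode("utf-8")).hexdigest())
    # hashlib.md5(data_str) raises TypeError on Python 3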
import os, sys
import warnings
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
+import pickle
import logging
import atexit
import traceback
eid = str(event.__class__)[8:-2]
evt_hmap = _event_handler_map.get(eid, {})
- for name, handler in _handlers.iteritems():
+ for name, handler in list(_handlers.items()):
if name in _catchall_handlers or name in evt_hmap:
if _eventfilter:
if not _eventfilter(name, handler, event, d):
if handler is not None:
# handle string containing python code
- if isinstance(handler, basestring):
+ if isinstance(handler, str):
tmp = "def %s(e):\n%s" % (name, handler)
try:
code = bb.methodpool.compile_cache(tmp)
def to_string(exc):
if isinstance(exc, SystemExit):
- if not isinstance(exc.code, basestring):
+ if not isinstance(exc.code, str):
return 'Exited with "%d"' % exc.code
return str(exc)
import os, re
import signal
import logging
-import urllib
-import urlparse
+import urllib.request, urllib.parse, urllib.error
+if 'git' not in urllib.parse.uses_netloc:
+ urllib.parse.uses_netloc.append('git')
+import operator
import collections
+import subprocess
+import pickle
import bb.persist_data, bb.utils
import bb.checksum
from bb import data
import bb.process
-import subprocess
__version__ = "2"
_checksum_cache = bb.checksum.FileChecksumCache()
logger = logging.getLogger("BitBake.Fetcher")
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
- logger.info("Importing cPickle failed. "
- "Falling back to a very slow implementation.")
-
class BBFetchException(Exception):
"""Class all fetch exceptions inherit from"""
def __init__(self, message):
# them are not quite RFC compliant.
uri, param_str = (uri.split(";", 1) + [None])[:2]
- urlp = urlparse.urlparse(uri)
+ urlp = urllib.parse.urlparse(uri)
self.scheme = urlp.scheme
reparse = 0
# Coerce urlparse to make URI scheme use netloc
- if not self.scheme in urlparse.uses_netloc:
- urlparse.uses_params.append(self.scheme)
+ if not self.scheme in urllib.parse.uses_netloc:
+ urllib.parse.uses_params.append(self.scheme)
reparse = 1
# Make urlparse happy(/ier) by converting local resources
reparse = 1
if reparse:
- urlp = urlparse.urlparse(uri)
+ urlp = urllib.parse.urlparse(uri)
# Identify if the URI is relative or not
if urlp.scheme in self._relative_schemes and \
if urlp.password:
self.userinfo += ':%s' % urlp.password
- self.path = urllib.unquote(urlp.path)
+ self.path = urllib.parse.unquote(urlp.path)
if param_str:
self.params = self._param_str_split(param_str, ";")
@property
def path_quoted(self):
- return urllib.quote(self.path)
+ return urllib.parse.quote(self.path)
@path_quoted.setter
def path_quoted(self, path):
- self.path = urllib.unquote(path)
+ self.path = urllib.parse.unquote(path)
@property
def path(self):
s1, s2 = s.split('=')
p[s1] = s2
- return type, host, urllib.unquote(path), user, pswd, p
+ return type, host, urllib.parse.unquote(path), user, pswd, p
def encodeurl(decoded):
"""Encodes a URL from tokens (scheme, network location, path,
# Standardise path to ensure comparisons work
while '//' in path:
path = path.replace("//", "/")
- url += "%s" % urllib.quote(path)
+ url += "%s" % urllib.parse.quote(path)
if p:
for parm in p:
url += ";%s=%s" % (parm, p[parm])
del self.cache[cn]
def close_connections(self):
- for cn in self.cache.keys():
+ for cn in list(self.cache.keys()):
self.cache[cn].close()
del self.cache[cn]
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
import os
-import urllib
+import urllib.request, urllib.parse, urllib.error
import bb
import bb.utils
from bb import data
def urldata_init(self, ud, d):
# We don't set localfile as for this fetcher the file is already local!
- ud.decodedurl = urllib.unquote(ud.url.split("://")[1].split(";")[0])
+ ud.decodedurl = urllib.parse.unquote(ud.url.split("://")[1].split(";")[0])
ud.basename = os.path.basename(ud.decodedurl)
ud.basepath = ud.decodedurl
ud.needdonestamp = False
import os
import sys
-import urllib
+import urllib.request, urllib.parse, urllib.error
import json
import subprocess
import signal
optdepsfound[dep] = dependencies[dep]
else:
depsfound[dep] = dependencies[dep]
- for dep, version in optdepsfound.iteritems():
+ for dep, version in optdepsfound.items():
self._getdependencies(dep, data[pkg]['deps'], version, d, ud, optional=True)
- for dep, version in depsfound.iteritems():
+ for dep, version in depsfound.items():
self._getdependencies(dep, data[pkg]['deps'], version, d, ud)
def _getshrinkeddependencies(self, pkg, data, version, d, ud, lockdown, manifest):
keys.append(key)
values.append(value)
- parm = dict(zip(keys, values))
+ parm = dict(list(zip(keys, values)))
path = "//" + path.split(';')[0]
host += ":%s" % (port)
parm["cset"] = Perforce.getcset(d, path, host, user, pswd, parm)
import os
import bb
-import urllib
-import commands
+import urllib.request, urllib.parse, urllib.error
from bb import data
from bb.fetch2 import URI
from bb.fetch2 import FetchMethod
else:
ud.basename = os.path.basename(ud.path)
- ud.localfile = data.expand(urllib.unquote(ud.basename), d)
+ ud.localfile = data.expand(urllib.parse.unquote(ud.basename), d)
def download(self, ud, d):
"""Fetch urls"""
remote = '%s%s:%s' % (user, urlo.hostname, path)
- cmd = '%s %s %s %s' % (basecmd, port, commands.mkarg(remote),
- commands.mkarg(lpath))
+ cmd = '%s %s %s %s' % (basecmd, port, remote, lpath)
bb.fetch2.check_network_access(d, cmd, ud.url)
runfetchcmd(cmd, d)
fr = host
fr += ':%s' % path
-
- import commands
cmd = 'scp -B -r %s %s %s/' % (
portarg,
- commands.mkarg(fr),
- commands.mkarg(dldir)
+ fr,
+ dldir
)
bb.fetch2.check_network_access(d, cmd, urldata.url)
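Note: commands.mkarg() no longer exists, so the scp/sftp command lines above interpolate the values directly, which silently drops the shell quoting mkarg provided. If quoting matters, shlex.quote() is the Python 3 stdlib replacement; a possible hardening, not what the patch itself does:

    # shlex.quote() escapes an argument for a POSIX shell, covering the
    # role of the removed commands.mkarg() (minus its leading space).
    import shlex
    remote = "user@host:/path with spaces"
    print("scp -B -r %s %s/" % (shlex.quote(remote), shlex.quote("/downloads")))
    # scp -B -r 'user@host:/path with spaces' /downloads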
import os
import logging
import bb
-import urllib
+import urllib.request, urllib.parse, urllib.error
from bb import data
from bb.fetch2 import FetchMethod
from bb.fetch2 import FetchError
else:
ud.basename = os.path.basename(ud.path)
- ud.localfile = data.expand(urllib.unquote(ud.basename), d)
+ ud.localfile = data.expand(urllib.parse.unquote(ud.basename), d)
if not ud.localfile:
- ud.localfile = data.expand(urllib.unquote(ud.host + ud.path).replace("/", "."), d)
+ ud.localfile = data.expand(urllib.parse.unquote(ud.host + ud.path).replace("/", "."), d)
self.basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -t 2 -T 30 -nv --passive-ftp --no-check-certificate"
return True
def checkstatus(self, fetch, ud, d):
- import urllib2, socket, httplib
- from urllib import addinfourl
+ import urllib.request, urllib.error, urllib.parse, socket, http.client
+ from urllib.response import addinfourl
from bb.fetch2 import FetchConnectionCache
- class HTTPConnectionCache(httplib.HTTPConnection):
+ class HTTPConnectionCache(http.client.HTTPConnection):
if fetch.connection_cache:
def connect(self):
"""Connect to the host and port specified in __init__."""
if self._tunnel_host:
self._tunnel()
- class CacheHTTPHandler(urllib2.HTTPHandler):
+ class CacheHTTPHandler(urllib.request.HTTPHandler):
def http_open(self, req):
return self.do_open(HTTPConnectionCache, req)
- geturl(): return the original request URL
- code: HTTP status code
"""
- host = req.get_host()
+ host = req.host
if not host:
- raise urlllib2.URLError('no host given')
+ raise urllib.error.URLError('no host given')
h.set_debuglevel(self._debuglevel)
headers = dict(req.unredirected_hdrs)
- headers.update(dict((k, v) for k, v in req.headers.items()
+ headers.update(dict((k, v) for k, v in list(req.headers.items())
if k not in headers))
# We want to make an HTTP/1.1 request, but the addinfourl
headers["Connection"] = "Keep-Alive" # Works for HTTP/1.0
headers = dict(
- (name.title(), val) for name, val in headers.items())
+ (name.title(), val) for name, val in list(headers.items()))
if req._tunnel_host:
tunnel_headers = {}
h.set_tunnel(req._tunnel_host, headers=tunnel_headers)
try:
- h.request(req.get_method(), req.get_selector(), req.data, headers)
- except socket.error, err: # XXX what error?
+ h.request(req.get_method(), req.selector, req.data, headers)
+ except socket.error as err: # XXX what error?
# Don't close connection when cache is enabled.
if fetch.connection_cache is None:
h.close()
- raise urllib2.URLError(err)
+ raise urllib.error.URLError(err)
else:
try:
r = h.getresponse(buffering=True)
return resp
- class HTTPMethodFallback(urllib2.BaseHandler):
+ class HTTPMethodFallback(urllib.request.BaseHandler):
"""
Fallback to GET if HEAD is not allowed (405 HTTP error)
"""
fp.read()
fp.close()
- newheaders = dict((k,v) for k,v in req.headers.items()
+ newheaders = dict((k,v) for k,v in list(req.headers.items())
if k.lower() not in ("content-length", "content-type"))
- return self.parent.open(urllib2.Request(req.get_full_url(),
+ return self.parent.open(urllib.request.Request(req.get_full_url(),
headers=newheaders,
- origin_req_host=req.get_origin_req_host(),
+ origin_req_host=req.origin_req_host,
unverifiable=True))
"""
"""
http_error_406 = http_error_405
- class FixedHTTPRedirectHandler(urllib2.HTTPRedirectHandler):
+ class FixedHTTPRedirectHandler(urllib.request.HTTPRedirectHandler):
"""
urllib2.HTTPRedirectHandler resets the method to GET on redirect,
when we want to follow redirects using the original method.
"""
def redirect_request(self, req, fp, code, msg, headers, newurl):
- newreq = urllib2.HTTPRedirectHandler.redirect_request(self, req, fp, code, msg, headers, newurl)
+ newreq = urllib.request.HTTPRedirectHandler.redirect_request(self, req, fp, code, msg, headers, newurl)
newreq.get_method = lambda: req.get_method()
return newreq
exported_proxies = export_proxies(d)
handlers = [FixedHTTPRedirectHandler, HTTPMethodFallback]
- if export_proxies:
+ if exported_proxies:
- handlers.append(urllib2.ProxyHandler())
+ handlers.append(urllib.request.ProxyHandler())
handlers.append(CacheHTTPHandler())
# XXX: Since Python 2.7.9 ssl cert validation is enabled by default
# see PEP-0476, this causes verification errors on some https servers
# so disable by default.
import ssl
if hasattr(ssl, '_create_unverified_context'):
- handlers.append(urllib2.HTTPSHandler(context=ssl._create_unverified_context()))
- opener = urllib2.build_opener(*handlers)
+ handlers.append(urllib.request.HTTPSHandler(context=ssl._create_unverified_context()))
+ opener = urllib.request.build_opener(*handlers)
try:
uri = ud.url.split(";")[0]
- r = urllib2.Request(uri)
+ r = urllib.request.Request(uri)
r.get_method = lambda: "HEAD"
opener.open(r)
- except urllib2.URLError as e:
+ except urllib.error.URLError as e:
# debug for now to avoid spamming the logs in e.g. remote sstate searches
logger.debug(2, "checkstatus() urlopen failed: %s" % e)
return False
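Note: stripped of the connection cache and fallback handlers, the probe checkstatus() performs reduces to a HEAD request through the relocated urllib classes. A minimal sketch under those assumptions:

    # HEAD probe without the caching/fallback machinery above.
    import urllib.request
    import urllib.error

    def url_exists(uri, timeout=30):
        req = urllib.request.Request(uri, method="HEAD")
        try:
            with urllib.request.urlopen(req, timeout=timeout):
                return True
        except urllib.error.URLError:
            return False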
import logging
import optparse
import warnings
+import fcntl
import bb
from bb import event
server.saveConnectionDetails()
except Exception as e:
while hasattr(server, "event_queue"):
- try:
- import queue
- except ImportError:
- import Queue as queue
+ import queue
try:
event = server.event_queue.get(block=False)
except (queue.Empty, IOError):
# updates to log files for use with tail
try:
if sys.stdout.name == '<stdout>':
- sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
+ # Set O_SYNC on the existing fd: Python 3 can't reopen stdout unbuffered in text mode
+ fl = fcntl.fcntl(sys.stdout.fileno(), fcntl.F_GETFL)
+ fl |= os.O_SYNC
+ fcntl.fcntl(sys.stdout.fileno(), fcntl.F_SETFL, fl)
except:
pass
}
color_enabled = False
- BASECOLOR, BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(29,38)
+ BASECOLOR, BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = list(range(29,38))
COLORS = {
DEBUG3 : CYAN,
data.setVar(key, val, parsing=True, **loginfo)
class MethodNode(AstNode):
- tr_tbl = string.maketrans('/.+-@%&', '_______')
+ tr_tbl = str.maketrans('/.+-@%&', '_______')
def __init__(self, filename, lineno, func_name, body, python, fakeroot):
AstNode.__init__(self, filename, lineno)
function(arg or name, new_d)
datastores[name] = new_d
- for variant, variant_d in datastores.items():
+ for variant in list(datastores.keys()):
for name in names:
if not variant:
# Based on main recipe
- create_variant(name, variant_d)
+ create_variant(name, datastores[""])
else:
- create_variant("%s-%s" % (variant, name), variant_d, name)
+ create_variant("%s-%s" % (variant, name), datastores[variant], name)
def _expand_versions(versions):
def expand_one(version, start, end):
- for i in xrange(start, end + 1):
+ for i in range(start, end + 1):
ver = _bbversions_re.sub(str(i), version, 1)
yield ver
safe_d.setVar("BBCLASSEXTEND", extended)
_create_variants(datastores, extendedmap.keys(), extendfunc, onlyfinalise)
- for variant, variant_d in datastores.iteritems():
+ for variant in datastores.keys():
if variant:
try:
if not onlyfinalise or variant in onlyfinalise:
- finalize(fn, variant_d, variant)
+ finalize(fn, datastores[variant], variant)
except bb.parse.SkipRecipe as e:
- variant_d.setVar("__SKIPPED", e.args[0])
+ datastores[variant].setVar("__SKIPPED", e.args[0])
if len(datastores) > 1:
- variants = filter(None, datastores.iterkeys())
+ variants = filter(None, datastores.keys())
safe_d.setVar("__VARIANTS", " ".join(variants))
datastores[""] = d
self._execute("DELETE from %s where key=?;" % self.table, [key])
def __setitem__(self, key, value):
- if not isinstance(key, basestring):
+ if not isinstance(key, str):
raise TypeError('Only string keys are supported')
- elif not isinstance(value, basestring):
+ elif not isinstance(value, str):
raise TypeError('Only string values are supported')
data = self._execute("SELECT * from %s where key=?;" %
return [row[1] for row in data]
def values(self):
return list(self.itervalues())
def itervalues(self):
data = self._execute("SELECT value FROM %s;" % self.table)
return (row[0] for row in data)
def items(self):
return list(self.iteritems())
def iteritems(self):
return self._execute("SELECT * FROM %s;" % self.table)
"""
Return a list of key + value pairs for a domain
"""
- return self.data[domain].items()
+ return list(self.data[domain].items())
def getValue(self, domain, key):
"""
self.msg = msg
def __str__(self):
- if not isinstance(self.command, basestring):
+ if not isinstance(self.command, str):
cmd = subprocess.list2cmdline(self.command)
else:
cmd = self.command
+ stdoutbuf = b""
+ stderrbuf = b""
try:
while pipe.poll() is None:
rlist = rin
try:
r,w,e = select.select (rlist, [], [], 1)
except OSError as e:
raise
if pipe.stdout in r:
- data = pipe.stdout.read()
- if data is not None:
- outdata.append(data)
- log.write(data)
+ data = stdoutbuf + pipe.stdout.read()
+ if data is not None and len(data) > 0:
+ try:
+ data = data.decode("utf-8")
+ outdata.append(data)
+ log.write(data)
+ stdoutbuf = b""
+ except UnicodeDecodeError:
+ stdoutbuf = data
if pipe.stderr in r:
- data = pipe.stderr.read()
- if data is not None:
- errdata.append(data)
- log.write(data)
+ data = stderrbuf + pipe.stderr.read()
+ if data is not None and len(data) > 0:
+ try:
+ data = data.decode("utf-8")
+ errdata.append(data)
+ log.write(data)
+ stderrbuf = b""
+ except UnicodeDecodeError:
+ stderrbuf = data
readextras(r)
if not extrafiles:
extrafiles = []
- if isinstance(cmd, basestring) and not "shell" in options:
+ if isinstance(cmd, str) and not "shell" in options:
options["shell"] = True
try:
stdout, stderr = _logged_communicate(pipe, log, input, extrafiles)
else:
stdout, stderr = pipe.communicate(input)
+ if stdout:
+ stdout = stdout.decode("utf-8")
+ if stderr:
+ stderr = stderr.decode("utf-8")
if pipe.returncode != 0:
raise ExecutionError(cmd, pipe.returncode, stdout, stderr)
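Note: non-text pipes return bytes on Python 3, and a multibyte UTF-8 sequence can straddle two read() calls; the stdoutbuf/stderrbuf carry-over above holds the undecodable tail until the next read completes it. A condensed sketch of the pattern:

    # Buffer raw bytes until they decode; an incomplete UTF-8 tail is
    # carried into the next read instead of raising.
    buf = b""
    def feed(chunk):
        global buf
        buf += chunk
        try:
            text, buf = buf.decode("utf-8"), b""
            return text
        except UnicodeDecodeError:
            return ""  # incomplete sequence, keep buffering

    assert feed(b"caf\xc3") == ""   # first byte of a two-byte character
    assert feed(b"\xa9") == "café"  # completed by the next chunk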
pkg_pn[pn] = []
pkg_pn[pn].append(p)
- logger.debug(1, "providers for %s are: %s", item, pkg_pn.keys())
+ logger.debug(1, "providers for %s are: %s", item, list(pkg_pn.keys()))
# First add PREFERRED_VERSIONS
for pn in pkg_pn:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
+ print(interp.log(' '.join([name, str(args), interp['PWD']]) + '\n'))
# Scan pattern arguments and append a space if necessary
- for i in xrange(len(args)):
+ for i in range(len(args)):
if not RE_SED.search(args[i]):
continue
args[i] = args[i] + ' '
"""
# Save and remove previous arguments
prevargs = []
- for i in xrange(int(self._env['#'])):
+ for i in range(int(self._env['#'])):
i = str(i+1)
prevargs.append(self._env[i])
del self._env[i]
return prevargs
def get_positional_args(self):
- return [self._env[str(i+1)] for i in xrange(int(self._env['#']))]
+ return [self._env[str(i+1)] for i in range(int(self._env['#']))]
def get_variables(self):
return dict(self._env)
from Set import Set as set
from ply import lex
-from sherrors import *
+from bb.pysh.sherrors import *
class NeedMore(Exception):
pass
import os.path
import sys
-import pyshlex
+import bb.pysh.pyshlex as pyshlex
tokens = pyshlex.tokens
from ply import yacc
-import sherrors
+import bb.pysh.sherrors as sherrors
class IORedirect:
def __init__(self, op, filename, io_number=None):
from bb import msg, data, event
from bb import monitordisk
import subprocess
-
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
+import pickle
bblogger = logging.getLogger("BitBake")
logger = logging.getLogger("BitBake.RunQueue")
self.buildable = []
self.stamps = {}
- for taskid in xrange(self.numTasks):
+ for taskid in range(self.numTasks):
fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[taskid]]
taskname = self.rqdata.runq_task[taskid]
self.stamps[taskid] = bb.build.stampfile(taskname, self.rqdata.dataCache, fn)
if len(self.buildable) == 1:
taskid = self.buildable[0]
stamp = self.stamps[taskid]
- if stamp not in self.rq.build_stamps.itervalues():
+ if stamp not in self.rq.build_stamps.values():
return taskid
if not self.rev_prio_map:
- self.rev_prio_map = range(self.numTasks)
- for taskid in xrange(self.numTasks):
+ self.rev_prio_map = list(range(self.numTasks))
+ for taskid in range(self.numTasks):
self.rev_prio_map[self.prio_map[taskid]] = taskid
best = None
prio = self.rev_prio_map[taskid]
if bestprio is None or bestprio > prio:
stamp = self.stamps[taskid]
- if stamp in self.rq.build_stamps.itervalues():
+ if stamp in self.rq.build_stamps.values():
continue
bestprio = prio
best = taskid
def get_task_id(self, fnid, taskname):
- for listid in xrange(len(self.runq_fnid)):
+ for listid in range(len(self.runq_fnid)):
if self.runq_fnid[listid] == fnid and self.runq_task[listid] == taskname:
return listid
return None
"""
lowest = 0
new_chain = []
- for entry in xrange(len(chain)):
+ for entry in range(len(chain)):
if chain[entry] < chain[lowest]:
lowest = entry
new_chain.extend(chain[lowest:])
"""
if len(chain1) != len(chain2):
return False
- for index in xrange(len(chain1)):
+ for index in range(len(chain1)):
if chain1[index] != chain2[index]:
return False
return True
deps_left = []
task_done = []
- for listid in xrange(numTasks):
+ for listid in range(numTasks):
task_done.append(False)
weight.append(1)
deps_left.append(len(self.runq_revdeps[listid]))
# Circular dependency sanity check
problem_tasks = []
- for task in xrange(numTasks):
+ for task in range(numTasks):
if task_done[task] is False or deps_left[task] != 0:
problem_tasks.append(task)
logger.debug(2, "Task %s (%s) is not buildable", task, self.get_user_idstring(task))
if taskid is not None:
depends.add(taskid)
- for task in xrange(len(taskData.tasks_name)):
+ for task in range(len(taskData.tasks_name)):
depends = set()
fnid = taskData.tasks_fnid[task]
fn = taskData.fn_index[fnid]
for task in recursivetasks:
extradeps[task].difference_update(recursivetasksselfref)
- for task in xrange(len(taskData.tasks_name)):
+ for task in range(len(taskData.tasks_name)):
# Add in extra dependencies
if task in extradeps:
self.runq_depends[task] = extradeps[task]
maps = []
delcount = 0
- for listid in xrange(len(self.runq_fnid)):
+ for listid in range(len(self.runq_fnid)):
if runq_build[listid-delcount] == 1:
maps.append(listid-delcount)
else:
# Remap the dependencies to account for the deleted tasks
# Check we didn't delete a task we depend on
- for listid in xrange(len(self.runq_fnid)):
+ for listid in range(len(self.runq_fnid)):
newdeps = []
origdeps = self.runq_depends[listid]
for origdep in origdeps:
logger.verbose("Assign Weightings")
# Generate a list of reverse dependencies to ease future calculations
- for listid in xrange(len(self.runq_fnid)):
+ for listid in range(len(self.runq_fnid)):
for dep in self.runq_depends[listid]:
self.runq_revdeps[dep].add(listid)
# Identify tasks at the end of dependency chains
# Error on circular dependency loops (length two)
endpoints = []
- for listid in xrange(len(self.runq_fnid)):
+ for listid in range(len(self.runq_fnid)):
revdeps = self.runq_revdeps[listid]
if len(revdeps) == 0:
endpoints.append(listid)
# Sanity Check - Check for multiple tasks building the same provider
prov_list = {}
seen_fn = []
- for task in xrange(len(self.runq_fnid)):
+ for task in range(len(self.runq_fnid)):
fn = taskData.fn_index[self.runq_fnid[task]]
if fn in seen_fn:
continue
Dump some debug information on the internal data structures
"""
logger.debug(3, "run_tasks:")
- for task in xrange(len(self.rqdata.runq_task)):
+ for task in range(len(self.rqdata.runq_task)):
logger.debug(3, " (%s)%s - %s: %s Deps %s RevDeps %s", task,
taskQueue.fn_index[self.rqdata.runq_fnid[task]],
self.rqdata.runq_task[task],
self.rqdata.runq_revdeps[task])
logger.debug(3, "sorted_tasks:")
- for task1 in xrange(len(self.rqdata.runq_task)):
+ for task1 in range(len(self.rqdata.runq_task)):
if task1 in self.prio_map:
task = self.prio_map[task1]
logger.debug(3, " (%s)%s - %s: %s Deps %s RevDeps %s", task,
"time" : self.cfgData.getVar("TIME", True),
}
- worker.stdin.write("<cookerconfig>" + pickle.dumps(self.cooker.configuration) + "</cookerconfig>")
- worker.stdin.write("<workerdata>" + pickle.dumps(workerdata) + "</workerdata>")
+ worker.stdin.write(b"<cookerconfig>" + pickle.dumps(self.cooker.configuration) + b"</cookerconfig>")
+ worker.stdin.write(b"<workerdata>" + pickle.dumps(workerdata) + b"</workerdata>")
worker.stdin.flush()
return worker, workerpipe
return
logger.debug(1, "Teardown for bitbake-worker")
try:
- worker.stdin.write("<quit></quit>")
+ worker.stdin.write(b"<quit></quit>")
worker.stdin.flush()
+ worker.stdin.close()
except IOError:
pass
while worker.returncode is None:
stamppresent = []
valid_new = set()
- for task in xrange(len(self.rqdata.runq_fnid)):
+ for task in range(len(self.rqdata.runq_fnid)):
fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
taskname = self.rqdata.runq_task[task]
taskdep = self.rqdata.dataCache.task_deps[fn]
valid_new.add(dep)
invalidtasks = set()
- for task in xrange(len(self.rqdata.runq_fnid)):
+ for task in range(len(self.rqdata.runq_fnid)):
if task not in valid_new and task not in noexec:
invalidtasks.add(task)
match = m
if match is None:
bb.fatal("Can't find a task we're supposed to have written out? (hash: %s)?" % h)
- matches = {k : v for k, v in matches.iteritems() if h not in k}
+ matches = {k : v for k, v in iter(matches.items()) if h not in k}
if matches:
latestmatch = sorted(matches.keys(), key=lambda f: matches[f])[-1]
prevh = __find_md5__.search(latestmatch).group(0)
return True
def finish_now(self):
-
for worker in [self.rq.worker, self.rq.fakeworker]:
if not worker:
continue
try:
- worker.stdin.write("<finishnow></finishnow>")
+ worker.stdin.write(b"<finishnow></finishnow>")
worker.stdin.flush()
except IOError:
# worker must have died?
pass
-
if len(self.failed_fnids) != 0:
self.rq.state = runQueueFailed
return
initial_covered = self.rq.scenequeue_covered.copy()
# Mark initial buildable tasks
- for task in xrange(self.stats.total):
+ for task in range(self.stats.total):
self.runq_running.append(0)
self.runq_complete.append(0)
if len(self.rqdata.runq_depends[task]) == 0:
found = True
while found:
found = False
- for task in xrange(self.stats.total):
+ for task in range(self.stats.total):
if task in self.rq.scenequeue_covered:
continue
logger.debug(1, 'Considering %s (%s): %s' % (task, self.rqdata.get_user_idstring(task), str(self.rqdata.runq_revdeps[task])))
covered_remove = set()
if self.rq.setsceneverify:
invalidtasks = []
- for task in xrange(len(self.rqdata.runq_task)):
+ for task in range(len(self.rqdata.runq_task)):
fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
taskname = self.rqdata.runq_task[task]
taskdep = self.rqdata.dataCache.task_deps[fn]
logger.critical("Failed to spawn fakeroot worker to run %s:%s: %s" % (fn, taskname, str(exc)))
self.rq.state = runQueueFailed
return True
- self.rq.fakeworker.stdin.write("<runtask>" + pickle.dumps((fn, task, taskname, False, self.cooker.collection.get_file_appends(fn), taskdepdata)) + "</runtask>")
+ self.rq.fakeworker.stdin.write(b"<runtask>" + pickle.dumps((fn, task, taskname, False, self.cooker.collection.get_file_appends(fn), taskdepdata)) + b"</runtask>")
self.rq.fakeworker.stdin.flush()
else:
- self.rq.worker.stdin.write("<runtask>" + pickle.dumps((fn, task, taskname, False, self.cooker.collection.get_file_appends(fn), taskdepdata)) + "</runtask>")
+ self.rq.worker.stdin.write(b"<runtask>" + pickle.dumps((fn, task, taskname, False, self.cooker.collection.get_file_appends(fn), taskdepdata)) + b"</runtask>")
self.rq.worker.stdin.flush()
self.build_stamps[task] = bb.build.stampfile(taskname, self.rqdata.dataCache, fn)
return True
# Sanity Checks
- for task in xrange(self.stats.total):
+ for task in range(self.stats.total):
if self.runq_buildable[task] == 0:
logger.error("Task %s never buildable!", task)
if self.runq_running[task] == 0:
# therefore aims to collapse the huge runqueue dependency tree into a smaller one
# only containing the setscene functions.
- for task in xrange(self.stats.total):
+ for task in range(self.stats.total):
self.runq_running.append(0)
self.runq_complete.append(0)
self.runq_buildable.append(0)
# First process the chains up to the first setscene task.
endpoints = {}
- for task in xrange(len(self.rqdata.runq_fnid)):
+ for task in range(len(self.rqdata.runq_fnid)):
sq_revdeps.append(copy.copy(self.rqdata.runq_revdeps[task]))
sq_revdeps_new.append(set())
if (len(self.rqdata.runq_revdeps[task]) == 0) and task not in self.rqdata.runq_setscene:
newendpoints[dep] = tasks
if len(newendpoints) != 0:
process_endpoints2(newendpoints)
- for task in xrange(len(self.rqdata.runq_fnid)):
+ for task in range(len(self.rqdata.runq_fnid)):
sq_revdeps2.append(copy.copy(self.rqdata.runq_revdeps[task]))
sq_revdeps_new2.append(set())
if (len(self.rqdata.runq_revdeps[task]) == 0) and task not in self.rqdata.runq_setscene:
if sq_revdeps_new2[task]:
self.unskippable.append(self.rqdata.runq_setscene.index(task))
- for task in xrange(len(self.rqdata.runq_fnid)):
+ for task in range(len(self.rqdata.runq_fnid)):
if task in self.rqdata.runq_setscene:
deps = set()
for dep in sq_revdeps_new[task]:
for dep in self.sq_harddeps[task]:
sq_revdeps_squash[dep].add(task)
- #for task in xrange(len(sq_revdeps_squash)):
+ #for task in range(len(sq_revdeps_squash)):
# realtask = self.rqdata.runq_setscene[task]
# bb.warn("Task %s: %s_setscene is %s " % (task, self.rqdata.get_user_idstring(realtask) , sq_revdeps_squash[task]))
self.sq_revdeps = sq_revdeps_squash
self.sq_revdeps2 = copy.deepcopy(self.sq_revdeps)
- for task in xrange(len(self.sq_revdeps)):
+ for task in range(len(self.sq_revdeps)):
self.sq_deps.append(set())
- for task in xrange(len(self.sq_revdeps)):
+ for task in range(len(self.sq_revdeps)):
for dep in self.sq_revdeps[task]:
self.sq_deps[dep].add(task)
- for task in xrange(len(self.sq_revdeps)):
+ for task in range(len(self.sq_revdeps)):
if len(self.sq_revdeps[task]) == 0:
self.runq_buildable[task] = 1
sq_task = []
noexec = []
stamppresent = []
- for task in xrange(len(self.sq_revdeps)):
+ for task in range(len(self.sq_revdeps)):
realtask = self.rqdata.runq_setscene[task]
fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[realtask]]
taskname = self.rqdata.runq_task[realtask]
for v in valid:
valid_new.append(sq_task[v])
- for task in xrange(len(self.sq_revdeps)):
+ for task in range(len(self.sq_revdeps)):
if task not in valid_new and task not in noexec:
realtask = self.rqdata.runq_setscene[task]
logger.debug(2, 'No package found, so skipping setscene task %s',
task = None
if self.stats.active < self.number_tasks:
# Find the next setscene to run
- for nexttask in xrange(self.stats.total):
+ for nexttask in range(self.stats.total):
if self.runq_buildable[nexttask] == 1 and self.runq_running[nexttask] != 1:
if nexttask in self.unskippable:
logger.debug(2, "Setscene task %s is unskippable" % self.rqdata.get_user_idstring(self.rqdata.runq_setscene[nexttask]))
if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not self.cooker.configuration.dry_run:
if not self.rq.fakeworker:
self.rq.start_fakeworker(self)
- self.rq.fakeworker.stdin.write("<runtask>" + pickle.dumps((fn, realtask, taskname, True, self.cooker.collection.get_file_appends(fn), None)) + "</runtask>")
+ self.rq.fakeworker.stdin.write(b"<runtask>" + pickle.dumps((fn, realtask, taskname, True, self.cooker.collection.get_file_appends(fn), None)) + b"</runtask>")
self.rq.fakeworker.stdin.flush()
else:
- self.rq.worker.stdin.write("<runtask>" + pickle.dumps((fn, realtask, taskname, True, self.cooker.collection.get_file_appends(fn), None)) + "</runtask>")
+ self.rq.worker.stdin.write(b"<runtask>" + pickle.dumps((fn, realtask, taskname, True, self.cooker.collection.get_file_appends(fn), None)) + b"</runtask>")
self.rq.worker.stdin.flush()
self.runq_running[task] = 1
self.rq.read_workers()
return self.rq.active_fds()
- #for task in xrange(self.stats.total):
+ #for task in range(self.stats.total):
# if self.runq_running[task] != 1:
# buildable = self.runq_buildable[task]
# revdeps = self.sq_revdeps[task]
if pipeout:
pipeout.close()
bb.utils.nonblockingfd(self.input)
- self.queue = ""
+ self.queue = b""
self.d = d
self.rq = rq
self.rqexec = rqexec
start = len(self.queue)
try:
- self.queue = self.queue + self.input.read(102400)
+ self.queue = self.queue + (self.input.read(102400) or b"")
except (OSError, IOError) as e:
if e.errno != errno.EAGAIN:
raise
found = True
while found and len(self.queue):
found = False
- index = self.queue.find("</event>")
- while index != -1 and self.queue.startswith("<event>"):
+ index = self.queue.find(b"</event>")
+ while index != -1 and self.queue.startswith(b"<event>"):
try:
event = pickle.loads(self.queue[7:index])
except ValueError as e:
bb.event.fire_from_worker(event, self.d)
found = True
self.queue = self.queue[index+8:]
- index = self.queue.find("</event>")
- index = self.queue.find("</exitcode>")
- while index != -1 and self.queue.startswith("<exitcode>"):
+ index = self.queue.find(b"</event>")
+ index = self.queue.find(b"</exitcode>")
+ while index != -1 and self.queue.startswith(b"<exitcode>"):
try:
task, status = pickle.loads(self.queue[10:index])
except ValueError as e:
self.rqexec.runqueue_process_waitpid(task, status)
found = True
self.queue = self.queue[index+11:]
- index = self.queue.find("</exitcode>")
+ index = self.queue.find(b"</exitcode>")
return (end > start)
def close(self):
import sys
import time
import select
-from Queue import Empty
+from queue import Empty
from multiprocessing import Event, Process, util, Queue, Pipe, queues, Manager
from . import BitBakeBaseServer, BitBakeBaseServerConnection, BaseImplServer
if not fds:
fds = []
- for function, data in self._idlefuns.items():
+ for function, data in list(self._idlefuns.items()):
try:
retval = function(self, data, False)
if retval is False:
nextsleep = None
elif retval is True:
nextsleep = None
- elif isinstance(retval, float):
+ elif isinstance(retval, float) and nextsleep:
if (retval < nextsleep):
nextsleep = retval
elif nextsleep is None:
# Wrap Queue to provide API which isn't server implementation specific
class ProcessEventQueue(multiprocessing.queues.Queue):
def __init__(self, maxsize):
- multiprocessing.queues.Queue.__init__(self, maxsize)
+ multiprocessing.queues.Queue.__init__(self, maxsize, ctx=multiprocessing.get_context())
self.exit = False
bb.utils.set_process_name("ProcessEQueue")
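Note: since Python 3.4, instantiating multiprocessing.queues.Queue directly requires an explicit context, hence the added ctx= argument. A toy subclass showing the same requirement:

    # Direct Queue subclassing needs ctx= on Python >= 3.4.
    import multiprocessing
    import multiprocessing.queues

    class EventQueue(multiprocessing.queues.Queue):
        def __init__(self, maxsize):
            super().__init__(maxsize, ctx=multiprocessing.get_context())

    q = EventQueue(10)
    q.put("event")
    print(q.get())  # event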
in the server's main loop.
"""
+import os
+import sys
+
+import hashlib
+import time
+import socket
+import signal
+import threading
+import pickle
+import inspect
+import select
+import http.client
+import xmlrpc.client
+from xmlrpc.server import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
+
import bb
-import xmlrpclib, sys
from bb import daemonize
from bb.ui import uievent
-import hashlib, time
-import socket
-import os, signal
-import threading
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
+from . import BitBakeBaseServer, BitBakeBaseServerConnection, BaseImplServer
DEBUG = False
-from SimpleXMLRPCServer import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
-import inspect, select, httplib
-
-from . import BitBakeBaseServer, BitBakeBaseServerConnection, BaseImplServer
-
-class BBTransport(xmlrpclib.Transport):
+class BBTransport(xmlrpc.client.Transport):
def __init__(self, timeout):
self.timeout = timeout
self.connection_token = None
- xmlrpclib.Transport.__init__(self)
+ xmlrpc.client.Transport.__init__(self)
# Modified from default to pass timeout to HTTPConnection
def make_connection(self, host):
# create a HTTP connection object from a host descriptor
chost, self._extra_headers, x509 = self.get_host_info(host)
#store the host argument along with the connection object
- self._connection = host, httplib.HTTPConnection(chost, timeout=self.timeout)
+ self._connection = host, http.client.HTTPConnection(chost, timeout=self.timeout)
return self._connection[1]
def set_connection_token(self, token):
def send_content(self, h, body):
if self.connection_token:
h.putheader("Bitbake-token", self.connection_token)
- xmlrpclib.Transport.send_content(self, h, body)
+ xmlrpc.client.Transport.send_content(self, h, body)
def _create_server(host, port, timeout = 60):
t = BBTransport(timeout)
- s = xmlrpclib.ServerProxy("http://%s:%d/" % (host, port), transport=t, allow_none=True)
+ s = xmlrpc.client.ServerProxy("http://%s:%d/" % (host, port), transport=t, allow_none=True, use_builtin_types=True)
return s, t
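Note: xmlrpclib and SimpleXMLRPCServer merged into the xmlrpc package; use_builtin_types=True makes the client return plain bytes/datetime instead of Binary/DateTime wrappers, matching what the surrounding code expects. A minimal client mirroring _create_server() (host and port are placeholders):

    import xmlrpc.client

    server = xmlrpc.client.ServerProxy("http://127.0.0.1:8000/",
                                       allow_none=True, use_builtin_types=True)
    # Remote methods then read like local calls, e.g. server.runCommand(...)
    # (runCommand is illustrative of the BitBake command API).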
class BitBakeServerCommands():
def addClient(self):
if self.has_client:
return None
- token = hashlib.md5(str(time.time())).hexdigest()
+ token = hashlib.md5(str(time.time()).encode("utf-8")).hexdigest()
self.server.set_connection_token(token)
self.has_client = True
return token
while not self.quit:
fds = [self]
nextsleep = 0.1
- for function, data in self._idlefuns.items():
+ for function, data in list(self._idlefuns.items()):
retval = None
try:
retval = function(self, data, False)
pass
# Tell idle functions we're exiting
- for function, data in self._idlefuns.items():
+ for function, data in list(self._idlefuns.items()):
try:
retval = function(self, data, True)
except:
bb.warn("Could not create socket for %s:%s (%s)" % (host, port, str(e)))
raise e
try:
- self.serverImpl = XMLRPCProxyServer(host, port)
+ self.serverImpl = XMLRPCProxyServer(host, port, use_builtin_types=True)
self.connection = BitBakeXMLRPCServerConnection(self.serverImpl, (ip, 0), self.observer_only, featureset)
return self.connection.connect(self.token)
except Exception as e:
import os
import re
import tempfile
+import pickle
import bb.data
from bb.checksum import FileChecksumCache
logger = logging.getLogger('BitBake.SigGen')
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
- logger.info('Importing cPickle failed. Falling back to a very slow implementation.')
-
def init(d):
- siggens = [obj for obj in globals().itervalues()
+ siggens = [obj for obj in globals().values()
if type(obj) is type and issubclass(obj, SignatureGenerator)]
desired = d.getVar("BB_SIGNATURE_HANDLER", True) or "noop"
var = lookupcache[dep]
if var is not None:
data = data + str(var)
- self.basehash[fn + "." + task] = hashlib.md5(data).hexdigest()
+ self.basehash[fn + "." + task] = hashlib.md5(data.encode("utf-8")).hexdigest()
taskdeps[task] = alldeps
self.taskdeps[fn] = taskdeps
self.taints[k] = taint
logger.warning("%s is tainted from a forced run" % k)
- h = hashlib.md5(data).hexdigest()
+ h = hashlib.md5(data.encode("utf-8")).hexdigest()
self.taskhash[k] = h
#d.setVar("BB_TASKHASH_task-%s" % task, taskhash[task])
return h
with os.fdopen(fd, "wb") as stream:
p = pickle.dump(data, stream, -1)
stream.flush()
- os.chmod(tmpfile, 0664)
+ os.chmod(tmpfile, 0o664)
os.rename(tmpfile, sigfile)
except (OSError, IOError) as err:
try:
if val is not None:
basedata = basedata + str(val)
- return hashlib.md5(basedata).hexdigest()
+ return hashlib.md5(basedata.encode("utf-8")).hexdigest()
def calc_taskhash(sigdata):
data = sigdata['basehash']
else:
data = data + sigdata['taint']
- return hashlib.md5(data).hexdigest()
+ return hashlib.md5(data.encode("utf-8")).hexdigest()
def dump_sigfile(a):
return
if not item in dataCache.providers:
- close_matches = self.get_close_matches(item, dataCache.providers.keys())
+ close_matches = self.get_close_matches(item, list(dataCache.providers.keys()))
# Is it in RuntimeProviders ?
all_p = bb.providers.getRuntimeProviders(dataCache, item)
for fn in all_p:
dependees = self.get_dependees(targetid)
for fnid in dependees:
self.fail_fnid(fnid, missing_list)
- for taskid in xrange(len(self.tasks_idepends)):
+ for taskid in range(len(self.tasks_idepends)):
idepends = self.tasks_idepends[taskid]
for (idependid, idependtask) in idepends:
if idependid == targetid:
dependees = self.get_rdependees(targetid)
for fnid in dependees:
self.fail_fnid(fnid, missing_list)
- for taskid in xrange(len(self.tasks_irdepends)):
+ for taskid in range(len(self.tasks_irdepends)):
irdepends = self.tasks_irdepends[taskid]
for (idependid, idependtask) in irdepends:
if idependid == targetid:
logger.debug(3, ", ".join(self.run_names_index))
logger.debug(3, "build_targets:")
- for buildid in xrange(len(self.build_names_index)):
+ for buildid in range(len(self.build_names_index)):
target = self.build_names_index[buildid]
targets = "None"
if buildid in self.build_targets:
logger.debug(3, " (%s)%s: %s", buildid, target, targets)
logger.debug(3, "run_targets:")
- for runid in xrange(len(self.run_names_index)):
+ for runid in range(len(self.run_names_index)):
target = self.run_names_index[runid]
targets = "None"
if runid in self.run_targets:
logger.debug(3, " (%s)%s: %s", runid, target, targets)
logger.debug(3, "tasks:")
- for task in xrange(len(self.tasks_name)):
+ for task in range(len(self.tasks_name)):
logger.debug(3, " (%s)%s - %s: %s",
task,
self.fn_index[self.tasks_fnid[task]],
if hasattr(bb.utils, "_context"):
self.context = bb.utils._context
else:
- import __builtin__
- self.context = __builtin__.__dict__
+ import builtins
+ self.context = builtins.__dict__
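# Quick illustration of the rename above: Python 3's builtins module replaces
# Python 2's __builtin__, but the shared namespace dict works the same way.
import builtins
assert "len" in builtins.__dict__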
def parseExpression(self, exp):
parsedvar = self.d.expandWithRefs(exp, None)
self.assertEqual(self.d.getVar("foo", False), None)
def test_keys(self):
- keys = self.d.keys()
- self.assertEqual(keys, ['value_of_foo', 'foo', 'bar'])
+ keys = list(self.d.keys())
+ self.assertCountEqual(keys, ['value_of_foo', 'foo', 'bar'])
def test_keys_deletion(self):
newd = bb.data.createCopy(self.d)
newd.delVar("bar")
- keys = newd.keys()
- self.assertEqual(keys, ['value_of_foo', 'foo'])
+ keys = list(newd.keys())
+ self.assertCountEqual(keys, ['value_of_foo', 'foo'])
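# A small standalone sketch of the assertion change above: unittest renamed
# assertItemsEqual to assertCountEqual in Python 3, and dict views are no
# longer lists, so keys are wrapped in list() and compared order-insensitively.
import unittest
class _KeyDemo(unittest.TestCase):
    def test_order_insensitive(self):
        self.assertCountEqual(list({"foo": 1, "bar": 2}.keys()), ["bar", "foo"])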
class TestNestedExpansions(unittest.TestCase):
def setUp(self):
self.d.setVar("TEST2_bar", "testvalue2")
bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST2", True), "testvalue2")
- self.assertItemsEqual(self.d.keys(), ['TEST', 'TEST2', 'OVERRIDES', 'TEST2_bar'])
+ self.assertCountEqual(list(self.d.keys()), ['TEST', 'TEST2', 'OVERRIDES', 'TEST2_bar'])
def test_multiple_override(self):
self.d.setVar("TEST_bar", "testvalue2")
self.d.setVar("TEST_foo", "testvalue4")
bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST", True), "testvalue3")
- self.assertItemsEqual(self.d.keys(), ['TEST', 'TEST_foo', 'OVERRIDES', 'TEST_bar', 'TEST_local'])
+ self.assertCountEqual(list(self.d.keys()), ['TEST', 'TEST_foo', 'OVERRIDES', 'TEST_bar', 'TEST_local'])
def test_multiple_combined_overrides(self):
self.d.setVar("TEST_local_foo_bar", "testvalue3")
def parsehelper(self, content, suffix = ".bb"):
f = tempfile.NamedTemporaryFile(suffix = suffix)
- f.write(content)
+ f.write(bytes(content, "utf-8"))
f.flush()
os.chdir(os.path.dirname(f.name))
return f
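# NamedTemporaryFile opens in binary mode by default, so Python 3 requires the
# str content to be encoded before writing -- a minimal illustration:
import tempfile
with tempfile.NamedTemporaryFile(suffix=".bb") as f:
    f.write(bytes('VAR = "value"\n', "utf-8"))   # or content.encode("utf-8")
    f.flush()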
def set_hob_icon_to_stock_icon(self, file_path, stock_id=""):
try:
pixbuf = gtk.gdk.pixbuf_new_from_file(file_path)
- except Exception, e:
+ except Exception as e:
return None
if stock_id and (gtk.icon_factory_lookup_default(stock_id) == None):
self.set_text(text)
def set_stop_title(self, text=None):
        if not text:
            text = ""
        self.set_text(text)
def reset(self):
self.set_fraction(0)
import gobject
import logging
import time
-import urllib
-import urllib2
+import urllib.request, urllib.parse, urllib.error
import pango
from bb.ui.crumbs.hobcolor import HobColors
from bb.ui.crumbs.hobwidget import HobWarpCellRendererText, HobCellRendererPixbuf
class RunningBuildModel (gtk.TreeStore):
- (COL_LOG, COL_PACKAGE, COL_TASK, COL_MESSAGE, COL_ICON, COL_COLOR, COL_NUM_ACTIVE) = range(7)
+ (COL_LOG, COL_PACKAGE, COL_TASK, COL_MESSAGE, COL_ICON, COL_COLOR, COL_NUM_ACTIVE) = list(range(7))
def __init__ (self):
gtk.TreeStore.__init__ (self,
url = 'http://pastebin.com/api_public.php'
params = {'paste_code': text, 'paste_format': 'text'}
- req = urllib2.Request(url, urllib.urlencode(params))
- response = urllib2.urlopen(req)
+    req = urllib.request.Request(url, urllib.parse.urlencode(params).encode("utf-8"))
+ response = urllib.request.urlopen(req)
    paste_url = response.read().decode("utf-8")
return paste_url
# @todo Provide visual feedback to the user that it is done and that
# it worked.
- print paste_url
+ print(paste_url)
self._add_to_clipboard(paste_url)
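# A hedged sketch of the urllib2 -> urllib.request migration above: urlencode()
# now lives in urllib.parse, the POST body passed to Request must be bytes
# (hence the .encode() above), and response.read() likewise returns bytes that
# need decoding before use as text.
import urllib.request, urllib.parse
params = urllib.parse.urlencode({'paste_code': 'text', 'paste_format': 'text'})
req = urllib.request.Request('http://pastebin.com/api_public.php',
                             params.encode("utf-8"))
# response = urllib.request.urlopen(req)   # network round-trip, shape only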
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+from gi import pygtkcompat
+
+pygtkcompat.enable()
+pygtkcompat.enable_gtk(version='3.0')
+
import gobject
import gtk
-import xmlrpclib
+import xmlrpc.client
from bb.ui.crumbs.runningbuild import RunningBuildTreeView, RunningBuild
from bb.ui.crumbs.progress import ProgressBar
-import Queue
+import queue
def event_handle_idle_func (eventHandler, build, pbar):
elif ret != True:
print("Error running command '%s': returned %s" % (cmdline, ret))
return 1
- except xmlrpclib.Fault as x:
+    except xmlrpc.client.Fault as x:
print("XMLRPC Fault getting commandline:\n %s" % x)
return 1
import os
import sys
-import xmlrpclib
+import xmlrpc.client as xmlrpclib
import logging
import progressbar
import signal
def clearFooter(self):
if self.footer_present:
lines = self.footer_present
- sys.stdout.write(self.curses.tparm(self.cuu, lines))
- sys.stdout.write(self.curses.tparm(self.ed))
+ sys.stdout.buffer.write(self.curses.tparm(self.cuu, lines))
+ sys.stdout.buffer.write(self.curses.tparm(self.ed))
sys.stdout.flush()
self.footer_present = False
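# Why .buffer above: curses.tparm() returns bytes, and Python 3's sys.stdout
# accepts only str; the raw bytes go to the underlying binary stream instead.
import curses, sys
curses.setupterm()
cuu = curses.tigetstr("cuu")      # terminal capability as bytes (or None)
if cuu:
    sys.stdout.buffer.write(curses.tparm(cuu, 2))   # move cursor up two lines
    sys.stdout.flush()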
"""
-from __future__ import division
import logging
import os, sys, itertools, time, subprocess
sys.exit("FATAL: The ncurses ui could not load the required curses python module.")
import bb
-import xmlrpclib
+import xmlrpc.client
from bb import ui
from bb.ui import uihelper
elif ret != True:
print("Couldn't get default commandlind! %s" % ret)
return
- except xmlrpclib.Fault as x:
+ except xmlrpc.client.Fault as x:
print("XMLRPC Fault getting commandline:\n %s" % x)
return
taw.setText(0, 0, "")
if activetasks:
taw.appendText("Active Tasks:\n")
- for task in activetasks.itervalues():
+ for task in activetasks.values():
taw.appendText(task["title"] + '\n')
if failedtasks:
taw.appendText("Failed Tasks:\n")
"""
import socket, threading, pickle, collections
-from SimpleXMLRPCServer import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
+from xmlrpc.server import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
class BBUIEventQueue:
    def __init__(self, BBServer, clientinfo=("localhost", 0)):
SimpleXMLRPCServer.__init__( self,
interface,
requestHandler=SimpleXMLRPCRequestHandler,
- logRequests=False, allow_none=True)
+ logRequests=False, allow_none=True, use_builtin_types=True)
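# A minimal sketch of the xmlrpc.server setup above: use_builtin_types=True
# (new in Python 3.3) makes the server hand over bytes and datetime objects
# directly instead of the legacy xmlrpc Binary/DateTime wrappers.
from xmlrpc.server import SimpleXMLRPCServer
srv = SimpleXMLRPCServer(("localhost", 0), logRequests=False,
                         allow_none=True, use_builtin_types=True)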
def get_request(self):
while not self.quit:
import signal
import ast
import collections
-from commands import getstatusoutput
+from subprocess import getstatusoutput
from contextlib import contextmanager
from ctypes import cdll
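# The Python 2 commands module is gone; subprocess.getstatusoutput() is the
# drop-in replacement imported above.
from subprocess import getstatusoutput
status, output = getstatusoutput("uname -s")   # (exit status, combined output)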
r.append((0, int(m.group(1))))
s = m.group(2)
continue
- if s[0] in string.letters:
+ if s[0] in string.ascii_letters:
m = alpha_regexp.match(s)
r.append((1, m.group(1)))
s = m.group(2)
"""
removed_vars = {}
- for key in os.environ.keys():
+ for key in list(os.environ):
if key in good_vars:
continue
"""
Remove all variables from the environment.
"""
- for s in os.environ.keys():
+ for s in list(os.environ.keys()):
os.unsetenv(s)
del os.environ[s]
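# A sketch of the snapshot-then-delete pattern above: in Python 3,
# os.environ.keys() is a live view, so mutating the environment while
# iterating it directly would raise RuntimeError; list() takes a stable copy.
import os
for key in list(os.environ.keys()):
    if key.startswith("DEMO_"):    # hypothetical filter for illustration
        del os.environ[key]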
if not val:
return falsevalue
val = set(val.split())
- if isinstance(checkvalues, basestring):
+ if isinstance(checkvalues, str):
checkvalues = set(checkvalues.split())
else:
checkvalues = set(checkvalues)
if not val:
return falsevalue
val = set(val.split())
- if isinstance(checkvalues, basestring):
+ if isinstance(checkvalues, str):
checkvalues = set(checkvalues.split())
else:
checkvalues = set(checkvalues)
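# basestring disappeared in Python 3; plain str now covers the old str/unicode
# pair, so the checks above accept either a space-separated string or any
# other iterable of values -- a hypothetical helper mirroring that logic:
def _to_set(checkvalues):
    if isinstance(checkvalues, str):
        return set(checkvalues.split())
    return set(checkvalues)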
aidx += 1
# Handle keyword arguments
context.update(kwargs)
- funcargs.extend(['%s=%s' % (arg, arg) for arg in kwargs.iterkeys()])
+ funcargs.extend(['%s=%s' % (arg, arg) for arg in kwargs.keys()])
code = 'retval = %s(%s)' % (func, ', '.join(funcargs))
comp = bb.utils.better_compile(code, '<string>', '<string>')
bb.utils.better_exec(comp, context, code, '<string>')
else:
varset_new = varset_start
- if isinstance(indent, (int, long)):
+ if isinstance(indent, int):
if indent == -1:
indentspc = ' ' * (len(varset_new) + 2)
else:
in_var = None
else:
skip = False
- for (varname, var_re) in var_res.iteritems():
+ for (varname, var_re) in var_res.items():
res = var_re.match(line)
if res:
isfunc = varname.endswith('()')
# Use longest path so we handle nested layers
matchlen = 0
match = None
- for collection, regex in collection_res.iteritems():
+ for collection, regex in collection_res.items():
if len(regex) > matchlen and re.match(regex, path):
matchlen = len(regex)
match = collection
applied_appends = []
for layer in layers:
overlayed = []
- for f in self.tinfoil.cooker.collection.overlayed.iterkeys():
+ for f in self.tinfoil.cooker.collection.overlayed.keys():
for of in self.tinfoil.cooker.collection.overlayed[f]:
if of.startswith(layer):
overlayed.append(of)
self.tinfoil = tinfoil
self.bblayers = (self.tinfoil.config_data.getVar('BBLAYERS', True) or "").split()
layerconfs = self.tinfoil.config_data.varhistory.get_variable_items_files('BBFILE_COLLECTIONS', self.tinfoil.config_data)
- self.bbfile_collections = {layer: os.path.dirname(os.path.dirname(path)) for layer, path in layerconfs.iteritems()}
+ self.bbfile_collections = {layer: os.path.dirname(os.path.dirname(path)) for layer, path in layerconfs.items()}
@staticmethod
def add_command(subparsers, cmdname, function, parserecipes=True, *args, **kwargs):
import argparse
-import httplib
+import http.client
import json
import logging
import os
import subprocess
-import urlparse
+import urllib.parse
from bblayers.action import ActionPlugin
def get_json_data(self, apiurl):
proxy_settings = os.environ.get("http_proxy", None)
conn = None
- _parsedurl = urlparse.urlparse(apiurl)
+ _parsedurl = urllib.parse.urlparse(apiurl)
path = _parsedurl.path
query = _parsedurl.query
def parse_url(url):
- parsedurl = urlparse.urlparse(url)
+ parsedurl = urllib.parse.urlparse(url)
if parsedurl.netloc[0] == '[':
host, port = parsedurl.netloc[1:].split(']', 1)
if ':' in port:
if proxy_settings is None:
host, port = parse_url(apiurl)
- conn = httplib.HTTPConnection(host, port)
+ conn = http.client.HTTPConnection(host, port)
conn.request("GET", path + "?" + query)
else:
host, port = parse_url(proxy_settings)
- conn = httplib.HTTPConnection(host, port)
+ conn = http.client.HTTPConnection(host, port)
conn.request("GET", apiurl)
r = conn.getresponse()
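# A condensed sketch of the http.client flow above (example URL, not a real
# endpoint): connect straight to the target host, or to the proxy with the
# absolute URL when http_proxy is set.
import http.client, urllib.parse, os
apiurl = "http://example.org/layerindex/api/"
proxy = os.environ.get("http_proxy")
if proxy is None:
    parsed = urllib.parse.urlparse(apiurl)
    conn = http.client.HTTPConnection(parsed.netloc)
    conn.request("GET", parsed.path + ("?" + parsed.query if parsed.query else ""))
else:
    pparsed = urllib.parse.urlparse(proxy)
    conn = http.client.HTTPConnection(pparsed.netloc)
    conn.request("GET", apiurl)
# r = conn.getresponse()   # network round-trip, shown for shape only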
# Ensure we list skipped recipes
# We are largely guessing about PN, PV and the preferred version here,
# but we have no choice since skipped recipes are not fully parsed
- skiplist = self.tinfoil.cooker.skiplist.keys()
+ skiplist = list(self.tinfoil.cooker.skiplist.keys())
skiplist.sort( key=lambda fileitem: self.tinfoil.cooker.collection.calc_bbfile_priority(fileitem) )
skiplist.reverse()
for fn in skiplist:
def show_appends_for_skipped(self):
filenames = [os.path.basename(f)
- for f in self.tinfoil.cooker.skiplist.iterkeys()]
+ for f in self.tinfoil.cooker.skiplist.keys()]
return self.show_appends_output(filenames, None, " (skipped)")
def show_appends_output(self, filenames, best_filename, name_suffix = ''):
paren_or_comma()
self.write(keyword.arg + '=')
self.visit(keyword.value)
- if node.starargs is not None:
+ if hasattr(node, 'starargs') and node.starargs is not None:
paren_or_comma()
self.write('*')
self.visit(node.starargs)
- if node.kwargs is not None:
+ if hasattr(node, 'kwargs') and node.kwargs is not None:
paren_or_comma()
self.write('**')
self.visit(node.kwargs)
write_comma()
self.write(keyword.arg + '=')
self.visit(keyword.value)
- if node.starargs is not None:
+ if hasattr(node, 'starargs') and node.starargs is not None:
write_comma()
self.write('*')
self.visit(node.starargs)
- if node.kwargs is not None:
+ if hasattr(node, 'kwargs') and node.kwargs is not None:
write_comma()
self.write('**')
self.visit(node.kwargs)
self.lexer = None
self.parser= None
def __getitem__(self,n):
+ if isinstance(n,slice):
+ return [self[i] for i in range(*(n.indices(len(self.slice))))]
if n >= 0: return self.slice[n].value
else: return self.stack[n].value
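# slice.indices(len) clamps start/stop/step to the sequence length, which is
# exactly what the new __getitem__ slice branch above unpacks into range():
s = slice(1, None, 2)
print(s.indices(5))                   # -> (1, 5, 2)
print(list(range(*s.indices(5))))     # -> [1, 3]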
self.connection.close()
def __getitem__(self,tblname):
- if not isinstance(tblname, basestring):
+ if not isinstance(tblname, str):
raise TypeError("tblname argument must be a string, not '%s'" %
type(tblname))
if tblname in self._tables:
import os,sys,logging
import signal, time
-from SimpleXMLRPCServer import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
+from xmlrpc.server import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
import threading
-import Queue
+import queue
import socket
-import StringIO
+import io
try:
import sqlite3
self.register_function(self.importone, "importone")
self.register_introspection_functions()
- self.requestqueue = Queue.Queue()
+ self.requestqueue = queue.Queue()
self.handlerthread = threading.Thread(target = self.process_request_thread)
self.handlerthread.daemon = False
while not self.quit:
try:
(request, client_address) = self.requestqueue.get(True, 30)
- except Queue.Empty:
+ except queue.Empty:
self.table.sync_if_dirty()
continue
try:
Returns None if the database engine does not support dumping to
script or if some other error is encountered in processing.
"""
- buff = StringIO.StringIO()
+ buff = io.StringIO()
try:
self.table.sync()
self.table.dump_db(buff)
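# StringIO now lives in io, and sqlite3's dump interface emits str, so a text
# buffer is the right sink -- a standalone illustration of the dump above:
import io, sqlite3
conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE demo (k TEXT)")
buff = io.StringIO()
for line in conn.iterdump():
    buff.write("%s\n" % line)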
def auto_start(d):
global singleton
- host_params = filter(None, (d.getVar('PRSERV_HOST', True) or '').split(':'))
+ host_params = list(filter(None, (d.getVar('PRSERV_HOST', True) or '').split(':')))
if not host_params:
return None