raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI")
if len(scms) == 1 and len(urldata[scms[0]].names) == 1:
- autoinc, rev = urldata[scms[0]].method.sortable_revision(scms[0], urldata[scms[0]], d, urldata[scms[0]].names[0])
+ autoinc, rev = urldata[scms[0]].method.sortable_revision(urldata[scms[0]], d, urldata[scms[0]].names[0])
if len(rev) > 10:
rev = rev[:10]
if autoinc:
for scm in scms:
ud = urldata[scm]
for name in ud.names:
- autoinc, rev = ud.method.sortable_revision(scm, ud, d, name)
+ autoinc, rev = ud.method.sortable_revision(ud, d, name)
seenautoinc = seenautoinc or autoinc
if len(rev) > 10:
rev = rev[:10]
# False means try another url
try:
if check:
- found = ud.method.checkstatus(ud.url, ud, ld)
+ found = ud.method.checkstatus(ud, ld)
if found:
return found
return False
os.chdir(ld.getVar("DL_DIR", True))
- if not os.path.exists(ud.donestamp) or ud.method.need_update(ud.url, ud, ld):
- ud.method.download(ud.url, ud, ld)
+ if not os.path.exists(ud.donestamp) or ud.method.need_update(ud, ld):
+ ud.method.download(ud, ld)
if hasattr(ud.method,"build_mirror_data"):
- ud.method.build_mirror_data(ud.url, ud, ld)
+ ud.method.build_mirror_data(ud, ld)
if not ud.localpath or not os.path.exists(ud.localpath):
return False
dest = os.path.join(dldir, os.path.basename(ud.localpath))
if not os.path.exists(dest):
os.symlink(ud.localpath, dest)
- if not os.path.exists(origud.donestamp) or origud.method.need_update(origud.url, origud, ld):
- origud.method.download(origud.url, origud, ld)
+ if not os.path.exists(origud.donestamp) or origud.method.need_update(origud, ld):
+ origud.method.download(origud, ld)
if hasattr(ud.method,"build_mirror_data"):
- origud.method.build_mirror_data(origud.url, origud, ld)
+ origud.method.build_mirror_data(origud, ld)
return None
# Otherwise the result is a local file:// and we symlink to it
if not os.path.exists(origud.localpath):
var = "SRCREV_%s_pn-%s" % (name, pn)
raise FetchError("Please set %s to a valid value" % var, ud.url)
if rev == "AUTOINC":
- rev = ud.method.latest_revision(ud.url, ud, d, name)
+ rev = ud.method.latest_revision(ud, d, name)
return rev
self.method = None
for m in methods:
- if m.supports(url, self, d):
+ if m.supports(self, d):
self.method = m
break
self.localpath = self.parm["localpath"]
self.basename = os.path.basename(self.localpath)
elif self.localfile:
- self.localpath = self.method.localpath(self.url, self, d)
+ self.localpath = self.method.localpath(self, d)
dldir = d.getVar("DL_DIR", True)
# Note: .done and .lock files should always be in DL_DIR whereas localpath may not be.
def setup_localpath(self, d):
if not self.localpath:
- self.localpath = self.method.localpath(self.url, self, d)
+ self.localpath = self.method.localpath(self, d)
def getSRCDate(self, d):
"""
def __init__(self, urls = []):
self.urls = []
- def supports(self, url, urldata, d):
+ def supports(self, urldata, d):
"""
Check to see if this fetch class supports a given url.
"""
return 0
- def localpath(self, url, urldata, d):
+ def localpath(self, urldata, d):
"""
Return the local filename of a given url assuming a successful fetch.
Can also setup variables in urldata for use in go (saving code duplication
urls = property(getUrls, setUrls, None, "Urls property")
- def need_update(self, url, ud, d):
+ def need_update(self, ud, d):
"""
Force a fetch, even if localpath exists?
"""
"""
return False
- def download(self, url, urldata, d):
+ def download(self, urldata, d):
"""
Fetch urls
Assumes localpath was called first
"""
bb.utils.remove(urldata.localpath)
- def try_premirror(self, url, urldata, d):
+ def try_premirror(self, urldata, d):
"""
Should premirrors be used?
"""
return True
- def checkstatus(self, url, urldata, d):
+ def checkstatus(self, urldata, d):
"""
Check the status of a URL
Assumes localpath was called first
- logger.info("URL %s could not be checked for status since no method exists.", url)
+ logger.info("URL %s could not be checked for status since no method exists.", urldata.url)
return True
- def latest_revision(self, url, ud, d, name):
+ def latest_revision(self, ud, d, name):
"""
Look in the cache for the latest revision, if not present ask the SCM.
"""
- raise ParameterError("The fetcher for this URL does not support _latest_revision", url)
+ raise ParameterError("The fetcher for this URL does not support _latest_revision", ud.url)
revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
- key = self.generate_revision_key(url, ud, d, name)
+ key = self.generate_revision_key(ud, d, name)
try:
return revs[key]
except KeyError:
- revs[key] = rev = self._latest_revision(url, ud, d, name)
+ revs[key] = rev = self._latest_revision(ud, d, name)
return rev
- def sortable_revision(self, url, ud, d, name):
- latest_rev = self._build_revision(url, ud, d, name)
+ def sortable_revision(self, ud, d, name):
+ latest_rev = self._build_revision(ud, d, name)
return True, str(latest_rev)
- def generate_revision_key(self, url, ud, d, name):
- key = self._revision_key(url, ud, d, name)
+ def generate_revision_key(self, ud, d, name):
+ key = self._revision_key(ud, d, name)
return "%s-%s" % (key, d.getVar("PN", True) or "")
class Fetch(object):
try:
self.d.setVar("BB_NO_NETWORK", network)
- if os.path.exists(ud.donestamp) and not m.need_update(u, ud, self.d):
+ if os.path.exists(ud.donestamp) and not m.need_update(ud, self.d):
localpath = ud.localpath
- elif m.try_premirror(u, ud, self.d):
+ elif m.try_premirror(ud, self.d):
logger.debug(1, "Trying PREMIRRORS")
mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
localpath = try_mirrors(self.d, ud, mirrors, False)
os.chdir(self.d.getVar("DL_DIR", True))
firsterr = None
- if not localpath and ((not os.path.exists(ud.donestamp)) or m.need_update(u, ud, self.d)):
+ if not localpath and ((not os.path.exists(ud.donestamp)) or m.need_update(ud, self.d)):
try:
logger.debug(1, "Trying Upstream")
- m.download(u, ud, self.d)
+ m.download(ud, self.d)
if hasattr(m, "build_mirror_data"):
- m.build_mirror_data(u, ud, self.d)
+ m.build_mirror_data(ud, self.d)
localpath = ud.localpath
# early checksum verify, so that if checksum mismatched,
# fetcher still have chance to fetch from mirror
if not ret:
# Next try checking from the original uri, u
try:
- ret = m.checkstatus(u, ud, self.d)
+ ret = m.checkstatus(ud, self.d)
except:
# Finally, try checking uri, u, from MIRRORS
mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
from bb.fetch2 import logger
class Bzr(FetchMethod):
- def supports(self, url, ud, d):
+ def supports(self, ud, d):
return ud.type in ['bzr']
def urldata_init(self, ud, d):
return bzrcmd
- def download(self, loc, ud, d):
+ def download(self, ud, d):
"""Fetch url"""
if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK):
bzrcmd = self._buildbzrcommand(ud, d, "update")
- logger.debug(1, "BZR Update %s", loc)
+ logger.debug(1, "BZR Update %s", ud.url)
bb.fetch2.check_network_access(d, bzrcmd, ud.url)
os.chdir(os.path.join (ud.pkgdir, os.path.basename(ud.path)))
runfetchcmd(bzrcmd, d)
bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True)
bzrcmd = self._buildbzrcommand(ud, d, "fetch")
bb.fetch2.check_network_access(d, bzrcmd, ud.url)
- logger.debug(1, "BZR Checkout %s", loc)
+ logger.debug(1, "BZR Checkout %s", ud.url)
bb.utils.mkdirhier(ud.pkgdir)
os.chdir(ud.pkgdir)
logger.debug(1, "Running %s", bzrcmd)
def supports_srcrev(self):
return True
- def _revision_key(self, url, ud, d, name):
+ def _revision_key(self, ud, d, name):
"""
Return a unique key for the url
"""
return "bzr:" + ud.pkgdir
- def _latest_revision(self, url, ud, d, name):
+ def _latest_revision(self, ud, d, name):
"""
Return the latest upstream revision number
"""
- logger.debug(2, "BZR fetcher hitting network for %s", url)
+ logger.debug(2, "BZR fetcher hitting network for %s", ud.url)
bb.fetch2.check_network_access(d, self._buildbzrcommand(ud, d, "revno"), ud.url)
return output.strip()
- def sortable_revision(self, url, ud, d, name):
+ def sortable_revision(self, ud, d, name):
"""
Return a sortable revision number which in our case is the revision number
"""
- return False, self._build_revision(url, ud, d)
+ return False, self._build_revision(ud, d)
- def _build_revision(self, url, ud, d):
+ def _build_revision(self, ud, d):
return ud.revision
"""
Class to fetch a module or modules from cvs repositories
"""
- def supports(self, url, ud, d):
+ def supports(self, ud, d):
"""
Check to see if a given url can be fetched with cvs.
"""
ud.localfile = bb.data.expand('%s_%s_%s_%s%s%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.tag, ud.date, norecurse, fullpath), d)
- def need_update(self, url, ud, d):
+ def need_update(self, ud, d):
if (ud.date == "now"):
return True
if not os.path.exists(ud.localpath):
return True
return False
- def download(self, loc, ud, d):
+ def download(self, ud, d):
method = ud.parm.get('method', 'pserver')
localdir = ud.parm.get('localdir', ud.module)
pkgdir = os.path.join(d.getVar('CVSDIR', True), pkg)
moddir = os.path.join(pkgdir, localdir)
if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
- logger.info("Update " + loc)
+ logger.info("Update " + ud.url)
bb.fetch2.check_network_access(d, cvsupdatecmd, ud.url)
# update sources there
os.chdir(moddir)
cmd = cvsupdatecmd
else:
- logger.info("Fetch " + loc)
+ logger.info("Fetch " + ud.url)
# check out sources there
bb.utils.mkdirhier(pkgdir)
os.chdir(pkgdir)
def init(self, d):
pass
- def supports(self, url, ud, d):
+ def supports(self, ud, d):
"""
Check to see if a given url can be fetched with git.
"""
ud.localfile = ud.clonedir
- def localpath(self, url, ud, d):
+ def localpath(self, ud, d):
return ud.clonedir
- def need_update(self, u, ud, d):
+ def need_update(self, ud, d):
if not os.path.exists(ud.clonedir):
return True
os.chdir(ud.clonedir)
return True
return False
- def try_premirror(self, u, ud, d):
+ def try_premirror(self, ud, d):
# If we don't do this, updating an existing checkout with only premirrors
# is not possible
if d.getVar("BB_FETCH_PREMIRRORONLY", True) is not None:
return False
return True
- def download(self, loc, ud, d):
+ def download(self, ud, d):
"""Fetch url"""
if ud.user:
runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d)
ud.repochanged = True
- def build_mirror_data(self, url, ud, d):
+ def build_mirror_data(self, ud, d):
# Generate a mirror tarball if needed
if ud.write_tarballs and (ud.repochanged or not os.path.exists(ud.fullmirror)):
# it's possible that this symlink points to read-only filesystem with PREMIRROR
raise bb.fetch2.FetchError("The command '%s' gave output with more then 1 line unexpectedly, output: '%s'" % (cmd, output))
return output.split()[0] != "0"
- def _revision_key(self, url, ud, d, name):
+ def _revision_key(self, ud, d, name):
"""
Return a unique key for the url
"""
return "git:" + ud.host + ud.path.replace('/', '.') + ud.branches[name]
- def _latest_revision(self, url, ud, d, name):
+ def _latest_revision(self, ud, d, name):
"""
Compute the HEAD revision for the url
"""
bb.fetch2.check_network_access(d, cmd)
output = runfetchcmd(cmd, d, True)
if not output:
- raise bb.fetch2.FetchError("The command %s gave empty output unexpectedly" % cmd, url)
+ raise bb.fetch2.FetchError("The command %s gave empty output unexpectedly" % cmd, ud.url)
return output.split()[0]
- def _build_revision(self, url, ud, d, name):
+ def _build_revision(self, ud, d, name):
return ud.revisions[name]
- def checkstatus(self, uri, ud, d):
- fetchcmd = "%s ls-remote %s" % (ud.basecmd, uri)
+ def checkstatus(self, ud, d):
+ fetchcmd = "%s ls-remote %s" % (ud.basecmd, ud.url)
try:
runfetchcmd(fetchcmd, d, quiet=True)
return True
from bb.fetch2 import logger
class GitSM(Git):
- def supports(self, url, ud, d):
+ def supports(self, ud, d):
"""
Check to see if a given url can be fetched with git.
"""
pass
return False
- def update_submodules(self, u, ud, d):
+ def update_submodules(self, ud, d):
# We have to convert bare -> full repo, do the submodule bit, then convert back
tmpclonedir = ud.clonedir + ".tmp"
gitdir = tmpclonedir + os.sep + ".git"
os.rename(gitdir, ud.clonedir,)
bb.utils.remove(tmpclonedir, True)
- def download(self, loc, ud, d):
- Git.download(self, loc, ud, d)
+ def download(self, ud, d):
+ Git.download(self, ud, d)
os.chdir(ud.clonedir)
submodules = self.uses_submodules(ud, d)
if submodules:
- self.update_submodules(loc, ud, d)
+ self.update_submodules(ud, d)
def unpack(self, ud, destdir, d):
Git.unpack(self, ud, destdir, d)
class Hg(FetchMethod):
"""Class to fetch from mercurial repositories"""
- def supports(self, url, ud, d):
+ def supports(self, ud, d):
"""
Check to see if a given url can be fetched with mercurial.
"""
ud.localfile = data.expand('%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d)
- def need_update(self, url, ud, d):
+ def need_update(self, ud, d):
revTag = ud.parm.get('rev', 'tip')
if revTag == "tip":
return True
return cmd
- def download(self, loc, ud, d):
+ def download(self, ud, d):
"""Fetch url"""
logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
updatecmd = self._buildhgcommand(ud, d, "pull")
- logger.info("Update " + loc)
+ logger.info("Update " + ud.url)
# update sources there
os.chdir(ud.moddir)
logger.debug(1, "Running %s", updatecmd)
else:
fetchcmd = self._buildhgcommand(ud, d, "fetch")
- logger.info("Fetch " + loc)
+ logger.info("Fetch " + ud.url)
# check out sources there
bb.utils.mkdirhier(ud.pkgdir)
os.chdir(ud.pkgdir)
def supports_srcrev(self):
return True
- def _latest_revision(self, url, ud, d, name):
+ def _latest_revision(self, ud, d, name):
"""
Compute tip revision for the url
"""
output = runfetchcmd(self._buildhgcommand(ud, d, "info"), d)
return output.strip()
- def _build_revision(self, url, ud, d, name):
+ def _build_revision(self, ud, d, name):
return ud.revision
- def _revision_key(self, url, ud, d, name):
+ def _revision_key(self, ud, d, name):
"""
Return a unique key for the url
"""
from bb.fetch2 import logger
class Local(FetchMethod):
- def supports(self, url, urldata, d):
+ def supports(self, urldata, d):
"""
Check to see if a given url represents a local fetch.
"""
ud.basepath = ud.decodedurl
return
- def localpath(self, url, urldata, d):
+ def localpath(self, urldata, d):
"""
Return the local filename of a given url assuming a successful fetch.
"""
return dldirfile
return newpath
- def need_update(self, url, ud, d):
- if url.find("*") != -1:
+ def need_update(self, ud, d):
+ if ud.url.find("*") != -1:
return False
if os.path.exists(ud.localpath):
return False
return True
- def download(self, url, urldata, d):
+ def download(self, urldata, d):
"""Fetch urls (no-op for Local method)"""
# no need to fetch local files, we'll deal with them in place.
if self.supports_checksum(urldata) and not os.path.exists(urldata.localpath):
locations.append(filesdir)
locations.append(d.getVar("DL_DIR", True))
- msg = "Unable to find file " + url + " anywhere. The paths that were searched were:\n " + "\n ".join(locations)
+ msg = "Unable to find file " + urldata.url + " anywhere. The paths that were searched were:\n " + "\n ".join(locations)
raise FetchError(msg)
return True
- def checkstatus(self, url, urldata, d):
+ def checkstatus(self, urldata, d):
"""
Check the status of the url
"""
if urldata.localpath.find("*") != -1:
- logger.info("URL %s looks like a glob and was therefore not checked.", url)
+ logger.info("URL %s looks like a glob and was therefore not checked.", urldata.url)
return True
if os.path.exists(urldata.localpath):
return True
"""Class to fetch a module or modules from Opensuse build server
repositories."""
- def supports(self, url, ud, d):
+ def supports(self, ud, d):
"""
Check to see if a given url can be fetched with osc.
"""
return osccmd
- def download(self, loc, ud, d):
+ def download(self, ud, d):
"""
Fetch url
"""
if os.access(os.path.join(data.expand('${OSCDIR}', d), ud.path, ud.module), os.R_OK):
oscupdatecmd = self._buildosccommand(ud, d, "update")
- logger.info("Update "+ loc)
+ logger.info("Update "+ ud.url)
# update sources there
os.chdir(ud.moddir)
logger.debug(1, "Running %s", oscupdatecmd)
runfetchcmd(oscupdatecmd, d)
else:
oscfetchcmd = self._buildosccommand(ud, d, "fetch")
- logger.info("Fetch " + loc)
+ logger.info("Fetch " + ud.url)
# check out sources there
bb.utils.mkdirhier(ud.pkgdir)
os.chdir(ud.pkgdir)
from bb.fetch2 import runfetchcmd
class Perforce(FetchMethod):
- def supports(self, url, ud, d):
+ def supports(self, ud, d):
return ud.type in ['p4']
def doparse(url, d):
ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host, base.replace('/', '.'), cset), d)
- def download(self, loc, ud, d):
+ def download(self, ud, d):
"""
Fetch urls
"""
- (host, depot, user, pswd, parm) = Perforce.doparse(loc, d)
+ (host, depot, user, pswd, parm) = Perforce.doparse(ud.url, d)
if depot.find('/...') != -1:
path = depot[:depot.find('/...')]
tmpfile, errors = bb.process.run(data.getVar('MKTEMPDIRCMD', localdata, True) or "false")
tmpfile = tmpfile.strip()
if not tmpfile:
- raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", loc)
+ raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", ud.url)
if "label" in parm:
depot = "%s@%s" % (depot, parm["label"])
depot = "%s@%s" % (depot, cset)
os.chdir(tmpfile)
- logger.info("Fetch " + loc)
+ logger.info("Fetch " + ud.url)
logger.info("%s%s files %s", p4cmd, p4opt, depot)
p4file, errors = bb.process.run("%s%s files %s" % (p4cmd, p4opt, depot))
p4file = [f.rstrip() for f in p4file.splitlines()]
if not p4file:
- raise FetchError("Fetch: unable to get the P4 files from %s" % depot, loc)
+ raise FetchError("Fetch: unable to get the P4 files from %s" % depot, ud.url)
count = 0
if count == 0:
logger.error()
- raise FetchError("Fetch: No files gathered from the P4 fetch", loc)
+ raise FetchError("Fetch: No files gathered from the P4 fetch", ud.url)
runfetchcmd("tar -czf %s %s" % (ud.localpath, module), d, cleanup = [ud.localpath])
# cleanup
class Repo(FetchMethod):
"""Class to fetch a module or modules from repo (git) repositories"""
- def supports(self, url, ud, d):
+ def supports(self, ud, d):
"""
Check to see if a given url can be fetched with repo.
"""
ud.localfile = data.expand("repo_%s%s_%s_%s.tar.gz" % (ud.host, ud.path.replace("/", "."), ud.manifest, ud.branch), d)
- def download(self, loc, ud, d):
+ def download(self, ud, d):
"""Fetch url"""
if os.access(os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), os.R_OK):
def supports_srcrev(self):
return False
- def _build_revision(self, url, ud, d):
+ def _build_revision(self, ud, d):
return ud.manifest
- def _want_sortable_revision(self, url, ud, d):
+ def _want_sortable_revision(self, ud, d):
return False
class SFTP(FetchMethod):
"""Class to fetch urls via 'sftp'"""
- def supports(self, url, ud, d):
+ def supports(self, ud, d):
"""
Check to see if a given url can be fetched with sftp.
"""
ud.localfile = data.expand(urllib.unquote(ud.basename), d)
- def download(self, uri, ud, d):
+ def download(self, ud, d):
"""Fetch urls"""
- urlo = URI(uri)
+ urlo = URI(ud.url)
basecmd = 'sftp -oPasswordAuthentication=no'
port = ''
if urlo.port:
cmd = '%s %s %s %s' % (basecmd, port, commands.mkarg(remote),
commands.mkarg(lpath))
- bb.fetch2.check_network_access(d, cmd, uri)
+ bb.fetch2.check_network_access(d, cmd, ud.url)
runfetchcmd(cmd, d)
return True
class SSH(FetchMethod):
'''Class to fetch a module or modules via Secure Shell'''
- def supports(self, url, urldata, d):
- return __pattern__.match(url) != None
+ def supports(self, urldata, d):
+ return __pattern__.match(urldata.url) != None
def supports_checksum(self, urldata):
return False
host = m.group('host')
urldata.localpath = os.path.join(d.getVar('DL_DIR', True), os.path.basename(path))
- def download(self, url, urldata, d):
+ def download(self, urldata, d):
dldir = d.getVar('DL_DIR', True)
- m = __pattern__.match(url)
+ m = __pattern__.match(urldata.url)
path = m.group('path')
host = m.group('host')
port = m.group('port')
class Svk(FetchMethod):
"""Class to fetch a module or modules from svk repositories"""
- def supports(self, url, ud, d):
+ def supports(self, ud, d):
"""
Check to see if a given url can be fetched with svk.
"""
ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d)
- def need_update(self, url, ud, d):
+ def need_update(self, ud, d):
if ud.date == "now":
return True
if not os.path.exists(ud.localpath):
return True
return False
- def download(self, loc, ud, d):
+ def download(self, ud, d):
"""Fetch urls"""
svkroot = ud.host + ud.path
tmpfile = tmpfile.strip()
if not tmpfile:
logger.error()
- raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", loc)
+ raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", ud.url)
# check out sources there
os.chdir(tmpfile)
- logger.info("Fetch " + loc)
+ logger.info("Fetch " + ud.url)
logger.debug(1, "Running %s", svkcmd)
runfetchcmd(svkcmd, d, cleanup = [tmpfile])
class Svn(FetchMethod):
"""Class to fetch a module or modules from svn repositories"""
- def supports(self, url, ud, d):
+ def supports(self, ud, d):
"""
Check to see if a given url can be fetched with svn.
"""
return svncmd
- def download(self, loc, ud, d):
+ def download(self, ud, d):
"""Fetch url"""
logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
svnupdatecmd = self._buildsvncommand(ud, d, "update")
- logger.info("Update " + loc)
+ logger.info("Update " + ud.url)
# update sources there
os.chdir(ud.moddir)
# We need to attempt to run svn upgrade first in case its an older working format
runfetchcmd(svnupdatecmd, d)
else:
svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
- logger.info("Fetch " + loc)
+ logger.info("Fetch " + ud.url)
# check out sources there
bb.utils.mkdirhier(ud.pkgdir)
os.chdir(ud.pkgdir)
def supports_srcrev(self):
return True
- def _revision_key(self, url, ud, d, name):
+ def _revision_key(self, ud, d, name):
"""
Return a unique key for the url
"""
return "svn:" + ud.moddir
- def _latest_revision(self, url, ud, d, name):
+ def _latest_revision(self, ud, d, name):
"""
Return the latest upstream revision number
"""
return revision
- def sortable_revision(self, url, ud, d, name):
+ def sortable_revision(self, ud, d, name):
"""
Return a sortable revision number which in our case is the revision number
"""
- return False, self._build_revision(url, ud, d)
+ return False, self._build_revision(ud, d)
- def _build_revision(self, url, ud, d):
+ def _build_revision(self, ud, d):
return ud.revision
class Wget(FetchMethod):
"""Class to fetch urls via 'wget'"""
- def supports(self, url, ud, d):
+ def supports(self, ud, d):
"""
Check to see if a given url can be fetched with wget.
"""
ud.localfile = data.expand(urllib.unquote(ud.basename), d)
- def download(self, uri, ud, d, checkonly = False):
+ def download(self, ud, d, checkonly = False):
"""Fetch urls"""
basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -t 2 -T 30 -nv --passive-ftp --no-check-certificate"
else:
fetchcmd = d.getVar("FETCHCOMMAND_wget", True) or d.expand(basecmd + " -P ${DL_DIR} '${URI}'")
- uri = uri.split(";")[0]
+ uri = ud.url.split(";")[0]
fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
return True
- def checkstatus(self, uri, ud, d):
- return self.download(uri, ud, d, True)
+ def checkstatus(self, ud, d):
+ return self.download(ud, d, True)