# Development tool - upgrade command plugin
#
# Copyright (C) 2014-2017 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#
"""Devtool upgrade plugin"""

import os
import sys
import re
import shutil
import tempfile
import logging
import argparse
import scriptutils
import errno
import bb

devtool_path = os.path.dirname(os.path.realpath(__file__)) + '/../../../meta/lib'
sys.path = sys.path + [devtool_path]

import oe.recipeutils
from devtool import standard
from devtool import exec_build_env_command, setup_tinfoil, DevtoolError, parse_recipe, use_external_build, update_unlockedsigs, check_prerelease_version

logger = logging.getLogger('devtool')

def _run(cmd, cwd=''):
    """Run a shell command via bb.process, logging it at debug level first"""
    logger.debug("Running command %s> %s" % (cwd, cmd))
    return bb.process.run('%s' % cmd, cwd=cwd)

def _get_srctree(tmpdir):
    """Return the single directory unpacked under tmpdir"""
    srctree = tmpdir
    dirs = os.listdir(tmpdir)
    if len(dirs) == 1:
        srctree = os.path.join(tmpdir, dirs[0])
    else:
        raise DevtoolError("Cannot determine where the source tree is after unpacking in {}: {}".format(tmpdir, dirs))
    return srctree

def _copy_source_code(orig, dest):
    for path in standard._ls_tree(orig):
        dest_dir = os.path.join(dest, os.path.dirname(path))
        bb.utils.mkdirhier(dest_dir)
        dest_path = os.path.join(dest, path)
        shutil.move(os.path.join(orig, path), dest_path)

def _remove_patch_dirs(recipefolder):
    for root, dirs, files in os.walk(recipefolder):
        for d in dirs:
            shutil.rmtree(os.path.join(root, d))

def _recipe_contains(rd, var):
    rf = rd.getVar('FILE')
    varfiles = oe.recipeutils.get_var_files(rf, [var], rd)
    for var, fn in varfiles.items():
        if fn and fn.startswith(os.path.dirname(rf) + os.sep):
            return True
    return False

def _rename_recipe_dirs(oldpv, newpv, path):
    for root, dirs, files in os.walk(path):
        # Rename directories with the version in their name
        for olddir in dirs:
            if olddir.find(oldpv) != -1:
                newdir = olddir.replace(oldpv, newpv)
                if olddir != newdir:
                    shutil.move(os.path.join(path, olddir), os.path.join(path, newdir))
        # Rename any inc files with the version in their name (unusual, but possible)
        for oldfile in files:
            if oldfile.endswith('.inc'):
                if oldfile.find(oldpv) != -1:
                    newfile = oldfile.replace(oldpv, newpv)
                    if oldfile != newfile:
                        bb.utils.rename(os.path.join(path, oldfile),
                                        os.path.join(path, newfile))

def _rename_recipe_file(oldrecipe, pn, oldpv, newpv, path):
    oldrecipe = os.path.basename(oldrecipe)
    if oldrecipe.endswith('_%s.bb' % oldpv):
        newrecipe = '%s_%s.bb' % (pn, newpv)
        if oldrecipe != newrecipe:
            shutil.move(os.path.join(path, oldrecipe), os.path.join(path, newrecipe))
    else:
        newrecipe = oldrecipe
    return os.path.join(path, newrecipe)

def _rename_recipe_files(oldrecipe, pn, oldpv, newpv, path):
    _rename_recipe_dirs(oldpv, newpv, path)
    return _rename_recipe_file(oldrecipe, pn, oldpv, newpv, path)

def _write_append(rc, srctreebase, srctree, same_dir, no_same_dir, revs, copied, workspace, d):
    """Write the workspace bbappend file for the upgraded recipe"""
    if not os.path.exists(rc):
        raise DevtoolError("bbappend not created because %s does not exist" % rc)

    appendpath = os.path.join(workspace, 'appends')
    if not os.path.exists(appendpath):
        bb.utils.mkdirhier(appendpath)

    brf = os.path.basename(os.path.splitext(rc)[0]) # rc basename

    srctree = os.path.abspath(srctree)
    pn = d.getVar('PN')
    af = os.path.join(appendpath, '%s.bbappend' % brf)
    with open(af, 'w') as f:
        f.write('FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n\n')
        # Local files can be modified/tracked in separate subdir under srctree
        # Mostly useful for packages with S != WORKDIR
        f.write('FILESPATH:prepend := "%s:"\n' %
                os.path.join(srctreebase, 'oe-local-files'))
        f.write('# srctreebase: %s\n' % srctreebase)
        f.write('inherit externalsrc\n')
        f.write(('# NOTE: We use pn- overrides here to avoid affecting '
                 'multiple variants in the case where the recipe uses BBCLASSEXTEND\n'))
        f.write('EXTERNALSRC:pn-%s = "%s"\n' % (pn, srctree))
        b_is_s = use_external_build(same_dir, no_same_dir, d)
        if b_is_s:
            f.write('EXTERNALSRC_BUILD:pn-%s = "%s"\n' % (pn, srctree))
        f.write('\n')
        if revs:
            for name, rev in revs.items():
                f.write('# initial_rev %s: %s\n' % (name, rev))
        if copied:
            f.write('# original_path: %s\n' % os.path.dirname(d.getVar('FILE')))
            f.write('# original_files: %s\n' % ' '.join(copied))
    return af

def _cleanup_on_error(rd, srctree):
    if os.path.exists(rd):
        shutil.rmtree(rd)
    srctree = os.path.abspath(srctree)
    if os.path.exists(srctree):
        shutil.rmtree(srctree)

def _upgrade_error(e, rd, srctree, keep_failure=False, extramsg=None):
    if not keep_failure:
        _cleanup_on_error(rd, srctree)
    logger.error(e)
    if extramsg:
        logger.error(extramsg)
    if keep_failure:
        logger.info('Preserving failed upgrade files (--keep-failure)')
    sys.exit(1)

def _get_uri(rd):
    """Return the first remote SRC_URI entry and its revision (if any)"""
    srcuris = rd.getVar('SRC_URI').split()
    if not srcuris:
        raise DevtoolError('SRC_URI not found on recipe')
    # Get first non-local entry in SRC_URI - usually by convention it's
    # the first entry, but not always!
    srcuri = None
    for entry in srcuris:
        if not entry.startswith('file://'):
            srcuri = entry
            break
    if not srcuri:
        raise DevtoolError('Unable to find non-local entry in SRC_URI')
    srcrev = '${AUTOREV}'
    if '://' in srcuri:
        # Fetch a URL
        rev_re = re.compile(';rev=([^;]+)')
        res = rev_re.search(srcuri)
        if res:
            srcrev = res.group(1)
            srcuri = rev_re.sub('', srcuri)
    return srcuri, srcrev

def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, keep_temp, tinfoil, rd):
    """Extract sources of a recipe with a new version"""
    import oe.patch

    def __run(cmd):
        """Simple wrapper which calls _run with srctree as cwd"""
        return _run(cmd, srctree)

    crd = rd.createCopy()

    pv = crd.getVar('PV')
    crd.setVar('PV', newpv)

    tmpsrctree = None
    uri, rev = _get_uri(crd)
    if srcrev:
        rev = srcrev
    paths = [srctree]
    if uri.startswith('git://') or uri.startswith('gitsm://'):
        __run('git fetch')
        __run('git checkout %s' % rev)
        __run('git tag -f --no-sign devtool-base-new')
        __run('git submodule update --recursive')
        __run('git submodule foreach \'git tag -f --no-sign devtool-base-new\'')
        (stdout, _) = __run('git submodule --quiet foreach \'echo $sm_path\'')
        paths += [os.path.join(srctree, p) for p in stdout.splitlines()]
        checksums = {}
        _, _, _, _, _, params = bb.fetch2.decodeurl(uri)
        srcsubdir_rel = params.get('destsuffix', 'git')
        if not srcbranch:
            check_branch, check_branch_err = __run('git branch -r --contains %s' % srcrev)
            get_branch = [x.strip() for x in check_branch.splitlines()]
            # Remove HEAD reference point and drop remote prefix
            get_branch = [x.split('/', 1)[1] for x in get_branch if not x.startswith('origin/HEAD')]
            if len(get_branch) == 1:
                # If srcrev is on only ONE branch, then use that branch
                srcbranch = get_branch[0]
            elif 'main' in get_branch:
                # If srcrev is on multiple branches, then choose 'main' if it is one of them
                srcbranch = 'main'
            elif 'master' in get_branch:
                # Otherwise choose 'master' if it is one of the branches
                srcbranch = 'master'
            else:
                # If get_branch contains more than one branch, display an error and exit
                mbrch = '\n ' + '\n '.join(get_branch)
                raise DevtoolError('Revision %s was found on multiple branches: %s\nPlease provide the correct branch in the devtool command with "--srcbranch" or "-B" option.' % (srcrev, mbrch))
    else:
        __run('git checkout devtool-base -b devtool-%s' % newpv)

        tmpdir = tempfile.mkdtemp(prefix='devtool')
        try:
            checksums, ftmpdir = scriptutils.fetch_url(tinfoil, uri, rev, tmpdir, logger, preserve_tmp=keep_temp)
        except scriptutils.FetchUrlFailure as e:
            raise DevtoolError(e)

        if ftmpdir and keep_temp:
            logger.info('Fetch temp directory is %s' % ftmpdir)

        tmpsrctree = _get_srctree(tmpdir)
        srctree = os.path.abspath(srctree)
        srcsubdir_rel = os.path.relpath(tmpsrctree, tmpdir)

        # Delete all sources so we ensure no stray files are left over
        for item in os.listdir(srctree):
            if item in ['.git', 'oe-local-files']:
                continue
            itempath = os.path.join(srctree, item)
            if os.path.isdir(itempath):
                shutil.rmtree(itempath)
            else:
                os.remove(itempath)

        # Copy in new ones
        _copy_source_code(tmpsrctree, srctree)

        (stdout, _) = __run('git ls-files --modified --others')
        filelist = stdout.splitlines()
        pbar = bb.ui.knotty.BBProgress('Adding changed files', len(filelist))
        pbar.start()
        batchsize = 100
        for i in range(0, len(filelist), batchsize):
            batch = filelist[i:i+batchsize]
            __run('git add -f -A %s' % ' '.join(['"%s"' % item for item in batch]))
            pbar.update(i)
        pbar.finish()

        useroptions = []
        oe.patch.GitApplyTree.gitCommandUserOptions(useroptions, d=rd)
        __run('git %s commit -q -m "Commit of upstream changes at version %s" --allow-empty' % (' '.join(useroptions), newpv))
        __run('git tag -f --no-sign devtool-base-%s' % newpv)

    revs = {}
    for path in paths:
        (stdout, _) = _run('git rev-parse HEAD', cwd=path)
        revs[os.path.relpath(path, srctree)] = stdout.rstrip()

    if no_patch:
        patches = oe.recipeutils.get_recipe_patches(crd)
        if patches:
            logger.warning('By user choice, the following patches will NOT be applied to the new source tree:\n %s' % '\n '.join([os.path.basename(patch) for patch in patches]))
    else:
        for path in paths:
            _run('git checkout devtool-patched -b %s' % branch, cwd=path)
            (stdout, _) = _run('git branch --list devtool-override-*', cwd=path)
            branches_to_rebase = [branch] + stdout.split()
            target_branch = revs[os.path.relpath(path, srctree)]

            # There is a bug (or feature?) in git rebase where if a commit with
            # a note is fully rebased away by being part of an old commit, the
            # note is still attached to the old commit. Avoid this by making
            # sure all old devtool related commits have a note attached to them
            # (this assumes git config notes.rewriteMode is set to ignore).
            (stdout, _) = __run('git rev-list devtool-base..%s' % target_branch)
            for rev in stdout.splitlines():
                if not oe.patch.GitApplyTree.getNotes(path, rev):
                    oe.patch.GitApplyTree.addNote(path, rev, "dummy")

            for b in branches_to_rebase:
                logger.info("Rebasing {} onto {}".format(b, target_branch))
                _run('git checkout %s' % b, cwd=path)
                try:
                    _run('git rebase %s' % target_branch, cwd=path)
                except bb.process.ExecutionError as e:
                    if 'conflict' in e.stdout:
                        logger.warning('Command \'%s\' failed:\n%s\n\nYou will need to resolve conflicts in order to complete the upgrade.' % (e.command, e.stdout.rstrip()))
                        _run('git rebase --abort', cwd=path)
                    else:
                        logger.warning('Command \'%s\' failed:\n%s' % (e.command, e.stdout))

            # Remove any dummy notes added above.
            (stdout, _) = __run('git rev-list devtool-base..%s' % target_branch)
            for rev in stdout.splitlines():
                oe.patch.GitApplyTree.removeNote(path, rev, "dummy")

            _run('git checkout %s' % branch, cwd=path)

    if tmpsrctree:
        if keep_temp:
            logger.info('Preserving temporary directory %s' % tmpsrctree)
        else:
            shutil.rmtree(tmpsrctree)
            if tmpdir != tmpsrctree:
                shutil.rmtree(tmpdir)

    return (revs, checksums, srcbranch, srcsubdir_rel)

def _add_license_diff_to_recipe(path, diff):
    notice_text = """# FIXME: the LIC_FILES_CHKSUM values have been updated by 'devtool upgrade'.
# The following is the difference between the old and the new license text.
# Please update the LICENSE value if needed, and summarize the changes in
# the commit message via 'License-Update:' tag.
# (example: 'License-Update: copyright years updated.')
#
# The changes:
#
"""
    commented_diff = "\n".join(["# {}".format(l) for l in diff.split('\n')])
    with open(path, 'rb') as f:
        orig_content = f.read()
    with open(path, 'wb') as f:
        f.write(notice_text.encode())
        f.write(commented_diff.encode())
        f.write("\n#\n\n".encode())
        f.write(orig_content)

def _create_new_recipe(newpv, checksums, srcrev, srcbranch, srcsubdir_old, srcsubdir_new, workspace, tinfoil, rd, license_diff, new_licenses, srctree, keep_failure):
    """Creates the new recipe under workspace"""

    pn = rd.getVar('PN')
    path = os.path.join(workspace, 'recipes', pn)
    bb.utils.mkdirhier(path)
    copied, _ = oe.recipeutils.copy_recipe_files(rd, path, all_variants=True)
    if not copied:
        raise DevtoolError('Internal error - no files were copied for recipe %s' % pn)
    logger.debug('Copied %s to %s' % (copied, path))

    oldpv = rd.getVar('PV')
    if not newpv:
        newpv = oldpv
    origpath = rd.getVar('FILE')
    fullpath = _rename_recipe_files(origpath, pn, oldpv, newpv, path)
    logger.debug('Upgraded %s => %s' % (origpath, fullpath))

    newvalues = {}
    if _recipe_contains(rd, 'PV') and newpv != oldpv:
        newvalues['PV'] = newpv

    if srcrev:
        newvalues['SRCREV'] = srcrev

    if srcbranch:
        src_uri = oe.recipeutils.split_var_value(rd.getVar('SRC_URI', False) or '')
        changed = False
        replacing = True
        new_src_uri = []
        for entry in src_uri:
            try:
                scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(entry)
            except bb.fetch2.MalformedUrl as e:
                raise DevtoolError("Could not decode SRC_URI: {}".format(e))
            if replacing and scheme in ['git', 'gitsm']:
                branch = params.get('branch', 'master')
                if rd.expand(branch) != srcbranch:
                    # Handle case where branch is set through a variable
                    res = re.match(r'\$\{([^}@]+)\}', branch)
                    if res:
                        newvalues[res.group(1)] = srcbranch
                        # We know we won't change SRC_URI now, so break out
                        break
                    else:
                        params['branch'] = srcbranch
                        entry = bb.fetch2.encodeurl((scheme, network, path, user, passwd, params))
                        changed = True
                replacing = False
            new_src_uri.append(entry)
        if changed:
            newvalues['SRC_URI'] = ' '.join(new_src_uri)

    newvalues['PR'] = None

    # Work out which SRC_URI entries have changed in case the entry uses a name
    crd = rd.createCopy()
    crd.setVar('PV', newpv)
    for var, value in newvalues.items():
        crd.setVar(var, value)
    old_src_uri = (rd.getVar('SRC_URI') or '').split()
    new_src_uri = (crd.getVar('SRC_URI') or '').split()
    newnames = []
    addnames = []
    for newentry in new_src_uri:
        _, _, _, _, _, params = bb.fetch2.decodeurl(newentry)
        if 'name' in params:
            newnames.append(params['name'])
            if newentry not in old_src_uri:
                addnames.append(params['name'])
    # Find what's been set in the original recipe
    oldnames = []
    oldsums = []
    noname = False
    for varflag in rd.getVarFlags('SRC_URI'):
        for checksum in checksums:
            if varflag.endswith('.' + checksum):
                name = varflag.rsplit('.', 1)[0]
                if name not in oldnames:
                    oldnames.append(name)
                oldsums.append(checksum)
            elif varflag == checksum:
                noname = True
                oldsums.append(checksum)
    # Even if SRC_URI has named entries it doesn't have to actually use the name
    if noname and addnames and addnames[0] not in oldnames:
        addnames = []
    # Drop any old names (the name actually might include ${PV})
    for name in oldnames:
        if name not in newnames:
            for checksum in oldsums:
                newvalues['SRC_URI[%s.%s]' % (name, checksum)] = None

    nameprefix = '%s.' % addnames[0] if addnames else ''

    # md5sum is deprecated, remove any traces of it. If it was the only old
    # checksum, then replace it with the default checksums.
    if 'md5sum' in oldsums:
        newvalues['SRC_URI[%smd5sum]' % nameprefix] = None
        oldsums.remove('md5sum')
        if not oldsums:
            oldsums = ["%ssum" % s for s in bb.fetch2.SHOWN_CHECKSUM_LIST]

    for checksum in oldsums:
        newvalues['SRC_URI[%s%s]' % (nameprefix, checksum)] = checksums[checksum]

    if srcsubdir_new != srcsubdir_old:
        s_subdir_old = os.path.relpath(os.path.abspath(rd.getVar('S')), rd.getVar('WORKDIR'))
        s_subdir_new = os.path.relpath(os.path.abspath(crd.getVar('S')), crd.getVar('WORKDIR'))
        if srcsubdir_old == s_subdir_old and srcsubdir_new != s_subdir_new:
            # Subdir for old extracted source matches what S points to (it should!)
            # but subdir for new extracted source doesn't match what S will be
            newvalues['S'] = '${WORKDIR}/%s' % srcsubdir_new.replace(newpv, '${PV}')
            if crd.expand(newvalues['S']) == crd.expand('${WORKDIR}/${BP}'):
                # It's the default, drop it
                # FIXME what if S is being set in a .inc?
                newvalues['S'] = None
                logger.info('Source subdirectory has changed, dropping S value since it now matches the default ("${WORKDIR}/${BP}")')
            else:
                logger.info('Source subdirectory has changed, updating S value')

    if license_diff:
        newlicchksum = " ".join(["file://{}".format(l['path']) +
                                 (";beginline={}".format(l['beginline']) if l['beginline'] else "") +
                                 (";endline={}".format(l['endline']) if l['endline'] else "") +
                                 (";md5={}".format(l['actual_md5'])) for l in new_licenses])
        newvalues["LIC_FILES_CHKSUM"] = newlicchksum
        _add_license_diff_to_recipe(fullpath, license_diff)

    tinfoil.modified_files()
    try:
        rd = tinfoil.parse_recipe_file(fullpath, False)
    except bb.tinfoil.TinfoilCommandFailed as e:
        _upgrade_error(e, os.path.dirname(fullpath), srctree, keep_failure, 'Parsing of upgraded recipe failed')
    oe.recipeutils.patch_recipe(rd, fullpath, newvalues)

    return fullpath, copied


def _check_git_config():
    """Ensure git user.name and user.email are set (required for the rebases performed during upgrade)"""
    def getconfig(name):
        try:
            value = bb.process.run('git config %s' % name)[0].strip()
        except bb.process.ExecutionError as e:
            if e.exitcode == 1:
                value = None
            else:
                raise
        return value

    username = getconfig('user.name')
    useremail = getconfig('user.email')
    configerr = []
    if not username:
        configerr.append('Please set your name using:\n git config --global user.name')
    if not useremail:
        configerr.append('Please set your email using:\n git config --global user.email')
    if configerr:
        raise DevtoolError('Your git configuration is incomplete which will prevent rebases from working:\n' + '\n'.join(configerr))

def _extract_licenses(srcpath, recipe_licenses):
    licenses = []
    for url in recipe_licenses.split():
        license = {}
        (type, host, path, user, pswd, parm) = bb.fetch.decodeurl(url)
        license['path'] = path
        license['md5'] = parm.get('md5', '')
        license['beginline'], license['endline'] = 0, 0
        if 'beginline' in parm:
            license['beginline'] = int(parm['beginline'])
        if 'endline' in parm:
            license['endline'] = int(parm['endline'])
        license['text'] = []
        with open(os.path.join(srcpath, path), 'rb') as f:
            import hashlib
            actual_md5 = hashlib.md5()
            lineno = 0
            for line in f:
                lineno += 1
                if (lineno >= license['beginline']) and ((lineno <= license['endline']) or not license['endline']):
                    license['text'].append(line.decode(errors='ignore'))
                    actual_md5.update(line)
        license['actual_md5'] = actual_md5.hexdigest()
        licenses.append(license)
    return licenses

def _generate_license_diff(old_licenses, new_licenses):
    need_diff = False
    for l in new_licenses:
        if l['md5'] != l['actual_md5']:
            need_diff = True
            break
    if not need_diff:
        return None

    import difflib
    diff = ''
    for old, new in zip(old_licenses, new_licenses):
        for line in difflib.unified_diff(old['text'], new['text'], old['path'], new['path']):
            diff = diff + line
    return diff

def _run_recipe_upgrade_extra_tasks(pn, rd, tinfoil):
    tasks = []
    for task in (rd.getVar('RECIPE_UPGRADE_EXTRA_TASKS') or '').split():
        logger.info('Running extra recipe upgrade task: %s' % task)
        res = tinfoil.build_targets(pn, task, handle_events=True)

        if not res:
            raise DevtoolError('Running extra recipe upgrade task %s for %s failed' % (task, pn))

def upgrade(args, config, basepath, workspace):
    """Entry point for the devtool 'upgrade' subcommand"""

    if args.recipename in workspace:
        raise DevtoolError("recipe %s is already in your workspace" % args.recipename)
    if args.srcbranch and not args.srcrev:
        raise DevtoolError("If you specify --srcbranch/-B then you must use --srcrev/-S to specify the revision")

    _check_git_config()

    tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
    try:
        rd = parse_recipe(config, tinfoil, args.recipename, True)
        if not rd:
            return 1

        pn = rd.getVar('PN')
        if pn != args.recipename:
            logger.info('Mapping %s to %s' % (args.recipename, pn))
        if pn in workspace:
            raise DevtoolError("recipe %s is already in your workspace" % pn)

        if args.srctree:
            srctree = os.path.abspath(args.srctree)
        else:
            srctree = standard.get_default_srctree(config, pn)

        srctree_s = standard.get_real_srctree(srctree, rd.getVar('S'), rd.getVar('UNPACKDIR'))

        # try to automatically discover latest version and revision if not provided on command line
        if not args.version and not args.srcrev:
            version_info = oe.recipeutils.get_recipe_upstream_version(rd)
            if version_info['version'] and not version_info['version'].endswith("new-commits-available"):
                args.version = version_info['version']
            if version_info['revision']:
                args.srcrev = version_info['revision']
        if not args.version and not args.srcrev:
            raise DevtoolError("Automatic discovery of latest version/revision failed - you must provide a version using the --version/-V option, or for recipes that fetch from an SCM such as git, the --srcrev/-S option.")

        standard._check_compatible_recipe(pn, rd)
        old_srcrev = rd.getVar('SRCREV')
        if old_srcrev == 'INVALID':
            old_srcrev = None
        if old_srcrev and not args.srcrev:
            raise DevtoolError("Recipe specifies a SRCREV value; you must specify a new one when upgrading")
        old_ver = rd.getVar('PV')
        if old_ver == args.version and old_srcrev == args.srcrev:
            raise DevtoolError("Current and upgrade versions are the same")
        if args.version:
            if bb.utils.vercmp_string(args.version, old_ver) < 0:
                logger.warning('Upgrade version %s compares as less than the current version %s. If you are using a package feed for on-target upgrades or providing this recipe for general consumption, then you should increment PE in the recipe (or if there is no current PE value set, set it to "1")' % (args.version, old_ver))
            check_prerelease_version(args.version, 'devtool upgrade')

        rf = None
        license_diff = None
        try:
            logger.info('Extracting current version source...')
            rev1, srcsubdir1 = standard._extract_source(srctree, False, 'devtool-orig', False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides)
            old_licenses = _extract_licenses(srctree_s, (rd.getVar('LIC_FILES_CHKSUM') or ""))
            logger.info('Extracting upgraded version source...')
            rev2, checksums, srcbranch, srcsubdir2 = _extract_new_source(args.version, srctree, args.no_patch,
                                                                         args.srcrev, args.srcbranch, args.branch, args.keep_temp,
                                                                         tinfoil, rd)
            new_licenses = _extract_licenses(srctree_s, (rd.getVar('LIC_FILES_CHKSUM') or ""))
            license_diff = _generate_license_diff(old_licenses, new_licenses)
            rf, copied = _create_new_recipe(args.version, checksums, args.srcrev, srcbranch, srcsubdir1, srcsubdir2, config.workspace_path, tinfoil, rd, license_diff, new_licenses, srctree, args.keep_failure)
        except (bb.process.CmdError, DevtoolError) as e:
            recipedir = os.path.join(config.workspace_path, 'recipes', rd.getVar('PN'))
            _upgrade_error(e, recipedir, srctree, args.keep_failure)
        standard._add_md5(config, pn, os.path.dirname(rf))

        af = _write_append(rf, srctree, srctree_s, args.same_dir, args.no_same_dir, rev2,
                           copied, config.workspace_path, rd)
        standard._add_md5(config, pn, af)

        _run_recipe_upgrade_extra_tasks(pn, rd, tinfoil)

        update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn])

        logger.info('Upgraded source extracted to %s' % srctree)
        logger.info('New recipe is %s' % rf)
        if license_diff:
            logger.info('License checksums have been updated in the new recipe; please refer to it for the difference between the old and the new license texts.')
        preferred_version = rd.getVar('PREFERRED_VERSION_%s' % rd.getVar('PN'))
        if preferred_version:
            logger.warning('Version is pinned to %s via PREFERRED_VERSION; it may need adjustment to match the new version before any further steps are taken' % preferred_version)
    finally:
        tinfoil.shutdown()
    return 0

def latest_version(args, config, basepath, workspace):
    """Entry point for the devtool 'latest-version' subcommand"""
    tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
    try:
        rd = parse_recipe(config, tinfoil, args.recipename, True)
        if not rd:
            return 1
        version_info = oe.recipeutils.get_recipe_upstream_version(rd)
        # "new-commits-available" is an indication that upstream never issues version tags
        if not version_info['version'].endswith("new-commits-available"):
            logger.info("Current version: {}".format(version_info['current_version']))
            logger.info("Latest version: {}".format(version_info['version']))
            if version_info['revision']:
                logger.info("Latest version's commit: {}".format(version_info['revision']))
        else:
            logger.info("Latest commit: {}".format(version_info['revision']))
    finally:
        tinfoil.shutdown()
    return 0

def check_upgrade_status(args, config, basepath, workspace):
    """Entry point for the devtool 'check-upgrade-status' subcommand"""
    def _print_status(recipe):
        print("{:25} {:15} {:15} {} {} {}".format(recipe['pn'],
                                                  recipe['cur_ver'],
                                                  recipe['status'] if recipe['status'] != 'UPDATE' else (recipe['next_ver'] if not recipe['next_ver'].endswith("new-commits-available") else "new commits"),
                                                  recipe['maintainer'],
                                                  recipe['revision'] if recipe['revision'] != 'N/A' else "",
                                                  "cannot be updated due to: %s" % (recipe['no_upgrade_reason']) if recipe['no_upgrade_reason'] else ""))
    if not args.recipe:
        logger.info("Checking the upstream status for all recipes may take a few minutes")
    results = oe.recipeutils.get_recipe_upgrade_status(args.recipe)
    for recipegroup in results:
        upgrades = [r for r in recipegroup if r['status'] != 'MATCH']
        currents = [r for r in recipegroup if r['status'] == 'MATCH']
        if len(upgrades) > 1:
            print("These recipes need to be upgraded together {")
        for r in sorted(upgrades, key=lambda r: r['pn']):
            _print_status(r)
        if len(upgrades) > 1:
            print("}")
        for r in currents:
            if args.all:
                _print_status(r)

def register_commands(subparsers, context):
    """Register devtool subcommands from this plugin"""

    defsrctree = standard.get_default_srctree(context.config)

    parser_upgrade = subparsers.add_parser('upgrade', help='Upgrade an existing recipe',
                                           description='Upgrades an existing recipe to a new upstream version. Puts the upgraded recipe file into the workspace along with any associated files, and extracts the source tree to a specified location (in case patches need rebasing or adding to as a result of the upgrade).',
                                           group='starting')
    parser_upgrade.add_argument('recipename', help='Name of recipe to upgrade (just name - no version, path or extension)')
    parser_upgrade.add_argument('srctree', nargs='?', help='Path to where to extract the source tree. If not specified, a subdirectory of %s will be used.' % defsrctree)
    parser_upgrade.add_argument('--version', '-V', help='Version to upgrade to (PV). If omitted, the latest upstream version will be determined and used, if possible.')
    parser_upgrade.add_argument('--srcrev', '-S', help='Source revision to upgrade to (useful when fetching from an SCM such as git)')
    parser_upgrade.add_argument('--srcbranch', '-B', help='Branch in source repository containing the revision to use (if fetching from an SCM such as git)')
    parser_upgrade.add_argument('--branch', '-b', default="devtool", help='Name for new development branch to checkout (default "%(default)s")')
    parser_upgrade.add_argument('--no-patch', action="store_true", help='Do not apply patches from the recipe to the new source code')
    parser_upgrade.add_argument('--no-overrides', '-O', action="store_true", help='Do not create branches for other override configurations')
    group = parser_upgrade.add_mutually_exclusive_group()
    group.add_argument('--same-dir', '-s', help='Build in same directory as source', action="store_true")
    group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true")
    parser_upgrade.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)')
    parser_upgrade.add_argument('--keep-failure', action="store_true", help='Keep failed upgrade recipe and associated files (for debugging)')
    parser_upgrade.set_defaults(func=upgrade, fixed_setup=context.fixed_setup)

    parser_latest_version = subparsers.add_parser('latest-version', help='Report the latest version of an existing recipe',
                                                  description='Queries the upstream server for what the latest upstream release is (for git, tags are checked, for tarballs, a list of them is obtained, and one with the highest version number is reported)',
                                                  group='info')
    parser_latest_version.add_argument('recipename', help='Name of recipe to query (just name - no version, path or extension)')
    parser_latest_version.set_defaults(func=latest_version)

    parser_check_upgrade_status = subparsers.add_parser('check-upgrade-status', help="Report upgradability for multiple (or all) recipes",
                                                        description="Prints a table of recipes together with versions currently provided by recipes, and latest upstream versions, when there is a later version available",
                                                        group='info')
    parser_check_upgrade_status.add_argument('recipe', help='Name of the recipe to report (omit to report upgrade info for all recipes)', nargs='*')
    parser_check_upgrade_status.add_argument('--all', '-a', help='Show all recipes, not just recipes needing upgrade', action="store_true")
    parser_check_upgrade_status.set_defaults(func=check_upgrade_status)
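
# Illustrative command-line usage of the subcommands registered above (see each
# parser's help text for the authoritative option list):
#   devtool upgrade <recipename> [srctree] -V <new-version> [-S <srcrev>] [-B <srcbranch>]
#   devtool latest-version <recipename>
#   devtool check-upgrade-status [recipe ...] [--all]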