From bdc9e59d98d38c7de40108fb16271d59280689ed Mon Sep 17 00:00:00 2001
From: Jaroslav Kysela
Date: Thu, 27 Sep 2018 18:35:51 +0200
Subject: [PATCH] try to move build caching to pcloud

---
 Makefile          |   4 +-
 Makefile.ffmpeg   |   8 +-
 Makefile.static   |  26 +--
 configure         |   2 +-
 support/lib.sh    |  72 ++++----
 support/pcloud.py | 426 ++++++++++++++++++++++++++++++++++++++++++++++
 6 files changed, 486 insertions(+), 52 deletions(-)
 create mode 100755 support/pcloud.py

diff --git a/Makefile b/Makefile
index 169c0f289..a02675ced 100644
--- a/Makefile
+++ b/Makefile
@@ -842,7 +842,7 @@ ${BUILDDIR}/libffmpeg_stamp: ${BUILDDIR}/ffmpeg/build/ffmpeg/lib/libavcodec.a
 	@touch $@
 
 ${BUILDDIR}/ffmpeg/build/ffmpeg/lib/libavcodec.a: Makefile.ffmpeg
-ifeq ($(CONFIG_BINTRAY_CACHE),yes)
+ifeq ($(CONFIG_PCLOUD_CACHE),yes)
 	$(MAKE) -f Makefile.ffmpeg libcacheget
 endif
 	$(MAKE) -f Makefile.ffmpeg
@@ -857,7 +857,7 @@ ${BUILDDIR}/libhdhomerun_stamp: ${BUILDDIR}/hdhomerun/libhdhomerun/libhdhomerun.
 	@touch $@
 
 ${BUILDDIR}/hdhomerun/libhdhomerun/libhdhomerun.a: Makefile.hdhomerun
-ifeq ($(CONFIG_BINTRAY_CACHE),yes)
+ifeq ($(CONFIG_PCLOUD_CACHE),yes)
 	$(MAKE) -f Makefile.hdhomerun libcacheget
 endif
 	$(MAKE) -f Makefile.hdhomerun
diff --git a/Makefile.ffmpeg b/Makefile.ffmpeg
index b4ea28810..ae2175dc2 100644
--- a/Makefile.ffmpeg
+++ b/Makefile.ffmpeg
@@ -691,7 +691,13 @@ $(LIB_ROOT)/$(FFMPEG)/.tvh_build: \
 		$(foreach muxer,$(MUXERS),--enable-muxer=$(muxer)) \
 		$(foreach bsf,$(BSFS),--enable-bsf=$(bsf)) \
 		$(foreach filter,$(FILTERS),--enable-filter=$(filter)) \
-		$(foreach hwaccel,$(HWACCELS),--enable-hwaccel=$(hwaccel))
+		$(foreach hwaccel,$(HWACCELS),--enable-hwaccel=$(hwaccel)) \
+		--disable-programs \
+		--disable-doc \
+		--disable-htmlpages \
+		--disable-manpages \
+		--disable-podpages \
+		--disable-txtpages
 	DESTDIR=$(EBUILDIR) \
 		$(MAKE) -C $(LIB_ROOT)/$(FFMPEG) install
 	@touch $@
diff --git a/Makefile.static b/Makefile.static
index c5155600b..5ec22aa8d 100644
--- a/Makefile.static
+++ b/Makefile.static
@@ -29,20 +29,21 @@
 # Required inputs
 #
 # LIB_NAME       - The name of the library used in files uploaded/downloaded
-# LIB_FILES      - The files to be packaged
+# LIB_FILES      - The files to be packaged
 #
 # Optional inputs
 #
-# BINTRAY_CACHE  - Use cached builds from bintray
-# BINTRAY_USER   - The bintray user account for uploads
-# BINTRAY_PASS   - The bintray password
-# BINTRAY_REPO   - The repo to upload to
+# PCLOUD_CACHE   - Use cached builds from pcloud
+# PCLOUD_USER    - The pcloud user account for uploads
+# PCLOUD_PASS    - The pcloud password
+# PCLOUD_BASEDIR - The base directory for uploads
+# PCLOUD_HASHDIR - The public hash for the base directory for downloads
 #
 
 MAKEFILE ?= $(firstword $(MAKEFILE_LIST))
 
 # ./configure override
-BINTRAY_CACHE ?= $(CONFIG_BINTRAY_CACHE)
+PCLOUD_CACHE ?= $(CONFIG_PCLOUD_CACHE)
 
 default: libcacheput
 
@@ -52,17 +53,18 @@ export CODENAME
 export ROOTDIR
 export BUILDDIR
 export LIBDIR
-export BINTRAY_CACHE
-export BINTRAY_USER
-export BINTRAY_PASS
-export BINTRAY_REPO
+export PCLOUD_CACHE
+export PCLOUD_USER
+export PCLOUD_PASS
+export PCLOUD_BASEDIR
+export PCLOUD_HASHDIR
 
 LIB_STATIC_STAMP := $(BUILDDIR)/$(LIB_NAME)_static_stamp
 
 # Download cache
 .PHONY: libcacheget
 libcacheget:
-ifeq ($(BINTRAY_CACHE),yes)
+ifeq ($(PCLOUD_CACHE),yes)
 	@( $(ROOTDIR)/support/lib.sh download $(LIB_NAME) &&\
 	   $(ROOTDIR)/support/lib.sh unpack $(LIB_NAME) ) || true
 	@touch $(BUILDDIR)/$(LIB_NAME)_static_stamp
@@ -71,7 +73,7 @@ endif
 
 # Download and Upload on demand
 .PHONY: libcacheput
 libcacheput: build
-ifneq ($(BINTRAY_USER),)
+ifneq ($(PCLOUD_USER),)
 	@$(ROOTDIR)/support/lib.sh upload $(LIB_NAME) $(LIB_FILES)
 endif
diff --git a/configure b/configure
index 9f34ac36f..a4b901b61 100755
--- a/configure
+++ b/configure
@@ -72,7 +72,7 @@ OPTIONS=(
   "gtimer_check:no"
   "slow_memoryinfo:no"
   "libsystemd_daemon:no"
-  "bintray_cache:yes"
+  "pcloud_cache:yes"
   "ddci:yes"
   "cclang_threadsan:no"
   "gperftools:no"
diff --git a/support/lib.sh b/support/lib.sh
index 9d72bbb24..d20134f7d 100755
--- a/support/lib.sh
+++ b/support/lib.sh
@@ -35,8 +35,9 @@
 # Config
 #
 ############################################################################
 
-[ -z "$BINTRAY_REPO" ] && BINTRAY_REPO=tvheadend/misc
-[ -z "$ARCH" ] && ARCH=$(uname -m)
+[ -z "$PCLOUD_BASEDIR" ] && PCLOUD_BASEDIR=misc
+[ -z "$PCLOUD_HASHDIR" ] && PCLOUD_HASHDIR=kZ54ee7ZUvsSYmb9VGSpnmoVzcAUhpBXLq8k
+[ -z "$ARCH" ] && ARCH=$(uname -m)
 if [ -z "$CODENAME" ]; then
   CODENAME=$(lsb_release -irs 2> /dev/null)
   if [ -z "$CODENAME" -a -f /etc/lsb-release ]; then
@@ -51,7 +52,7 @@ if [ -z "$CODENAME" ]; then
     CODENAME="unknown"
   fi
 fi
-CODENAME=$(echo "$CODENAME" | tr '\n' ' ' | sed -e 's/[[:blank:]]*$//g' -e 's: :%20:g')
+CODENAME=$(echo "$CODENAME" | tr '\n' ' ' | sed -e 's/[[:blank:]]*$//g')
 
 # Convert amd64 to x86_64 (ensure uniformity)
 [ "${ARCH}" = "amd64" ] && ARCH=x86_64
@@ -67,13 +68,13 @@ CODENAME=$(echo "$CODENAME" | tr '\n' ' ' | sed -e 's/[[:blank:]]*$//g' -e 's: :
 
 function hash ()
 {
-  LIB_NAME=$1
+  LIB_NAME="$1"
   T="
 $(cat ${ROOTDIR}/Makefile.${LIB_NAME})
 $(grep ^CONFIG_ ${ROOTDIR}/.config.mk)
 "
-  H=$(echo ${T} | sha1sum | cut -d' ' -f1)
-  echo ${H}
+  H=$(echo "${T}" | sha1sum | cut -d' ' -f1)
+  echo "${H}"
 }
 
 #
@@ -81,36 +82,36 @@ $(grep ^CONFIG_ ${ROOTDIR}/.config.mk)
 #
 function download
 {
-  LIB_NAME=$1
+  LIB_NAME="$1"
   LIB_HASH=$(hash ${LIB_NAME})
-  P=${BUILDDIR}/.${LIB_NAME}-${LIB_HASH}.tgz
+  P="${BUILDDIR}/.${LIB_NAME}-${LIB_HASH}.tgz"
 
   # Cleanup
-  rm -f ${BUILDDIR}/.${LIB_NAME}*.tmp
+  rm -f "${BUILDDIR}/.${LIB_NAME}"*.tmp
 
   # Already downloaded
-  [ -f ${P} ] && return 0
+  [ -f "${P}" ] && return 0
 
   # Create directory
-  [ ! -d ${BUILDDIR} ] && mkdir -p ${BUILDDIR}
+  [ ! -d "${BUILDDIR}" ] && mkdir -p "${BUILDDIR}"
 
   # Attempt to fetch
-  N=staticlib/${CODENAME}/${ARCH}/${LIB_NAME}-${LIB_HASH}.tgz
-  URL="https://dl.bintray.com/${BINTRAY_REPO}/${N}"
-  echo "DOWNLOAD ${URL}"
-  wget -O ${P}.tmp "${URL}"
+  N="${PCLOUD_BASEDIR}/staticlib/${CODENAME}/${ARCH}/${LIB_NAME}-${LIB_HASH}.tgz"
+
+  echo "DOWNLOAD ${N} / ${PCLOUD_HASHDIR}"
+  ${ROOTDIR}/support/pcloud.py publink_download "${PCLOUD_HASHDIR}" "${N}" "${P}.tmp"
+
   R=$?
 
-  # Don't know why but having weird issue with curl not returning data
   # Failed
   if [ ${R} -ne 0 ]; then
     echo "FAILED TO DOWNLOAD ${URL} (BUT THIS IS NOT A FATAL ERROR! DO NOT REPORT THAT!)"
-    rm -f ${P}.tmp
+    rm -f "${P}.tmp"
     return ${R}
   fi
 
   # Move tmp file
-  mv ${P}.tmp ${P} || return 1
+  mv "${P}.tmp" "${P}" || return 1
 
   return ${R}
 }
@@ -120,30 +121,30 @@ function download
 #
 function unpack
 {
-  LIB_NAME=$1
+  LIB_NAME="$1"
   LIB_HASH=$(hash ${LIB_NAME})
-  P=${BUILDDIR}/.${LIB_NAME}-${LIB_HASH}.tgz
-  U=${BUILDDIR}/.${LIB_NAME}.unpack
+  P="${BUILDDIR}/.${LIB_NAME}-${LIB_HASH}.tgz"
+  U="${BUILDDIR}/.${LIB_NAME}.unpack"
 
   # Not downloaded
-  [ ! -f ${P} ] && return 1
+  [ ! -f "${P}" ] && return 1
 
   # Already unpacked?
-  if [ -f ${U} ]; then
-    H=$(cat ${U})
-    [ "${LIB_HASH}" = ${H} ] && return 0
+  if [ -f "${U}" ]; then
+    H=$(cat "${U}")
+    [ "${LIB_HASH}" = "${H}" ] && return 0
   fi
 
   # Cleanup
-  rm -rf ${BUILDDIR}/${LIB_NAME} || return 1
-  mkdir -p ${BUILDDIR}/${LIB_NAME} || return 1
+  rm -rf "${BUILDDIR}/${LIB_NAME}" || return 1
+  mkdir -p "${BUILDDIR}/${LIB_NAME}" || return 1
 
   # Unpack
   echo "UNPACK ${P}"
-  tar -C ${BUILDDIR}/${LIB_NAME} -xf ${P} || return 1
+  tar -C "${BUILDDIR}/${LIB_NAME}" -xf "${P}" || return 1
 
   # Record
-  echo ${LIB_HASH} > ${U}
+  echo "${LIB_HASH}" > "${U}"
 }
 
 #
@@ -154,10 +155,10 @@ function upload
   LIB_NAME=$1; shift
   LIB_FILES=$*
   LIB_HASH=$(hash ${LIB_NAME})
-  P=${BUILDDIR}/.${LIB_NAME}-${LIB_HASH}.tgz
+  P="${BUILDDIR}/.${LIB_NAME}-${LIB_HASH}.tgz"
 
   # Can't upload
-  [ -z "${BINTRAY_USER}" -o -z "${BINTRAY_PASS}" ] && return 0
+  [ -z "${PCLOUD_USER}" -o -z "${PCLOUD_PASS}" ] && return 0
 
   # Don't need to upload
   [ -f "${P}" ] && return 0
@@ -170,13 +171,12 @@ function upload
   tar -C ${BUILDDIR}/${LIB_NAME} -zcf ${P}.tmp ${LIB_FILES} || return 1
 
   # Upload
-  N=staticlib/${CODENAME}/${ARCH}/${LIB_NAME}-${LIB_HASH}.tgz
-  BPATH="/content/${BINTRAY_REPO}/staticlib/${LIB_NAME}-${LIB_HASH}/${N};publish=1"
-  echo "UPLOAD ${BPATH}"
-  ${ROOTDIR}/support/bintray.py upload ${BPATH} ${P}.tmp || return 1
+  N="${PCLOUD_BASEDIR}/staticlib/${CODENAME}/${ARCH}/${LIB_NAME}-${LIB_HASH}.tgz"
+  echo "UPLOAD ${N}"
+  ${ROOTDIR}/support/pcloud.py upload "${N}" "${P}.tmp" || return 1
 
   # Done
-  mv ${P}.tmp ${P} || return 1
+  mv "${P}.tmp" "${P}" || return 1
 }
 
 # Run command
diff --git a/support/pcloud.py b/support/pcloud.py
new file mode 100755
index 000000000..91468647a
--- /dev/null
+++ b/support/pcloud.py
@@ -0,0 +1,426 @@
+#! /usr/bin/env python
+
+#
+# TVH pcloud tool, compatible with both python2 and python3
+#
+# GPLv2 licence, forked code from
+# https://raw.githubusercontent.com/tomgross/pycloud/master/src/pcloud/api.py
+#
+
+import os
+import sys
+import traceback
+import requests
+import json
+from hashlib import sha1
+from io import BytesIO
+from os.path import basename
+
+def env(key):
+    if key in os.environ: return os.environ[key]
+    return None
+
+PCLOUD_USER=env('PCLOUD_USER')
+PCLOUD_PASS=env('PCLOUD_PASS')
+
+DEBUG=False
+
+# File open flags https://docs.pcloud.com/methods/fileops/file_open.html
+O_WRITE = int('0x0002', 16)
+O_CREAT = int('0x0040', 16)
+O_EXCL = int('0x0080', 16)
+O_TRUNC = int('0x0200', 16)
+O_APPEND = int('0x0400', 16)
+
+def error(lvl, msg, *args):
+    sys.stderr.write(msg % args + '\n')
+    sys.exit(lvl)
+
+def split_path(path):
+    r = []
+    while 1:
+        path, folder = os.path.split(path)
+        if folder != '':
+            r.append(folder)
+        else:
+            if path != '':
+                r.append(path)
+            break
+    r.reverse()
+    return r
+
+def pcloud_normpath(path):
+    if not path:
+        return '/'
+    if path[0] != '/':
+        path = '/' + path
+    return path
+
+def pcloud_extract_publink_data(text):
+    text = text.decode('utf-8')
+    pos = text.find('var publinkData = {')
+    if pos < 0: raise(ContentsError)
+    text = text[pos+18:]
+    pos = text.find('};')
+    if pos < 0: raise(ContentsError)
+    text = text[:pos+1]
+    return json.loads(text)
+
+# Exceptions
+class AuthenticationError(Exception):
+    """ Authentication failed """
+
+# Exceptions
+class ContentsError(Exception):
+    """ Invalid publink contents """
+
+# Validation
+class RequiredParameterCheck(object):
+    """ A decorator that checks that one of the required parameters is present
+    """
+
+    def __init__(self, required):
+        self.required = required
+
+    def __call__(self, func):
+        def wrapper(*args, **kwargs):
+            found_parameter = False
+            for req in self.required:
+                if req in kwargs:
+                    found_parameter = True
+                    break
+            if found_parameter:
+                return func(*args, **kwargs)
+            else:
+                raise ValueError('One required parameter of `%s` is missing' %
+                                 ', '.join(self.required))
+        wrapper.__name__ = func.__name__
+        wrapper.__dict__.update(func.__dict__)
+        wrapper.__doc__ = func.__doc__
+        return wrapper
+
+class PyCloud(object):
+
+    endpoint = 'https://api.pcloud.com/'
+
+    def __init__(self, username, password):
+        self.username = username.lower().encode('utf-8')
+        self.password = password.encode('utf-8')
+        self.session = requests.Session()
+        self.auth_token = self.get_auth_token()
+
+    def _do_request(self, method, authenticate=True, json=True, **kw):
+        if authenticate:
+            params = {'auth': self.auth_token}
+        else:
+            params = {}
+        params.update(kw)
+        #log.debug('Doing request to %s%s', self.endpoint, method)
+        #log.debug('Params: %s', params)
+        resp = self.session.get(self.endpoint + method, params=params, timeout=30)
+        if json:
+            return resp.json()
+        else:
+            return resp.content
+
+    # Authentication
+    def getdigest(self):
+        resp = self._do_request('getdigest', authenticate=False)
+        try:
+            return bytes(resp['digest'], 'utf-8')
+        except:
+            return bytes(resp['digest'])
+
+    def get_auth_token(self):
+        digest = self.getdigest()
+        try:
+            uhash = bytes(sha1(self.username).hexdigest(), 'utf-8')
+        except:
+            uhash = bytes(sha1(self.username).hexdigest())
+        passworddigest = sha1(self.password + uhash + digest)
+        params = {
+            'getauth': 1,
+            'logout': 1,
+            'username': self.username.decode('utf-8'),
+            'digest': digest.decode('utf-8'),
+            'passworddigest': passworddigest.hexdigest()}
+        resp = self._do_request('userinfo', authenticate=False, **params)
+        if 'auth' not in resp:
+            raise(AuthenticationError)
+        return resp['auth']
+
+    # Folders
+    @RequiredParameterCheck(('path', 'folderid'))
+    def createfolder(self, **kwargs):
+        return self._do_request('createfolder', **kwargs)
+
+    @RequiredParameterCheck(('path', 'folderid'))
+    def listfolder(self, **kwargs):
+        return self._do_request('listfolder', **kwargs)
+
+    @RequiredParameterCheck(('path', 'folderid'))
+    def renamefolder(self, **kwargs):
+        return self._do_request('renamefolder', **kwargs)
+
+    @RequiredParameterCheck(('path', 'folderid'))
+    def deletefolder(self, **kwargs):
+        return self._do_request('deletefolder', **kwargs)
+
+    @RequiredParameterCheck(('path', 'folderid'))
+    def deletefolderrecursive(self, **kwargs):
+        return self._do_request('deletefolderrecursive', **kwargs)
+
+    def _upload(self, method, files, **kwargs):
+        kwargs['auth'] = self.auth_token
+        resp = self.session.post(
+            self.endpoint + method,
+            files=files,
+            data=kwargs)
+        return resp.json()
+
+    @RequiredParameterCheck(('files', 'data'))
+    def uploadfile(self, **kwargs):
+        """ upload a file to pCloud
+
+            1) You can specify a list of filenames to read
+            files=[('/home/pcloud/foo.txt', 'foo-on-cloud.txt'),
+                   ('/home/pcloud/bar.txt', 'bar-on-cloud.txt')]
+
+            2) you can specify binary data via the data parameter and
+            need to specify the filename too
+            data='Hello pCloud', filename='foo.txt'
+        """
+        if 'files' in kwargs:
+            files = {}
+            upload_files = kwargs.pop('files')
+            for f in upload_files:
+                filename = basename(f[1])
+                files[filename] = (filename, open(f[0], 'rb'))
+                kwargs['filename'] = filename
+        else:  # 'data' in kwargs:
+            files = {'f': (kwargs.pop('filename'), kwargs.pop('data'))}
+        return self._upload('uploadfile', files, **kwargs)
+
+    @RequiredParameterCheck(('progresshash',))
+    def uploadprogress(self, **kwargs):
+        return self._do_request('uploadprogress', **kwargs)
+
+    @RequiredParameterCheck(('path', 'folderid'))
+    def downloadfile(self, **kwargs):
+        return self._do_request('downloadfile', **kwargs)
+
+    def copyfile(self, **kwargs):
+        pass
+
+    @RequiredParameterCheck(('path', 'fileid'))
+    def checksumfile(self, **kwargs):
+        return self._do_request('checksumfile', **kwargs)
+
+    @RequiredParameterCheck(('path', 'fileid'))
+    def deletefile(self, **kwargs):
+        return self._do_request('deletefile', **kwargs)
+
+    def renamefile(self, **kwargs):
+        return self._do_request('renamefile', **kwargs)
+
+    # Auth API methods
+    def sendverificationemail(self, **kwargs):
+        return self._do_request('sendverificationemail', **kwargs)
+
+    def verifyemail(self, **kwargs):
+        return self._do_request('verifyemail', **kwargs)
+
+    def changepassword(self, **kwargs):
+        return self._do_request('changepassword', **kwargs)
+
+    def lostpassword(self, **kwargs):
+        return self._do_request('lostpassword', **kwargs)
+
+    def resetpassword(self, **kwargs):
+        return self._do_request('resetpassword', **kwargs)
+
+    def register(self, **kwargs):
+        return self._do_request('register', **kwargs)
+
+    def invite(self, **kwargs):
+        return self._do_request('invite', **kwargs)
+
+    def userinvites(self, **kwargs):
+        return self._do_request('userinvites', **kwargs)
+
+    def logout(self, **kwargs):
+        return self._do_request('logout', **kwargs)
+
+    def listtokens(self, **kwargs):
+        return self._do_request('listtokens', **kwargs)
+
+    def deletetoken(self, **kwargs):
+        return self._do_request('deletetoken', **kwargs)
+
+    # File API methods
+    @RequiredParameterCheck(('flags',))
+    def file_open(self, **kwargs):
+        return self._do_request('file_open', **kwargs)
+
+    @RequiredParameterCheck(('fd',))
+    def file_read(self, **kwargs):
+        return self._do_request('file_read', json=False, **kwargs)
+
+    @RequiredParameterCheck(('fd',))
+    def file_pread(self, **kwargs):
+        return self._do_request('file_pread', json=False, **kwargs)
+
+    @RequiredParameterCheck(('fd', 'data'))
+    def file_pread_ifmod(self, **kwargs):
+        return self._do_request('file_pread_ifmod', json=False, **kwargs)
+
+    @RequiredParameterCheck(('fd',))
+    def file_size(self, **kwargs):
+        return self._do_request('file_size', **kwargs)
+
+    @RequiredParameterCheck(('fd',))
+    def file_truncate(self, **kwargs):
+        return self._do_request('file_truncate', **kwargs)
+
+    @RequiredParameterCheck(('fd', 'data'))
+    def file_write(self, **kwargs):
+        files = {'filename': BytesIO(kwargs['data'])}
+        return self._upload('file_write', files, **kwargs)
+
+    @RequiredParameterCheck(('fd',))
+    def file_pwrite(self, **kwargs):
+        return self._do_request('file_pwrite', **kwargs)
+
+    @RequiredParameterCheck(('fd',))
+    def file_checksum(self, **kwargs):
+        return self._do_request('file_checksum', **kwargs)
+
+    @RequiredParameterCheck(('fd',))
+    def file_seek(self, **kwargs):
+        return self._do_request('file_seek', **kwargs)
+
+    @RequiredParameterCheck(('fd',))
+    def file_close(self, **kwargs):
+        return self._do_request('file_close', **kwargs)
+
+    @RequiredParameterCheck(('fd',))
+    def file_lock(self, **kwargs):
+        return self._do_request('file_lock', **kwargs)
+
+def simple(method, **kwargs):
+    if not PCLOUD_USER or not PCLOUD_PASS:
+        error(2, 'No credentials')
+    pcloud = PyCloud(PCLOUD_USER, PCLOUD_PASS)
+    return getattr(pcloud, method)(**kwargs)
+
+def do_listfolder(*args):
+    kwargs={'path':'/'}
+    if len(args) > 0: kwargs['path'] = pcloud_normpath(args[0])
+    r = simple('listfolder', **kwargs)
+    if r['result']:
+        error(10, 'Unable to list folder %s (%s: %s)', args[0], r['result'], r['error'])
+    m = r['metadata']
+    print('Modified:', m['modified'])
+    for i in m['contents']:
+        print(repr(i))
+
+def do_createfolder(*args):
+    if len(args) < 1: error(1, 'createfolder [path]')
+    path = pcloud_normpath(args[0])
+    r = simple('createfolder', path=path)
+    if r['result'] == 2004: # Folder already exists
+        return 0
+    if r['result']:
+        error(10, 'Unable to create folder %s (%s: %s)', path, r['result'], r['error'])
+
+def do_upload(*args):
+    if len(args) < 2: error(1, 'upload [full path] [source file]')
+    path = pcloud_normpath(args[0])
+    path, file = os.path.split(path)
+    r = simple('uploadfile', path=path, files=[(args[1], file)], nopartial=1)
+    if r['result'] == 2005: # directory does not exist
+        s = split_path(path)
+        s.reverse()
+        p = ''
+        while s:
+            p += '/' + s.pop()
+            if p != '//':
+                do_createfolder(p)
+            else:
+                p = '/'
+        r = simple('uploadfile', path=path, files=[(args[1], file)], nopartial=1)
+    if r['result']:
+        error(10, 'Unable to upload %s to %s (%s: %s)', args[1], path, r['result'], r['error'])
+
+def do_publink_download(*args):
+    if len(args) < 3: error(1, 'download [root-hash] [full path] [output path]')
+    session = requests.Session()
+    path = pcloud_normpath(args[1])
+    resp = session.get('https://my.pcloud.com/publink/show?code=%s' % args[0], timeout=30)
+    if resp.status_code != 200:
+        error(10, 'Unable to retrieve publink %s', args[0])
+    pdata = pcloud_extract_publink_data(resp.content)
+    meta = pdata['metadata']
+    if not meta:
+        error(10, 'No metadata, object probably does not exist!')
+    s = split_path(path[1:])[:-1]
+    s.reverse()
+    name = s.pop()
+    if meta['name'] != name:
+        error(10, 'Root folder name does not match ("%s" - "%s")', meta['name'], name)
+    ctx = meta['contents']
+    fctx = None
+    while s:
+        name = s.pop()
+        found = 0
+        for item in ctx:
+            if name == item['name']:
+                if 'contents' in item:
+                    ctx = item['contents']
+                else:
+                    fctx = item
+                    ctx = []
+                found = 1
+                break
+        if not found:
+            error(10, 'Folder name "%s" not found', name)
+    if not fctx:
+        error(10, 'Filename "%s" not found', path)
+    resp = session.get('https://api.pcloud.com/getpublinkdownload?fileid=%s&hashCache=%s&code=%s' % (fctx['fileid'], fctx['hash'], args[0]), timeout=30)
+    if resp.status_code != 200:
+        error(10, 'Unable to get file json for "%s"!' % path)
+    j = resp.json()
+    if len(j['hosts']) <= 0:
+        error(10, 'No hosts?')
+    for idx in range(len(j['hosts'])):
+        resp = session.get('https://%s%s' % (j['hosts'][idx], j['path']), timeout=30)
+        if resp.status_code == 200:
+            break
+    if resp.status_code != 200:
+        error(10, 'Unable to retrieve file content for "%s"!' % path)
+    if len(resp.content) == 0:
+        error(10, 'Empty')
+    fp = open(args[2], sys.version_info[0] < 3 and "w+" or "bw+")
+    fp.write(resp.content)
+    fp.close()
+
+def do_unknown(*args):
+    r = 'Please, specify a valid command:\n'
+    for n in globals():
+        if n.startswith('do_') and n != 'do_unknown':
+            r += '  ' + n[3:] + '\n'
+    error(1, r[:-1])
+
+def main(argv):
+    global DEBUG
+    if len(argv) > 1 and argv[1] == '--debug':
+        DEBUG=1
+        argv.pop(0)
+    cmd = 'do_' + (len(argv) > 1 and argv[1] or 'unknown')
+    if cmd in globals():
+        globals()[cmd](*argv[2:])
+    else:
+        do_unknown()
+
+if __name__ == "__main__":
+    main(sys.argv)
-- 
2.47.2
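
A minimal sketch of how the cache flow above can be exercised by hand, assuming the patch is applied and ./configure has been run with the (default) pcloud_cache option enabled. The make targets, script paths, environment variables and the default PCLOUD_HASHDIR value come from the changes above; the account, password and test file names are placeholders only:

    # anonymous fetch of a cached static build through the public link hash
    # (this is what the main Makefile triggers via "make -f Makefile.ffmpeg libcacheget")
    make -f Makefile.ffmpeg libcacheget

    # uploads only run when credentials are present; lib.sh returns early otherwise
    export PCLOUD_USER='builder@example.com'   # placeholder account
    export PCLOUD_PASS='secret'                # placeholder password

    # the helper can also be driven directly (placeholder file names)
    ./support/pcloud.py listfolder /misc/staticlib
    ./support/pcloud.py upload misc/staticlib/test.tgz /tmp/test.tgz
    ./support/pcloud.py publink_download kZ54ee7ZUvsSYmb9VGSpnmoVzcAUhpBXLq8k \
        misc/staticlib/test.tgz /tmp/test-downloaded.tgz

Download and unpack are deliberately best-effort: lib.sh swallows failures so a cache miss only costs a full rebuild, while uploads stay opt-in via PCLOUD_USER/PCLOUD_PASS.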