From 6a46220293f0faa825c631f0a57a5edbdd13d9f7 Mon Sep 17 00:00:00 2001
From: Ralf Hubert
Date: Wed, 10 Apr 2019 13:23:40 +0200
Subject: [PATCH 1/2] archive: added JFrog Artifactory backend

The JFrog Artifactory backend uses the dohq-artifactory Python library
and supports the authentication methods (username+password, API key,
global configuration file) provided by this library.
---
 doc/manual/configuration.rst |  10 +-
 pym/bob/archive.py           | 232 ++++++++++++++++++++++++++++++++++-
 pym/bob/input.py             |  10 +-
 3 files changed, 248 insertions(+), 4 deletions(-)

diff --git a/doc/manual/configuration.rst b/doc/manual/configuration.rst
index 545df8a81..91ac47acb 100644
--- a/doc/manual/configuration.rst
+++ b/doc/manual/configuration.rst
@@ -1515,6 +1515,12 @@ the following table for supported backends and their configuration.
 Backend     Description
 =========== ===================================================================
 none        Do not use a binary repository (default).
+artifactory JFrog Artifactory backend. Use the ``url`` keyword to provide the
+            repository URL. The optional keys ``username`` and ``key`` specify
+            the user and the API key or password. Without ``username`` and
+            ``key``, either no authentication or the global Artifactory
+            configuration file is used. See the dohq-artifactory documentation
+            for details.
 azure       Microsoft Azure Blob storage backend. The account must be specified
             in the ``account`` key. Either a ``key`` or a ``sasToken`` may be
             set to authenticate, otherwise an anonymous access is used.
@@ -1535,8 +1541,8 @@ shell       This backend can be used to execute commands that do the actual up-
             example below for a possible use with ``scp``.
 =========== ===================================================================
 
-The directory layouts of the ``azure``, ``file``, ``http`` and ``shell``
-(``$BOB_REMOTE_ARTIFACT``) backends are compatible. If multiple download
+The directory layouts of the ``artifactory``, ``azure``, ``file``, ``http`` and
+``shell`` (``$BOB_REMOTE_ARTIFACT``) backends are compatible. If multiple download
 backends are available they will be tried in order until a matching artifact
 is found. All available upload backends are used for uploading artifacts. Any
 failing upload will fail the whole build.
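A minimal user configuration for the new backend could look like the sketch
below. This is an illustration only: the URL is the example repository used in
the second patch, user name and key are placeholders, and both may be omitted
to fall back to anonymous access or the global Artifactory configuration file.

    archive:
      backend: "artifactory"
      url: "http://localhost:8081/artifactory/example-repo-local"
      username: "builder"              # placeholder, optional
      key: "<API key or password>"     # placeholder, optional
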
diff --git a/pym/bob/archive.py b/pym/bob/archive.py index 2c463cd83..faad21f78 100644 --- a/pym/bob/archive.py +++ b/pym/bob/archive.py @@ -21,7 +21,7 @@ from .errors import BuildError from .tty import stepAction, SKIPPED, EXECUTED, WARNING, INFO, TRACE, ERROR -from .utils import asHexStr, removePath, isWindows +from .utils import asHexStr, hashFile, removePath, isWindows from pipes import quote from tempfile import mkstemp, NamedTemporaryFile, TemporaryFile import argparse @@ -983,6 +983,230 @@ def __upload(self): except AzureException as e: raise ArtifactUploadError(str(e)) +class ArtifactoryArchive(BaseArchive): + def __init__(self, spec=None): + if spec: + super().__init__(spec) + self.__url = spec['url'] + self.__username = spec.get('username', None) + self.__key = spec.get('key', None) + try: + from artifactory import ArtifactoryPath + except ImportError: + raise BuildError("dohq-artifactory Python3 library not installed!") + + def setArgs(self, args): + self.__url = args.url + self.__username = args.username + self.__key = args.key + + @staticmethod + def __makeBlobName(buildId, suffix): + packageResultId = buildIdToName(buildId) + return "/".join([packageResultId[0:2], packageResultId[2:4], + packageResultId[4:] + suffix]) + + def _remoteName(self, buildId, suffix): + return "{}/{}".format(self.__url, self.__makeBlobName(buildId, suffix)) + + def _makeArtifactoryPath(self, blobName): + from artifactory import ArtifactoryPath + + if self.__username and self.__key: + return ArtifactoryPath( + "{}/{}".format(self.__url, blobName.replace(os.sep, '_')), + auth=(self.__username, self.__key)) + elif self.__key: + return ArtifactoryPath( + "{}/{}".format(self.__url, blobName.replace(os.sep, '_')), + apikey=self.__key) + else: + return ArtifactoryPath( + "{}/{}".format(self.__url, blobName.replace(os.sep, '_'))) + + def _openDownloadFile(self, buildId, suffix): + from artifactory import ArtifactoryPath + try: + fd = self._makeArtifactoryPath(self.__makeBlobName(buildId,suffix)).open() + return ArtifactoryDownloader(fd) + except RuntimeError as e: + raise ArtifactDownloadError(str(e)) + + def _openUploadFile(self, buildId, suffix): + blobName = self.__makeBlobName(buildId,suffix) + from artifactory import ArtifactoryPath + try: + fd = self._makeArtifactoryPath(blobName) + try: + fd.open() + if fd.exists(): + raise ArtifactExistsError() + except RuntimeError: + pass + except RuntimeError as e: + raise ArtifactUploadError(str(e)) + (tmpFd, tmpName) = mkstemp() + os.close(tmpFd) + return ArtifactoryUploader(self, fd, tmpName, blobName) + + def upload(self, step, buildIdFile, fingerprintFile, tgzFile): + if not self.canUploadJenkins(): + return "" + + args = [] + if self.__key: args.append("--key=" + self.__key) + if self.__username: args.append("--username=" + self.__username) + + return "\n" + textwrap.dedent("""\ + # upload artifact + cd $WORKSPACE + bob _upload artifactory {ARGS} {URL} {BUILDID} {SUFFIX} {RESULT}{FIXUP} + """.format(ARGS=" ".join(map(quote, args)), + URL=self.__url, BUILDID=quote(buildIdFile), + RESULT=quote(tgzFile), + FIXUP=" || echo Upload failed: $?" if self._ignoreErrors() else "", + SUFFIX=artifactSuffixJenkins(fingerprintFile))) + + def download(self, step, buildIdFile, fingerprintFile, tgzFile): + if not self.canDownloadJenkins(): + return "" + + args = [] + if self.__key: args.append("--key=" + self.__key) + if self.__username: args.append("--username=" + self.__username) + + return "\n" + textwrap.dedent("""\ + if [[ ! 
-e {RESULT} ]] ; then + bob _download artifactory {ARGS} {URL} {BUILDID} {SUFFIX} {RESULT} || echo Download failed: $? + fi + """.format(ARGS=" ".join(map(quote, args)), + URL=self.__url, BUILDID=quote(buildIdFile), + RESULT=quote(tgzFile), SUFFIX=artifactSuffixJenkins(fingerprintFile))) + + def uploadJenkinsLiveBuildId(self, step, liveBuildId, buildId): + if not self.canUploadJenkins(): + return "" + + args = [] + if self.__key: args.append("--key=" + self.__key) + if self.__username: args.append("--username=" + self.__username) + + return "\n" + textwrap.dedent("""\ + # upload live build-id + cd $WORKSPACE + bob _upload artifactory {ARGS} {URL} {LIVEBUILDID} {SUFFIX} {BUILDID}{FIXUP} + """.format(ARGS=" ".join(map(quote, args)), + URL=self.__url, LIVEBUILDID=quote(liveBuildId), + BUILDID=quote(buildId), + FIXUP=" || echo Upload failed: $?" if self._ignoreErrors() else "", + SUFFIX=BUILDID_SUFFIX)) + + @staticmethod + def scriptDownload(args): + archive, remoteBlob, localFile = ArtifactoryArchive.scriptGetService(args) + + from artifactory import ArtifactoryPath + + # Download into temporary file and rename if downloaded successfully + tmpName = None + try: + (tmpFd, tmpName) = mkstemp(dir=".") + + path = archive._makeArtifactoryPath(remoteBlob) + + with path.open() as fd: + os.write(tmpFd, fd.read()) + + os.close(tmpFd) + os.rename(tmpName, localFile) + tmpName = None + except (OSError, RuntimeError) as e: + raise BuildError("Download failed: " + str(e)) + finally: + if tmpName is not None: os.unlink(tmpName) + + @staticmethod + def scriptUpload(args): + archive, remoteBlob, localFile = ArtifactoryArchive.scriptGetService(args) + + from artifactory import ArtifactoryPath, md5sum, sha1sum + try: + sha1 = sha1sum(localFile) + md5 = md5sum(localFile) + with open(localFile, 'rb') as f: + fd = archive._makeArtifactoryPath(remoteBlob) + if fd.exists: + print("skipped") + else: + fd.deploy(f, sha1=sha1, md5=md5); + print("OK") + except (OSError, RuntimeError) as e: + raise BuildError("Upload failed: " + str(e)) + + @staticmethod + def scriptGetService(args): + parser = argparse.ArgumentParser() + parser.add_argument('url') + parser.add_argument('buildid') + parser.add_argument('suffix') + parser.add_argument('file') + parser.add_argument('--key') + parser.add_argument('--username') + args = parser.parse_args(args) + + try: + from artifactory import ArtifactoryPath + except ImportError: + raise BuildError("artifactory Python3 library not installed!") + + try: + with open(args.buildid, 'rb') as f: + remoteBlob = ArtifactoryArchive.__makeBlobName(f.read(), args.suffix) + except OSError as e: + raise BuildError(str(e)) + + archive = ArtifactoryArchive() + archive.setArgs(args) + + return (archive, remoteBlob, args.file) + +class ArtifactoryDownloader: + def __init__(self, fd): + self.fd = fd + def __enter__(self): + return (None, self.fd) + def __exit__(self, exc_type, exc_value, traceback): + self.fd.close() + return False + +class ArtifactoryUploader: + def __init__(self, artifactory, fd, name, remoteName): + self.__artifactory = artifactory + self.__name = name + self.__remoteName = remoteName + + def __enter__(self): + return (self.__name, None) + + def __exit__(self, exc_type, exc_value, traceback): + try: + if exc_type is None: + self.__upload() + finally: + os.unlink(self.__name) + return False + + def __upload(self): + from artifactory import ArtifactoryPath, md5sum, sha1sum + + sha1 = sha1sum(self.__name) + md5 = md5sum(self.__name) + try: + with open(self.__name, 'rb') as f: + fd = 
self.__artifactory._makeArtifactoryPath(self.__remoteName)
+                fd.deploy(f, sha1=sha1, md5=md5);
+        except RuntimeError as e:
+            raise ArtifactUploadError("upload failed "+ str(e))
 
 class MultiArchive:
     def __init__(self, archives):
@@ -1056,6 +1280,8 @@ def getSingleArchiver(recipes, archiveSpec):
         return CustomArchive(archiveSpec, recipes.envWhiteList())
     elif archiveBackend == "azure":
         return AzureArchive(archiveSpec)
+    elif archiveBackend == "artifactory":
+        return ArtifactoryArchive(archiveSpec)
     elif archiveBackend == "none":
         return DummyArchive()
     else:
@@ -1072,6 +1298,8 @@ def doDownload(args, bobRoot):
     archiveBackend = args[0]
     if archiveBackend == "azure":
         AzureArchive.scriptDownload(args[1:])
+    elif archiveBackend == "artifactory":
+        ArtifactoryArchive.scriptDownload(args[1:])
     else:
         raise BuildError("Invalid archive backend: "+archiveBackend)
 
@@ -1079,5 +1307,7 @@ def doUpload(args, bobRoot):
     archiveBackend = args[0]
     if archiveBackend == "azure":
         AzureArchive.scriptUpload(args[1:])
+    elif archiveBackend == "artifactory":
+        ArtifactoryArchive.scriptUpload(args[1:])
     else:
         raise BuildError("Invalid archive backend: "+archiveBackend)
diff --git a/pym/bob/input.py b/pym/bob/input.py
index 97bb6f04f..00d80db8c 100644
--- a/pym/bob/input.py
+++ b/pym/bob/input.py
@@ -2526,7 +2526,8 @@ def touch(self, inputEnv, inputTools):
 
 class ArchiveValidator:
     def __init__(self):
-        self.__validTypes = schema.Schema({'backend': schema.Or('none', 'file', 'http', 'shell', 'azure')},
+        self.__validTypes = schema.Schema({'backend': schema.Or('none',
+            'file', 'http', 'shell', 'azure', 'artifactory')},
             ignore_extra_keys=True)
         baseArchive = {
             'backend' : str,
@@ -2550,12 +2551,19 @@ def __init__(self):
             schema.Optional('key') : str,
             schema.Optional('sasToken"') : str,
         })
+        artifactoryArchive = baseArchive.copy()
+        artifactoryArchive.update({
+            'url' : str,
+            schema.Optional('key') : str,
+            schema.Optional('username') : str,
+        })
         self.__backends = {
             'none' : schema.Schema(baseArchive),
             'file' : schema.Schema(fileArchive),
             'http' : schema.Schema(httpArchive),
             'shell' : schema.Schema(shellArchive),
             'azure' : schema.Schema(azureArchive),
+            'artifactory' : schema.Schema(artifactoryArchive),
         }
 
     def validate(self, data):

From 1e812c2a9730d866e0e169c14ab9d92a9aa84dee Mon Sep 17 00:00:00 2001
From: Ralf Hubert
Date: Sun, 5 May 2019 16:42:55 +0200
Subject: [PATCH 2/2] artifactory-archive: add properties to artifacts

Artifactory supports properties on artifacts. Added a properties key to
the archive spec to provide such properties. The property values are
substituted using the packageStep environment and the metaEnv of the
package, for example:
archive: backend: "artifactory" url: "http://localhost:8081/artifactory/example-repo-local" properties: FOO: "42" BAR: "13" TEST: "${TEST:-NotSet}" --- pym/bob/archive.py | 106 +++++++++++++++++++++++++++++++-------------- pym/bob/input.py | 1 + 2 files changed, 75 insertions(+), 32 deletions(-) diff --git a/pym/bob/archive.py b/pym/bob/archive.py index faad21f78..fe15bc138 100644 --- a/pym/bob/archive.py +++ b/pym/bob/archive.py @@ -31,6 +31,7 @@ import gzip import hashlib import http.client +import json import os import os.path import signal @@ -249,7 +250,7 @@ def _downloadLocalLiveBuildId(self, liveBuildId): except OSError as e: raise BuildError("Cannot download artifact: " + str(e)) - def _openUploadFile(self, buildId, suffix): + def _openUploadFile(self, buildId, suffix, env): raise ArtifactUploadError("not implemented") async def uploadPackage(self, step, buildId, audit, content): @@ -259,20 +260,24 @@ async def uploadPackage(self, step, buildId, audit, content): loop = asyncio.get_event_loop() suffix = ARTIFACT_SUFFIX details = " to {}".format(self._remoteName(buildId, suffix)) + + env = step.getEnv() + env.update(step.getPackage().getMetaEnv()) + with stepAction(step, "UPLOAD", content, details=details) as a: try: msg, kind = await loop.run_in_executor(None, BaseArchive._uploadPackage, - self, buildId, suffix, audit, content) + self, buildId, suffix, audit, content, env) a.setResult(msg, kind) except (concurrent.futures.CancelledError, concurrent.futures.process.BrokenProcessPool): raise BuildError("Upload of package interrupted.") - def _uploadPackage(self, buildId, suffix, audit, content): + def _uploadPackage(self, buildId, suffix, audit, content, env): # restore signals to default so that Ctrl+C kills us signal.signal(signal.SIGINT, signal.SIG_DFL) try: - with self._openUploadFile(buildId, suffix) as (name, fileobj): + with self._openUploadFile(buildId, suffix, env) as (name, fileobj): pax = { 'bob-archive-vsn' : "1" } with gzip.open(name or fileobj, 'wb', 6) as gzf: with tarfile.open(name, "w", fileobj=gzf, @@ -293,19 +298,23 @@ async def uploadLocalLiveBuildId(self, step, liveBuildId, buildId): return loop = asyncio.get_event_loop() + env = step.getEnv() + env.update(step.getPackage().getMetaEnv()) with stepAction(step, "CACHE-BID", self._remoteName(liveBuildId, BUILDID_SUFFIX), (INFO,TRACE)) as a: try: - msg, kind = await loop.run_in_executor(None, BaseArchive._uploadLocalLiveBuildId, self, liveBuildId, buildId) + msg, kind = await loop.run_in_executor(None, + BaseArchive._uploadLocalLiveBuildId, self, + liveBuildId, buildId, env) a.setResult(msg, kind) except (concurrent.futures.CancelledError, concurrent.futures.process.BrokenProcessPool): raise BuildError("Upload of build-id interrupted.") - def _uploadLocalLiveBuildId(self, liveBuildId, buildId): + def _uploadLocalLiveBuildId(self, liveBuildId, buildId, env): # restore signals to default so that Ctrl+C kills us signal.signal(signal.SIGINT, signal.SIG_DFL) try: - with self._openUploadFile(liveBuildId, BUILDID_SUFFIX) as (name, fileobj): + with self._openUploadFile(liveBuildId, BUILDID_SUFFIX, env) as (name, fileobj): writeFileOrHandle(name, fileobj, buildId) except ArtifactExistsError: return ("skipped (exists in archive)", SKIPPED) @@ -340,7 +349,7 @@ def _openDownloadFile(self, buildId, suffix): else: raise ArtifactNotFoundError() - def _openUploadFile(self, buildId, suffix): + def _openUploadFile(self, buildId, suffix, env): (packageResultPath, packageResultFile) = self._getPath(buildId, suffix) if 
os.path.isfile(packageResultFile): raise ArtifactExistsError() @@ -516,7 +525,7 @@ def __openDownloadFile(self, buildId, suffix): raise ArtifactDownloadError("{} {}".format(response.status, response.reason)) - def _openUploadFile(self, buildId, suffix): + def _openUploadFile(self, buildId, suffix, env): (ok, result) = self.__retry(lambda: self.__openUploadFile(buildId, suffix)) if ok: return result @@ -701,7 +710,7 @@ def _openDownloadFile(self, buildId, suffix): finally: if tmpName is not None: os.unlink(tmpName) - def _openUploadFile(self, buildId, suffix): + def _openUploadFile(self, buildId, suffix, env): (tmpFd, tmpName) = mkstemp() os.close(tmpFd) return CustomUploader(tmpName, self._makeUrl(buildId, suffix), self.__whiteList, @@ -823,7 +832,7 @@ def _openDownloadFile(self, buildId, suffix): finally: if tmpName is not None: os.unlink(tmpName) - def _openUploadFile(self, buildId, suffix): + def _openUploadFile(self, buildId, suffix, env): from azure.common import AzureException blobName = self.__makeBlobName(buildId, suffix) @@ -990,6 +999,7 @@ def __init__(self, spec=None): self.__url = spec['url'] self.__username = spec.get('username', None) self.__key = spec.get('key', None) + self.__properties = spec.get('properties', None) try: from artifactory import ArtifactoryPath except ImportError: @@ -999,6 +1009,7 @@ def setArgs(self, args): self.__url = args.url self.__username = args.username self.__key = args.key + self.__properties = args.properties @staticmethod def __makeBlobName(buildId, suffix): @@ -1032,9 +1043,9 @@ def _openDownloadFile(self, buildId, suffix): except RuntimeError as e: raise ArtifactDownloadError(str(e)) - def _openUploadFile(self, buildId, suffix): + def _openUploadFile(self, buildId, suffix, env): blobName = self.__makeBlobName(buildId,suffix) - from artifactory import ArtifactoryPath + try: fd = self._makeArtifactoryPath(blobName) try: @@ -1047,27 +1058,44 @@ def _openUploadFile(self, buildId, suffix): raise ArtifactUploadError(str(e)) (tmpFd, tmpName) = mkstemp() os.close(tmpFd) - return ArtifactoryUploader(self, fd, tmpName, blobName) - def upload(self, step, buildIdFile, fingerprintFile, tgzFile): - if not self.canUploadJenkins(): - return "" + properties = {} + if self.__properties: + from .stringparser import Env + _env = Env(env) + properties = { k : _env.substitute(v, "properties::environment::"+k) + for k, v in self.__properties.items() } + return ArtifactoryUploader(self, fd, tmpName, blobName, properties) + def _getArgs(self, step): args = [] if self.__key: args.append("--key=" + self.__key) if self.__username: args.append("--username=" + self.__username) + if self.__properties: + from .stringparser import Env + env = step.getEnv() + env.update(step.getPackage().getMetaEnv()) + _env = Env(env) + properties = { k : _env.substitute(v, "properties::environment::"+k) + for k, v in self.__properties.items() } + args.append("--properties="+json.dumps(properties)) + return args + + def upload(self, step, buildIdFile, tgzFile): + if not self.canUploadJenkins(): + return "" return "\n" + textwrap.dedent("""\ # upload artifact cd $WORKSPACE bob _upload artifactory {ARGS} {URL} {BUILDID} {SUFFIX} {RESULT}{FIXUP} - """.format(ARGS=" ".join(map(quote, args)), + """.format(ARGS=" ".join(map(quote, self._getArgs(step))), URL=self.__url, BUILDID=quote(buildIdFile), RESULT=quote(tgzFile), FIXUP=" || echo Upload failed: $?" 
if self._ignoreErrors() else "", - SUFFIX=artifactSuffixJenkins(fingerprintFile))) + SUFFIX=ARTIFACT_SUFFIX)) - def download(self, step, buildIdFile, fingerprintFile, tgzFile): + def download(self, step, buildIdFile, tgzFile): if not self.canDownloadJenkins(): return "" @@ -1081,21 +1109,17 @@ def download(self, step, buildIdFile, fingerprintFile, tgzFile): fi """.format(ARGS=" ".join(map(quote, args)), URL=self.__url, BUILDID=quote(buildIdFile), - RESULT=quote(tgzFile), SUFFIX=artifactSuffixJenkins(fingerprintFile))) + RESULT=quote(tgzFile), SUFFIX=ARTIFACT_SUFFIX)) def uploadJenkinsLiveBuildId(self, step, liveBuildId, buildId): if not self.canUploadJenkins(): return "" - args = [] - if self.__key: args.append("--key=" + self.__key) - if self.__username: args.append("--username=" + self.__username) - return "\n" + textwrap.dedent("""\ # upload live build-id cd $WORKSPACE bob _upload artifactory {ARGS} {URL} {LIVEBUILDID} {SUFFIX} {BUILDID}{FIXUP} - """.format(ARGS=" ".join(map(quote, args)), + """.format(ARGS=" ".join(map(quote, self._getArgs(step))), URL=self.__url, LIVEBUILDID=quote(liveBuildId), BUILDID=quote(buildId), FIXUP=" || echo Upload failed: $?" if self._ignoreErrors() else "", @@ -1127,7 +1151,7 @@ def scriptDownload(args): @staticmethod def scriptUpload(args): - archive, remoteBlob, localFile = ArtifactoryArchive.scriptGetService(args) + archive, remoteBlob, localFile, properties = ArtifactoryArchive.scriptGetService(args) from artifactory import ArtifactoryPath, md5sum, sha1sum try: @@ -1135,10 +1159,16 @@ def scriptUpload(args): md5 = md5sum(localFile) with open(localFile, 'rb') as f: fd = archive._makeArtifactoryPath(remoteBlob) - if fd.exists: + if fd.exists(): print("skipped") else: - fd.deploy(f, sha1=sha1, md5=md5); + fd.deploy(f, sha1=sha1, md5=md5) + if properties: + try: + fd.properties = properties + except RuntimeError: + # properties available only in Artifactory Pro + pass print("OK") except (OSError, RuntimeError) as e: raise BuildError("Upload failed: " + str(e)) @@ -1152,6 +1182,7 @@ def scriptGetService(args): parser.add_argument('file') parser.add_argument('--key') parser.add_argument('--username') + parser.add_argument('--properties', type=str) args = parser.parse_args(args) try: @@ -1168,7 +1199,11 @@ def scriptGetService(args): archive = ArtifactoryArchive() archive.setArgs(args) - return (archive, remoteBlob, args.file) + properties = None + if args.properties: + properties = json.loads(args.properties) + + return (archive, remoteBlob, args.file, properties) class ArtifactoryDownloader: def __init__(self, fd): @@ -1180,10 +1215,11 @@ def __exit__(self, exc_type, exc_value, traceback): return False class ArtifactoryUploader: - def __init__(self, artifactory, fd, name, remoteName): + def __init__(self, artifactory, fd, name, remoteName, properties): self.__artifactory = artifactory self.__name = name self.__remoteName = remoteName + self.__properties = properties def __enter__(self): return (self.__name, None) @@ -1204,7 +1240,13 @@ def __upload(self): try: with open(self.__name, 'rb') as f: fd = self.__artifactory._makeArtifactoryPath(self.__remoteName) - fd.deploy(f, sha1=sha1, md5=md5); + fd.deploy(f, sha1=sha1, md5=md5) + if self.__properties: + try: + fd.properties = self.__properties + except RuntimeError: + # properties available only in Artifactory Pro + pass except RuntimeError as e: raise ArtifactUploadError("upload failed "+ str(e)) diff --git a/pym/bob/input.py b/pym/bob/input.py index 00d80db8c..4f0001dcb 100644 --- a/pym/bob/input.py +++ 
b/pym/bob/input.py
@@ -2555,6 +2555,7 @@ def __init__(self):
         artifactoryArchive.update({
             'url' : str,
             schema.Optional('key') : str,
+            schema.Optional('properties') : schema.Schema({ schema.Regex(r'^[A-Za-z_][A-Za-z0-9_]*$') : str }),
             schema.Optional('username') : str,
         })
         self.__backends = {
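To summarize the upload path implemented above, the following condensed sketch
shows only the dohq-artifactory calls the backend relies on (ArtifactoryPath
with auth/apikey, exists(), deploy() with checksums, and the properties
attribute). It is an illustration, not part of the patch: URL, API key, file
name, property values and the artifact suffix are placeholders, and the blob
path follows the buildId[0:2]/buildId[2:4]/buildId[4:]+suffix layout used by
__makeBlobName().

    from artifactory import ArtifactoryPath, md5sum, sha1sum

    # Placeholder repository URL and blob name; real names come from __makeBlobName().
    path = ArtifactoryPath(
        "http://localhost:8081/artifactory/example-repo-local/ab/cd/ef0123456789.tgz",
        apikey="my-api-key")                  # or auth=("user", "key"), or no auth at all

    if path.exists():
        print("skipped")                      # artifact already present, nothing to upload
    else:
        with open("result.tgz", "rb") as f:   # placeholder for the packed artifact
            path.deploy(f, sha1=sha1sum("result.tgz"), md5=md5sum("result.tgz"))
        try:
            path.properties = {"FOO": "42"}   # setting properties requires Artifactory Pro
        except RuntimeError:
            pass                              # plain Artifactory: ignore, as the backend does
        print("OK")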