author     Jonathan Maw <jonathan.maw@codethink.co.uk>   2017-05-17 14:09:44 +0100
committer  Jonathan Maw <jonathan.maw@codethink.co.uk>   2017-05-17 14:16:56 +0100
commit     4dba62b51b5d01edc692a5980fc4f46a19986907 (patch)
tree       a6ed48ac94ed4c6fea0c9b77087f16d4be8c0317
parent     3db37af2d54bf1d84c75d56289fc7a9d91e39c27 (diff)
download   ybd-staging/jonathan/rpm-caching.tar.gz
Make rpms use a local and remote artifact cache (staging/jonathan/rpm-caching)

This will upload built RPMs to kbas, unless kbas-upload is manually set to a value that doesn't include "rpm".
-rw-r--r--  ybd/rpm.py  222
1 file changed, 197 insertions, 25 deletions
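
The new caching and upload behaviour is driven entirely by app.config keys that the patch consults: kbas-url, kbas-password, kbas-upload, artifacts, deployment and tmp. A minimal sketch of those settings follows; the key names are taken from the code in this patch, while the dict form and the example values are purely illustrative assumptions, not an authoritative ybd configuration.

    # Illustrative sketch only: key names match the app.config lookups in this
    # patch, but every value here is a hypothetical example.
    config = {
        'kbas-url': 'http://artifact-server:8000/',  # if unset, get_remote_rpm() skips the remote fetch
        'kbas-password': 'insecure',                 # must differ from 'insecure' before uploads are attempted
        'kbas-upload': 'rpm',                        # drop 'rpm' from this value to disable RPM fetch/upload
        'artifacts': '/src/cache/artifacts',         # local cache holding <pkgname>.<cache-key>.rpm files
        'deployment': '/src/deployments',            # deploy_rpm() copies into <deployment>/RPMs/<system-key>/
        'tmp': '/src/tmp',                           # scratch space used while downloading from kbas
    }
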
diff --git a/ybd/rpm.py b/ybd/rpm.py
index 2f44b0e..d42e1c0 100644
--- a/ybd/rpm.py
+++ b/ybd/rpm.py
@@ -1,7 +1,7 @@
import os
import sys
from collections import Mapping
-from cache import cache_key, get_cache
+from cache import cache_key, get_cache, md5
from app import log, timer
import time
import app
@@ -9,16 +9,20 @@ import sandbox
import shutil
import yaml
import repos
+import requests
+import tempfile
# Because rpm is otherwise totally broken
+# NOTE: _build_name_fmt would ordinarily be
+# %%{NAME}-%%{VERSION}-%%{RELEASE}.%%{ARCH}.rpm
+# but we are pulling them out of a cache with a different naming scheme now.
#
common_rpm_args = (
'--dbpath=/var/lib/rpm '
'--define "_rpmconfigdir /usr/lib/rpm" '
'--define "_rpmlock_path /var/lib/rpm/.rpm.lock" '
'--define "_fileattrsdir /usr/lib/rpm/fileattrs" '
- '--define "_build_name_fmt %%{NAME}-%%{VERSION}-%%{RELEASE}.%%{ARCH}.rpm" '
+ '--define "_build_name_fmt %%{NAME}.rpm" '
'--define "_rpmfilename %{_build_name_fmt}" '
'--define "_tmppath /tmp" '
'--define "_unpackaged_files_terminate_build 0" '
@@ -226,6 +230,131 @@ def extract_defines(dn):
return ''.join(strings)
+def get_generated_package_names(system, dn):
+    # Ignore subpackages without a 'files' section, as those aren't generated.
+ if 'rpm-metadata' not in dn:
+ app.log(dn, "Tried to get package names for definition "
+ "without rpm-metadata!")
+ sys.exit(1)
+ if 'packages' not in dn['rpm-metadata']:
+        app.log(dn, "Tried to get package names for definition "
+ "with no packages in rpm-metadata!")
+ sys.exit(1)
+ package_names = []
+ for package in dn['rpm-metadata']['packages']:
+ if 'files' in package:
+ package_names.append(expand_macro(system, dn, package['name']))
+ return package_names
+
+
+def get_remote_rpm(dn, pkgfilename):
+ # Defaults to allowing rpms to be fetched from kbas.
+ # Override kbas-upload to prevent this.
+ if 'rpm' not in app.config.get('kbas-upload', 'rpm'):
+ return False
+ if 'kbas-url' not in app.config:
+ return False
+ try:
+ app.log(dn, 'Try downloading', pkgfilename)
+ url = "{}get/{}".format(app.config['kbas-url'], pkgfilename)
+ response = requests.get(url=url, stream=True)
+ except Exception as e:
+ app.config.pop('kbas-url')
+ app.log(dn, "WARNING: Failure to call artifact server: ", e)
+ return False
+
+ if response.status_code == 200:
+ try:
+ tempfile.tempdir = app.config['tmp']
+ tmpdir = tempfile.mkdtemp()
+ cachefile = os.path.join(tmpdir, pkgfilename)
+ with open(cachefile, 'wb') as f:
+ f.write(response.content)
+ cache_dst = os.path.join(app.config['artifacts'], pkgfilename)
+ os.rename(cachefile, cache_dst)
+ return True
+ except Exception as e:
+ app.log(dn, "WARNING: Failed to download {}: {}"
+ .format(pkgfilename, e))
+ return False
+
+
+def get_cache_pkgfilename(pkgname, dn):
+ return "{}.{}.rpm".format(pkgname, cache_key(dn))
+
+
+def all_rpms_cached(system, dn):
+ for pkgname in get_generated_package_names(system, dn):
+ pkgfn = get_cache_pkgfilename(pkgname, dn)
+ cached_path = os.path.join(app.config['artifacts'], pkgfn)
+ if (not os.path.exists(cached_path)
+ and not get_remote_rpm(dn, pkgfn)):
+ return False
+ return True
+
+
+def compose_rpm(dn, userdata):
+    # A chunk without rpm-metadata is not an error:
+    # it just means there is nothing to package, so skip it.
+ if 'rpm-metadata' not in dn:
+ return True
+ if not all_rpms_cached(userdata['system'], dn):
+ if not package_one_rpm(dn, userdata):
+ return False
+
+ if not cache_generated_rpms(userdata['system'], dn):
+ return False
+
+ return True
+
+
+def upload_generated_rpm(dn, cachefile):
+ filename = os.path.basename(cachefile)
+ url = app.config['kbas-url'] + 'upload'
+ params = {"filename": filename,
+ "password": app.config['kbas-password'],
+ "checksum": md5(cachefile)}
+ with open(cachefile, 'rb') as f:
+ try:
+ response = requests.post(url=url, data=params, files={"file": f})
+ if response.status_code == 201:
+ app.log(dn, 'Uploaded %s to kbas' % filename)
+ return
+ if response.status_code == 777:
+ app.log(dn, 'Reproduced %s at' % md5(cachefile), filename)
+ app.config['reproduced'].append([md5(cachefile), filename])
+ return
+ if response.status_code == 405:
+ app.log(dn, 'Artifact server already has', filename)
+ return
+ app.log(dn, 'Artifact server problem:', response.status_code)
+ except:
+ pass
+ app.log(dn, 'Failed to upload', filename)
+
+
+def cache_generated_rpms(system, dn):
+ rpms_dir = os.path.join(system['sandbox'], 'RPMS')
+ for pkg in get_generated_package_names(system, dn):
+ pkgfile = "{}.rpm".format(pkg)
+ pkgpath = os.path.join(rpms_dir, pkgfile)
+ cachepath = os.path.join(app.config['artifacts'],
+ get_cache_pkgfilename(pkg, dn))
+ if not os.path.exists(pkgpath):
+ app.log(dn, "Can't extract rpms, {} is missing!".format(pkgpath))
+ return False
+ os.rename(pkgpath, cachepath)
+
+ # Upload the cached rpm, if applicable
+ if (app.config.get('kbas-password', 'insecure') != 'insecure'
+ and 'kbas-url' in app.config
+ and 'rpm' in app.config.get('kbas-upload', 'rpm')):
+ with app.timer(dn, 'Upload {}'.format(pkgfile)):
+ upload_generated_rpm(dn, cachepath)
+
+ return True
+
+
def package_one_rpm(dn, userdata):
system = userdata['system']
@@ -258,9 +387,6 @@ def package_one_rpm(dn, userdata):
specfile = os.path.join(metadir, '%s.spec' % name)
success = True
- if 'rpm-metadata' not in dn:
- # No metadata defined for rpm generation, pass.
- return success
if generate_spec(dn, fulldir, metafile, specfile,
name, system):
defines = extract_defines(dn)
@@ -292,6 +418,69 @@ def package_one_rpm(dn, userdata):
return True
+def rpm_deployment_filename(system, dn, rpmpath):
+    # Read the rpm's headers to construct its deployment filename.
+    # The rpm is copied into the sandbox first, because the cached
+    # artifact lives outside the sandbox.
+ filename = os.path.basename(rpmpath)
+ sandbox_dstdir = os.path.join(system['sandbox'], 'tmp')
+ sandbox_dstfile = os.path.join(sandbox_dstdir, filename)
+ sandbox_real_rpmpath = os.path.join(sandbox_dstdir, filename)
+ sandbox_rpmpath = os.path.join('/tmp', filename)
+ if not os.path.exists(sandbox_dstdir):
+ os.makedirs(sandbox_dstdir)
+ shutil.copyfile(rpmpath, sandbox_dstfile)
+
+ env_vars = sandbox.env_vars_for_build(system)
+ command = (
+ 'rpm {} {} -q -p {} '
+ '--queryformat="%{{name}}-%{{version}}-%{{release}}.%{{arch}}.rpm"'
+ .format(common_rpm_args, extract_defines(dn), sandbox_rpmpath))
+ ret, out, err = sandbox.run_sandboxed(system, command,
+ env_vars, exit_on_error=False,
+ run_logged=False,
+ print_command=False)
+ os.remove(sandbox_real_rpmpath)
+ if not ret:
+        app.log(dn, "ERROR: Failed to generate rpm name, {}".format(err))
+ sys.exit(1)
+ if out is None:
+ app.log(dn, "ERROR: getting rpm deployment filename returned None")
+ sys.exit(1)
+ return out
+
+
+def deploy_rpm(dn, userdata):
+ # Skip this if there's no rpm-metadata
+ if 'rpm-metadata' not in dn:
+ return True
+
+ for pkgname in get_generated_package_names(userdata['system'], dn):
+ pkgfn = get_cache_pkgfilename(pkgname, dn)
+ cached_path = os.path.join(app.config['artifacts'], pkgfn)
+ if not os.path.exists(cached_path):
+ app.log(dn, "WARNING: Missing cached file {}".format(cached_path))
+ return False
+ dstdir = os.path.join(app.config['deployment'],
+ 'RPMs', cache_key(userdata['system']))
+ dstfilename = rpm_deployment_filename(userdata['system'],
+ dn, cached_path)
+ dstpath = os.path.join(dstdir, dstfilename)
+ if not os.path.exists(dstdir):
+ os.makedirs(dstdir)
+ shutil.copyfile(cached_path, dstpath)
+ return True
+
+
+def deploy_rpms(system, whitelist=None):
+ deploy_results = foreach_def(system, deploy_rpm, {'system': system},
+ whitelist=whitelist)
+ errors = any(not t[1] for t in deploy_results)
+ if errors:
+ app.log(system, "ERROR: Failed to deploy all RPMs!")
+ sys.exit(1)
+
+
# package_rpms
# @system: The system to package rpms for
# @whitelist: A whitelist of chunk names to package rpms for
@@ -331,32 +520,15 @@ def package_rpms(system, whitelist=None):
# Package each rpm in order of build dependency
package_results = foreach_def(
- system, package_one_rpm, {'system': system},
+ system, compose_rpm, {'system': system},
whitelist=whitelist)
errors = any(not t[1] for t in package_results)
if errors:
log(system, 'ERROR: Failed to successfully generate all rpms!')
sys.exit(1)
- # Move the resulting RPMS directory into the deployment area
- rpm_destdir = os.path.join(app.config['deployment'], 'RPMs',
- cache_key(system))
- if not os.path.exists(rpm_destdir):
- os.makedirs(rpm_destdir)
- for entry in os.listdir(rpmdir):
- srcfile = os.path.join(rpmdir, entry)
- dstfile = os.path.join(rpm_destdir, entry)
- # I could not move files if they already exist,
- # but the RPMs have already been produced at this point.
- os.rename(os.path.join(rpmdir, entry),
- os.path.join(rpm_destdir, entry))
-
- # Move the generated RPM database into the deployment area
- rpm_dbsrc = os.path.join(system['sandbox'], 'var', 'lib', 'rpm')
- rpm_dbdest = os.path.join(rpm_destdir, "db")
- if os.path.exists(rpm_dbdest):
- shutil.rmtree(rpm_dbdest)
- shutil.move(rpm_dbsrc, rpm_dbdest)
+ deploy_rpms(system, whitelist)
+ app.log(system, "Finished deploying RPMs!")
#
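
Two RPM filename schemes are in play in this patch: rpmbuild now emits plain %{NAME}.rpm (the _build_name_fmt change near the top), cache_generated_rpms() renames that into the artifact cache with the chunk's cache key appended, and deploy_rpm() later restores the conventional name by querying the rpm headers inside the sandbox. A minimal sketch of the cached-name side, using a made-up package name and cache key:

    # Sketch only: 'example-pkg' and the cache key below are hypothetical values.
    pkgname = 'example-pkg'
    key = 'example-chunk.0123456789abcdef'   # stand-in for cache.cache_key(dn)

    # cache_generated_rpms() renames RPMS/<pkgname>.rpm to this name in
    # app.config['artifacts'], and uploads it to kbas under the same name:
    cached_name = '{}.{}.rpm'.format(pkgname, key)
    print(cached_name)   # example-pkg.example-chunk.0123456789abcdef.rpm

    # deploy_rpm() then asks rpm for %{name}-%{version}-%{release}.%{arch}.rpm
    # and copies the cached file to <deployment>/RPMs/<system cache key>/<that name>.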