author    Daniel Firth <dan.firth@codethink.co.uk>    2016-11-22 15:33:27 +0000
committer Daniel Firth <dan.firth@codethink.co.uk>    2016-11-22 15:33:27 +0000
commit    0f3e02cf891df1a301b83dc054bf0e23a6334f0b (patch)
tree      5c9f371fc4e064c0757b4e5fbac07f3f16bda8be
parent    f06892f4da0dbcdcc114a8720861fe71cfc02ec9 (diff)
download  ybd-0f3e02cf891df1a301b83dc054bf0e23a6334f0b.tar.gz
WIP: Cleanup all defunct splitting code
-rwxr-xr-x  ybd/__main__.py  |   2
-rw-r--r--  ybd/assembly.py  |   4
-rw-r--r--  ybd/splitting.py | 220
3 files changed, 11 insertions, 215 deletions
diff --git a/ybd/__main__.py b/ybd/__main__.py
index a7686ea..cc4e39d 100755
--- a/ybd/__main__.py
+++ b/ybd/__main__.py
@@ -22,7 +22,7 @@ import sys
import fcntl
from ybd import app, cache, config, sandbox, repos
from ybd.app import cleanup, RetryException, setup, spawn
-from ybd.assembly import compose, build, install_contents, write_metadata
+from ybd.assembly import compose, build, install_contents
from ybd.cache import get_cache, get_remote
from ybd.deployment import deploy
from ybd.concourse import Pipeline
diff --git a/ybd/assembly.py b/ybd/assembly.py
index 3cced71..44cb36b 100644
--- a/ybd/assembly.py
+++ b/ybd/assembly.py
@@ -24,7 +24,7 @@ from ybd import app, repos, sandbox, config, splitting, utils
from ybd.app import lockfile, RetryException
from ybd.cache import cache, cache_key, get_cache, get_remote
import datetime
-from ybd.splitting import write_metadata, install_split_artifacts, compile_rules
+from ybd.splitting import write_chunk_metafile, compile_rules
from ybd.utils import log, log_riemann, elapsed, timer
from fs.osfs import OSFS
from fs.mountfs import MountFS
@@ -148,7 +148,7 @@ def build(dn, supports, actuator=None):
if dn.get('kind', 'chunk') == 'system':
for x in actuator.get_includes_iterator(dn):
- rules, splits = compile_rules(x)
+ rules = compile_rules(x)
a = get_cache(x) + '.unpacked'
afs = OSFS(a)
meta = os.path.join(dn['baserockdir'], x['name'] + '.meta')
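The build() hunk above now treats compile_rules() as returning only the compiled rules; the per-artifact file lists come from get_splits_for() run against the unpacked cache (see the splitting.py changes below). A minimal sketch of how the pieces compose under that reading, not taken from the patch; the helper name collect_system_meta and its return shape are assumptions:

    import os
    from ybd.cache import get_cache
    from ybd.splitting import compile_rules, get_splits_for

    def collect_system_meta(x, baserockdir):
        # compile_rules() no longer hands back a 'splits' dict alongside the
        # rules; get_splits_for() builds that mapping from the files actually
        # present in the unpacked artifact.
        rules = compile_rules(x)
        splits = get_splits_for(get_cache(x) + '.unpacked', rules)
        meta = os.path.join(baserockdir, x['name'] + '.meta')
        return rules, splits, meta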
diff --git a/ybd/splitting.py b/ybd/splitting.py
index 058102f..e216ea9 100644
--- a/ybd/splitting.py
+++ b/ybd/splitting.py
@@ -24,137 +24,8 @@ from fs.osfs import OSFS
import itertools
-def install_split_artifacts(dn):
- '''Create the .meta files for a split system
-
- Given a list of artifacts to split, writes new .meta files to
- the baserock dir in dn['install'] and copies the files from the
- sandbox to the dn['install']
-
- '''
- for stratum in dn['contents']:
- move_required_files(dn, stratum, stratum['artifacts'])
-
-
-def move_required_files(dn, stratum, artifacts):
- stratum_metadata = get_metadata(stratum)
- split_stratum_metadata = {}
- if not artifacts:
- # Include all artifacts if no ones were explicitly given for an
- # included stratum on a system.
- artifacts = [p['artifact'] for p in stratum_metadata['products']]
-
- to_keep = [component
- for product in stratum_metadata['products']
- for component in product['components']
- if product['artifact'] in artifacts]
-
- split_stratum_metadata['products'] = (
- [product
- for product in stratum_metadata['products']
- if product['artifact'] in artifacts])
-
- log(dn, 'Installing %s artifacts' % stratum['name'], artifacts)
- log(dn, 'Installing components:', to_keep, verbose=True)
-
- baserockpath = os.path.join(dn['install'], 'baserock')
- if not os.path.isdir(baserockpath):
- os.mkdir(baserockpath)
- split_stratum_metafile = os.path.join(baserockpath,
- stratum['name'] + '.meta')
- with open(split_stratum_metafile, "w") as f:
- yaml.safe_dump(split_stratum_metadata, f, default_flow_style=False)
-
- for chunk in stratum['contents']:
- if chunk.get('build-mode', 'staging') == 'bootstrap':
- continue
-
- try:
- metafile = path_to_metafile(chunk)
- with open(metafile, "r") as f:
- filelist = []
- metadata = yaml.safe_load(f)
- split_metadata = {'ref': metadata.get('ref'),
- 'repo': metadata.get('repo'),
- 'products': []}
- if config.config.get('artifact-version', 0) not in range(0, 1):
- metadata['cache'] = dn.get('cache')
-
- for product in metadata['products']:
- if product['artifact'] in to_keep:
- filelist += product.get('components', [])
- # handle old artifacts still containing 'files'
- filelist += product.get('files', [])
-
- split_metadata['products'].append(product)
-
- if split_metadata['products'] != []:
- split_metafile = os.path.join(baserockpath,
- os.path.basename(metafile))
- with open(split_metafile, "w") as f:
- yaml.safe_dump(split_metadata, f,
- default_flow_style=False)
- log(dn, 'Splits split_metadata is\n', split_metadata,
- )
- log(dn, 'Splits filelist is\n', filelist)
- copy_file_list(dn['sandbox'], dn['install'], filelist)
- except:
- import traceback
- traceback.print_exc()
- log(dn, 'Failed to install split components', exit=True)
-
-
-def check_overlaps(dn):
- if set(config.config['new-overlaps']) <= set(config.config['overlaps']):
- config.config['new-overlaps'] = []
- return
-
- overlaps_found = False
- config.config['new-overlaps'] = list(set(config.config['new-overlaps']))
- for path in config.config['new-overlaps']:
- log(dn, 'WARNING: overlapping path', path)
- for filename in os.listdir(dn['baserockdir']):
- with open(os.path.join(dn['baserockdir'], filename)) as f:
- for line in f:
- if path[1:] in line:
- log(filename, 'WARNING: overlap at', path[1:])
- overlaps_found = True
- break
- if config.config.get('check-overlaps') == 'exit':
- log(dn, 'Overlaps found', config.config['new-overlaps'], exit=True)
- config.config['overlaps'] = list(set(config.config['new-overlaps'] +
- config.config['overlaps']))
- config.config['new-overlaps'] = []
-
-
-def get_metadata(dn):
- '''Load an individual .meta file
-
- The .meta file is expected to be in the .unpacked/baserock directory of the
- built artifact
-
- '''
- try:
- print(path_to_metafile(dn))
- with open(path_to_metafile(dn), "r") as f:
- metadata = yaml.safe_load(f)
- log(dn, 'Loaded metadata', dn['cache'], verbose=True)
- return metadata
- except:
- log(dn, 'WARNING: problem loading metadata', dn)
- return None
-
-
-def path_to_metafile(dn):
- ''' Return the path to metadata file for dn. '''
-
- return os.path.join(get_cache(dn) + '.unpacked', 'baserock',
- dn['name'] + '.meta')
-
-
def compile_rules(dn):
regexps = []
- splits = {}
split_rules = dn.get('products', [])
default_rules = config.defs.defaults['split-rules'][dn.get('kind', 'chunk')]
for rules in split_rules, default_rules:
@@ -164,18 +35,8 @@ def compile_rules(dn):
if artifact.startswith('-'):
artifact = dn['name'] + artifact
regexps.append([artifact, regexp])
- splits[artifact] = []
-
- return regexps, splits
-
-def write_metadata(dn):
- if dn.get('kind', 'chunk') == 'chunk':
- write_chunk_metafile(dn)
- elif dn.get('kind', 'chunk') == 'assemblage':
- write_stratum_metafiles(dn)
- if config.config.get('check-overlaps', 'ignore') != 'ignore':
- check_overlaps(dn)
+ return regexps
def get_splits_for(directory, rules):
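The rewritten compile_rules() above now returns a flat list of [artifact, regexp] pairs and nothing else. A small illustration of the new return shape, assuming the elided loop body still builds each regexp from a rule's 'include' patterns and takes its name from the 'artifact' field, as elsewhere in ybd; the example definition is hypothetical:

    from ybd.splitting import compile_rules

    # Hypothetical chunk definition; '-dev' is expanded to 'zlib-dev' by the
    # startswith('-') branch shown in the hunk above.
    example_chunk = {'name': 'zlib',
                     'kind': 'chunk',
                     'products': [{'artifact': '-dev',
                                   'include': [r'usr/include/.*']}]}

    # Calling it for real needs ybd's config.defs loaded:
    # rules = compile_rules(example_chunk)
    # rules -> [['zlib-dev', <compiled regexp>], ...default chunk rules...]
    # Callers that used to unpack "rules, splits = compile_rules(dn)" now get
    # the splits mapping from get_splits_for() instead.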
@@ -190,85 +51,20 @@ def get_splits_for(directory, rules):
def write_chunk_metafile(chunk, fn):
- '''Writes a chunk .meta file to the baserock dir of the chunk
+ '''Writes a chunk .meta file to the filename given by fn
The split rules are used to divide up the installed files for the chunk
into artifacts in the 'products' list
'''
log(chunk['name'], 'Splitting', chunk.get('kind'))
- rules, splits = compile_rules(chunk)
-
- with chdir(get_cache(chunk) + '.unpacked'):
- for root, dirs, files in os.walk('.', topdown=False):
- for name in files + dirs:
- path = os.path.join(root, name)[2:]
- for artifact, rule in rules:
- if rule.match(path) or rule.match(path + '/'):
- splits[artifact].append(path)
- break
-
- write_metafile(rules, splits, chunk, fn)
-
-
-def write_stratum_metafiles(stratum):
- '''Write the .meta files for a stratum to the baserock dir
-
- The split rules are used to divide up the installed components into
- artifacts in the 'products' list in the stratum .meta file. Each artifact
- contains a list of chunk artifacts which match the stratum splitting rules
-
- '''
-
- log(stratum['name'], 'Splitting', stratum.get('kind'))
- rules, splits = compile_rules(stratum)
-
- for chunk in stratum['contents']:
- if chunk.get('build-mode', 'staging') == 'bootstrap':
- continue
-
- metadata = get_metadata(chunk)
- split_metadata = {'ref': metadata.get('ref'),
- 'repo': metadata.get('repo'),
- 'products': []}
-
- if config.config.get('artifact-version', 0) not in range(0, 1):
- split_metadata['cache'] = metadata.get('cache')
-
- chunk_artifacts = chunk.get('artifacts', {})
- for artifact, target in chunk_artifacts:
- splits[target].append(artifact)
-
- for product in metadata['products']:
- for artifact, rule in rules:
- if rule.match(product['artifact']):
- split_metadata['products'].append(product)
- splits[artifact].append(product['artifact'])
- break
-
- meta = os.path.join(stratum['baserockdir'], chunk['name'] + '.meta')
-
- with open(meta, "w") as f:
- yaml.safe_dump(split_metadata, f, default_flow_style=False)
-
- write_metafile(rules, splits, stratum)
-
-
-def write_metafile(rules, splits, dn, meta):
+ rules = compile_rules(chunk)
+ splits = get_splits_for(get_cache(chunk) + '.unpacked', rules)
metadata = {'products': [{'artifact': a,
'components': sorted(set(splits[a]))}
- for a, r in rules]}
-
- if dn.get('kind', 'chunk') == 'chunk':
- metadata['repo'] = dn.get('repo')
- metadata['ref'] = dn.get('ref')
- else:
- if config.config.get('artifact-version', 0) not in range(0, 2):
- metadata['repo'] = config.config['defdir']
- metadata['ref'] = config.config['def-version']
-
- if config.config.get('artifact-version', 0) not in range(0, 1):
- metadata['cache'] = dn.get('cache')
+ for a, r in rules],
+ 'repo': chunk.get('repo'),
+ 'ref': chunk.get('ref')}
- with open(meta, "w") as f:
+ with open(fn, "w") as f:
yaml.safe_dump(metadata, f, default_flow_style=False)
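With write_chunk_metafile() now taking the destination filename explicitly, a caller-side usage sketch might look like the following; the install-directory layout and the helper name write_meta_into are assumptions, not part of the patch:

    import os
    from ybd.splitting import write_chunk_metafile

    def write_meta_into(chunk, installdir):
        # The caller chooses where the .meta file lands; a baserock/
        # subdirectory of the install tree is one plausible destination.
        baserockdir = os.path.join(installdir, 'baserock')
        if not os.path.isdir(baserockdir):
            os.makedirs(baserockdir)
        fn = os.path.join(baserockdir, chunk['name'] + '.meta')
        write_chunk_metafile(chunk, fn)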