diff options
author | Daniel Firth <dan.firth@codethink.co.uk> | 2016-11-22 15:38:27 +0000 |
---|---|---|
committer | Daniel Firth <dan.firth@codethink.co.uk> | 2016-11-22 15:38:27 +0000 |
commit | 7716570fe539973b4e140ba279238aff095e96d8 (patch) | |
tree | 636e08cb60d53049643a0dcd97fede55d0f36a2e | |
parent | 0f3e02cf891df1a301b83dc054bf0e23a6334f0b (diff) | |
download | ybd-staging/V10-new.tar.gz |
Remove obsolete caching code (branch: staging/V10-new)
-rw-r--r-- | ybd/cache.py | 110 |
1 file changed, 0 insertions, 110 deletions
diff --git a/ybd/cache.py b/ybd/cache.py index b5c0924..c7f4f26 100644 --- a/ybd/cache.py +++ b/ybd/cache.py @@ -31,119 +31,9 @@ import re def cache_key(dn): - if dn is None: - log(dn, 'No definition found for', dn, exit=True) - - if type(dn) is not dict: - dn = config.defs.get(dn) - - if dn.get('cache') == 'calculating': - log(dn, 'Recursion loop for', dn, exit=True) - - if dn.get('cache'): - return dn['cache'] - - if dn.get('arch', config.config['arch']) != config.config['arch']: - if 'tried' not in dn: - dn['tried'] = True - log(dn, 'No cache_key for arch %s mismatch' % dn['arch'], - config.config['arch']) - return False - - dn['cache'] = 'calculating' - - key = 'no-build' - if config.config.get('mode', 'normal') in ['keys-only', 'normal']: - if dn.get('repo') and not dn.get('tree'): - dn['tree'] = get_tree(dn) - factors = hash_factors(dn) - factors = json.dumps(factors, sort_keys=True).encode('utf-8') - key = hashlib.sha256(factors).hexdigest() - - dn['cache'] = dn['name'] + "." + key - - config.config['total'] += 1 - x = 'x' - if not get_cache(dn): - x = ' ' - config.config['tasks'] += 1 - - if dn.get('kind', 'chunk') == 'chunk': - config.config['chunks'] += 1 - if dn.get('kind', 'chunk') == 'stratum': - config.config['strata'] += 1 - if dn.get('kind', 'chunk') == 'system': - config.config['systems'] += 1 - - log('CACHE-KEYS', '[%s]' % x, dn['cache']) - if config.config.get('manifest', False): - update_manifest(dn, config.config['manifest']) - - if 'keys' in config.config: - config.config['keys'] += [dn['cache']] return dn['cache'] -def hash_factors(dn): - hash_factors = {'arch': config.config['arch']} - - for factor in dn.get('build-depends', []): - hash_factors[factor] = cache_key(factor) - - for factor in dn.get('contents', []): - key = list(factor.keys())[0] - hash_factors[key] = cache_key(key) - - relevant_factors = ['tree', 'submodules'] + \ - config.defs.defaults['build-steps'] - if config.config.get('artifact-version', False) not in range(0, 6): - 
relevant_factors += ['devices'] - - for factor in relevant_factors: - if dn.get(factor): - hash_factors[factor] = dn[factor] - - if dn.get('kind') == 'system': - if config.config.get('default-splits', []) != []: - hash_factors['splits'] = config.config.get('default-splits') - - def hash_system_recursively(system): - factor = system.get('path', 'BROKEN') - hash_factors[factor] = cache_key(factor) - for subsystem in system.get('subsystems', []): - hash_system_recursively(subsystem) - - if dn.get('kind') == 'cluster': - for system in dn.get('systems', []): - hash_system_recursively(system) - - if config.config.get('artifact-version', False): - hash_factors['artifact-version'] = config.config['artifact-version'] - - if config.config.get('artifact-version', 0) in range(0, 2): - # this way, any change to any build-system invalidates all caches - hash_factors['default-build-systems'] = \ - config.defs.defaults.build_systems - else: - # this way is better - only affected components get a new key - hash_factors['default-build-systems'] = \ - config.defs.defaults.build_systems.get(dn.get('build-system', - 'manual')) - if (config.config.get('default-splits', []) != [] and - dn.get('kind') == 'system'): - hash_factors['default-splits'] = \ - config.config['default-splits'] - - if config.config.get('artifact-version', 0) not in range(0, 7): - if dn.get('max-jobs'): - if dn['max-jobs'] == 1: - hash_factors['max-jobs'] = 'single' - else: - hash_factors['max-jobs'] = 'parallel' - - return hash_factors - - def cache(dn): if get_cache(dn): log(dn, "Bah! I could have cached", cache_key(dn)) |