author     Javier Jardón <jjardon@gnome.org>  2016-11-02 14:50:25 +0000
committer  Javier Jardón <jjardon@gnome.org>  2016-11-02 17:57:46 +0000
commit     d4187063349a9108eaa650a4462077dc6072928f (patch)
tree       a49650566fe505098f5b93103b1830cb2344982b
parent     6cc1471da349f65b3f59f7381da56ba07536dbc0 (diff)
download   ybd-d4187063349a9108eaa650a4462077dc6072928f.tar.gz
Move log functions to utils.py
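
With this change, callers import the logging helpers from ybd.utils instead of ybd.app. A minimal sketch of the new import surface (illustrative only; it assumes ybd is importable and its configuration has been initialised):

    import datetime
    # Previously: from ybd.app import log, log_env, elapsed
    from ybd.utils import log, log_env, elapsed

    log('SETUP', 'Hello from utils.log')        # prints a "[SETUP] ..." log line
    print(elapsed(datetime.datetime.now()))     # "00:00:00" -- HH:MM:SS since the given start

ybd/app.py itself now imports log from ybd.utils, as the first hunk below shows.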
-rw-r--r--  ybd/app.py           52
-rw-r--r--  ybd/assembly.py       4
-rw-r--r--  ybd/cache.py         47
-rw-r--r--  ybd/defaults.py       3
-rw-r--r--  ybd/deployment.py     9
-rw-r--r--  ybd/morphs.py         4
-rw-r--r--  ybd/pots.py           3
-rw-r--r--  ybd/release_note.py   3
-rw-r--r--  ybd/repos.py         51
-rw-r--r--  ybd/sandbox.py       40
-rw-r--r--  ybd/splitting.py      4
-rw-r--r--  ybd/utils.py         59
12 files changed, 145 insertions, 134 deletions
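
For reference, the elapsed() helper that moves into ybd/utils.py formats the time since a given start as HH:MM:SS. A standalone copy for illustration (the function body matches the patch below; the example start time is made up):

    import datetime

    def elapsed(starttime):
        # Format the time since `starttime` as HH:MM:SS, as in ybd/utils.py.
        td = datetime.datetime.now() - starttime
        hours, remainder = divmod(int(td.total_seconds()), 60*60)
        minutes, seconds = divmod(remainder, 60)
        return "%02d:%02d:%02d" % (hours, minutes, seconds)

    start = datetime.datetime.now() - datetime.timedelta(hours=1, minutes=2, seconds=3)
    print(elapsed(start))  # -> "01:02:03"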
diff --git a/ybd/app.py b/ybd/app.py
index be27e5f..50584c9 100644
--- a/ybd/app.py
+++ b/ybd/app.py
@@ -27,6 +27,7 @@ from subprocess import call
from fs.osfs import OSFS # not used here, but we import it to check install
from ybd.repos import get_version
from ybd.cache import cache_key
+from ybd.utils import log
try:
from riemann_client.transport import TCPTransport
from riemann_client.client import QueuedClient
@@ -77,48 +78,6 @@ def lockfile(dn):
return os.path.join(config['tmp'], cache_key(dn) + '.lock')
-def log(dn, message='', data='', verbose=False, exit=False):
- ''' Print a timestamped log. '''
-
- if exit:
- print('\n\n')
- message = 'ERROR: ' + message.replace('WARNING: ', '')
-
- if verbose is True and config.get('log-verbose', False) is False:
- return
-
- name = dn['name'] if type(dn) is dict else dn
-
- timestamp = datetime.datetime.now().strftime('%y-%m-%d %H:%M:%S ')
- if config.get('log-timings') == 'elapsed':
- timestamp = timestamp[:9] + elapsed(config['start-time']) + ' '
- if config.get('log-timings', 'omit') == 'omit':
- timestamp = ''
- progress = ''
- if config.get('counter'):
- count = config['counter'].get()
- progress = '[%s/%s/%s] ' % (count, config['tasks'], config['total'])
- entry = '%s%s[%s] %s %s\n' % (timestamp, progress, name, message, data)
- if config.get('instances'):
- entry = str(config.get('fork', 0)) + ' ' + entry
-
- print(entry),
- sys.stdout.flush()
-
- if exit:
- print('\n\n')
- os._exit(1)
-
-
-def log_env(log, env, message=''):
- with open(log, "a") as logfile:
- for key in sorted(env):
- msg = env[key] if 'PASSWORD' not in key else '(hidden)'
- logfile.write('%s=%s\n' % (key, msg))
- logfile.write(message + '\n\n')
- logfile.flush()
-
-
def warning_handler(message, category, filename, lineno, file=None, line=None):
'''Output messages from warnings.warn() - default output is a bit ugly.'''
@@ -292,7 +251,7 @@ def timer(dn, message=''):
except:
raise
text = '' if message == '' else ' for ' + message
- time_elapsed = elapsed(starttime)
+ time_elapsed = utils.elapsed(starttime)
log(dn, 'Elapsed time' + text, time_elapsed)
log_riemann(dn, 'Timer', text, time_elapsed)
@@ -309,13 +268,6 @@ def log_riemann(dn, service, text, time_elapsed):
client.flush()
-def elapsed(starttime):
- td = datetime.datetime.now() - starttime
- hours, remainder = divmod(int(td.total_seconds()), 60*60)
- minutes, seconds = divmod(remainder, 60)
- return "%02d:%02d:%02d" % (hours, minutes, seconds)
-
-
def spawn():
for fork in range(1, config.get('instances')):
if os.fork() == 0:
diff --git a/ybd/assembly.py b/ybd/assembly.py
index 6dcb051..b8ba4aa 100644
--- a/ybd/assembly.py
+++ b/ybd/assembly.py
@@ -21,12 +21,12 @@ import fcntl
import errno
from ybd import app, repos, sandbox
-from ybd.app import config, timer, elapsed
+from ybd.app import config, timer
from ybd.app import log, log_riemann, lockfile, RetryException
from ybd.cache import cache, cache_key, get_cache, get_remote
import datetime
from ybd.splitting import write_metadata, install_split_artifacts
-
+from ybd.utils import elapsed
def compose(dn):
'''Work through defs tree, building and assembling until target exists'''
diff --git a/ybd/cache.py b/ybd/cache.py
index 6d355a5..7934384 100644
--- a/ybd/cache.py
+++ b/ybd/cache.py
@@ -23,6 +23,7 @@ import shutil
from subprocess import call
from ybd import utils
+from ybd.utils import log
from ybd.repos import get_repo_url, get_tree
import tempfile
import yaml
@@ -31,13 +32,13 @@ import re
def cache_key(dn):
if dn is None:
- app.log(dn, 'No definition found for', dn, exit=True)
+ log(dn, 'No definition found for', dn, exit=True)
if type(dn) is not dict:
dn = app.defs.get(dn)
if dn.get('cache') == 'calculating':
- app.log(dn, 'Recursion loop for', dn, exit=True)
+ log(dn, 'Recursion loop for', dn, exit=True)
if dn.get('cache'):
return dn['cache']
@@ -45,7 +46,7 @@ def cache_key(dn):
if dn.get('arch', app.config['arch']) != app.config['arch']:
if 'tried' not in dn:
dn['tried'] = True
- app.log(dn, 'No cache_key for arch %s mismatch' % dn['arch'],
+ log(dn, 'No cache_key for arch %s mismatch' % dn['arch'],
app.config['arch'])
return False
@@ -74,7 +75,7 @@ def cache_key(dn):
if dn.get('kind', 'chunk') == 'system':
app.config['systems'] += 1
- app.log('CACHE-KEYS', '[%s]' % x, dn['cache'])
+ log('CACHE-KEYS', '[%s]' % x, dn['cache'])
if app.config.get('manifest', False):
update_manifest(dn, app.config['manifest'])
@@ -143,7 +144,7 @@ def hash_factors(dn):
def cache(dn):
if get_cache(dn):
- app.log(dn, "Bah! I could have cached", cache_key(dn))
+ log(dn, "Bah! I could have cached", cache_key(dn))
return
tempfile.tempdir = app.config['tmp']
tmpdir = tempfile.mkdtemp()
@@ -196,25 +197,25 @@ def unpack(dn, tmpfile):
unpackdir = tmpfile + '.unpacked'
os.makedirs(unpackdir)
if call(['tar', 'xf', tmpfile, '--directory', unpackdir]):
- app.log(dn, 'Problem unpacking', tmpfile, exit=True)
+ log(dn, 'Problem unpacking', tmpfile, exit=True)
else:
with open(os.devnull, "w") as fnull:
if call(['tar', 'tvf', tmpfile], stdout=fnull, stderr=fnull):
- app.log(dn, 'Problem with tarfile', tmpfile, exit=True)
+ log(dn, 'Problem with tarfile', tmpfile, exit=True)
try:
path = os.path.join(app.config['artifacts'], cache_key(dn))
shutil.move(os.path.dirname(tmpfile), path)
if not os.path.isdir(path):
- app.log(dn, 'Problem creating artifact', path, exit=True)
+ log(dn, 'Problem creating artifact', path, exit=True)
size = os.path.getsize(get_cache(dn))
size = re.sub("(\d)(?=(\d{3})+(?!\d))", r"\1,", "%d" % size)
checksum = md5(get_cache(dn))
- app.log(dn, 'Cached %s bytes %s as' % (size, checksum), cache_key(dn))
+ log(dn, 'Cached %s bytes %s as' % (size, checksum), cache_key(dn))
return path
except:
- app.log(dn, 'Bah! I raced on', cache_key(dn))
+ log(dn, 'Bah! I raced on', cache_key(dn))
shutil.rmtree(os.path.dirname(tmpfile))
return False
@@ -229,23 +230,23 @@ def upload(dn):
try:
response = requests.post(url=url, data=params, files={"file": f})
if response.status_code == 201:
- app.log(dn, 'Uploaded %s to' % dn['cache'], url)
+ log(dn, 'Uploaded %s to' % dn['cache'], url)
return
if response.status_code == 777:
- app.log(dn, 'Reproduced %s at' % md5(cachefile), dn['cache'])
+ log(dn, 'Reproduced %s at' % md5(cachefile), dn['cache'])
app.config['reproduced'].append([md5(cachefile), dn['cache']])
return
if response.status_code == 405:
# server has different md5 for this artifact
if dn['kind'] == 'stratum' and app.config['reproduce']:
- app.log('BIT-FOR-BIT',
+ log('BIT-FOR-BIT',
'WARNING: reproduction failed for', dn['cache'])
- app.log(dn, 'Artifact server already has', dn['cache'])
+ log(dn, 'Artifact server already has', dn['cache'])
return
- app.log(dn, 'Artifact server problem:', response.status_code)
+ log(dn, 'Artifact server problem:', response.status_code)
except:
pass
- app.log(dn, 'Failed to upload', dn['cache'])
+ log(dn, 'Failed to upload', dn['cache'])
def get_cache(dn):
@@ -263,7 +264,7 @@ def get_cache(dn):
tempfile.tempdir = app.config['tmp']
tmpdir = tempfile.mkdtemp()
if call(['tar', 'xf', artifact, '--directory', tmpdir]):
- app.log(dn, 'Problem unpacking', artifact)
+ log(dn, 'Problem unpacking', artifact)
return False
try:
shutil.move(tmpdir, unpackdir)
@@ -288,12 +289,12 @@ def get_remote(dn):
return False
try:
- app.log(dn, 'Try downloading', cache_key(dn))
+ log(dn, 'Try downloading', cache_key(dn))
url = app.config['kbas-url'] + 'get/' + cache_key(dn)
response = requests.get(url=url, stream=True)
except:
app.config.pop('kbas-url')
- app.log(dn, 'WARNING: remote artifact server is not working')
+ log(dn, 'WARNING: remote artifact server is not working')
return False
if response.status_code == 200:
@@ -307,7 +308,7 @@ def get_remote(dn):
return unpack(dn, cachefile)
except:
- app.log(dn, 'WARNING: failed downloading', cache_key(dn))
+ log(dn, 'WARNING: failed downloading', cache_key(dn))
return False
@@ -322,9 +323,9 @@ def cull(artifact_dir):
stat = os.statvfs(artifact_dir)
free = stat.f_frsize * stat.f_bavail / 1000000000
if free >= app.config.get('min-gigabytes', 10):
- app.log('SETUP', '%sGB is enough free space' % free)
+ log('SETUP', '%sGB is enough free space' % free)
if deleted > 0:
- app.log('SETUP', 'Culled %s items in' % deleted,
+ log('SETUP', 'Culled %s items in' % deleted,
artifact_dir)
return True
path = os.path.join(artifact_dir, artifact)
@@ -348,7 +349,7 @@ def cull(artifact_dir):
stat = os.statvfs(artifact_dir)
free = stat.f_frsize * stat.f_bavail / 1000000000
if free < app.config.get('min-gigabytes', 10):
- app.log('SETUP', '%sGB is less than min-gigabytes:' % free,
+ log('SETUP', '%sGB is less than min-gigabytes:' % free,
app.config.get('min-gigabytes', 10), exit=True)
diff --git a/ybd/defaults.py b/ybd/defaults.py
index 52fc16d..d263bdd 100644
--- a/ybd/defaults.py
+++ b/ybd/defaults.py
@@ -26,6 +26,7 @@ These definitions shall be used if no DEFAULTS file is present.
import os
from ybd import app
+from ybd.utils import log
import yaml
@@ -61,7 +62,7 @@ class Defaults(object):
contents = yaml.safe_load(f)
except:
if ignore_errors:
- app.log('DEFAULTS', 'WARNING: problem loading', path)
+ log('DEFAULTS', 'WARNING: problem loading', path)
return None
else:
raise
diff --git a/ybd/deployment.py b/ybd/deployment.py
index a802be6..7d3b150 100644
--- a/ybd/deployment.py
+++ b/ybd/deployment.py
@@ -18,6 +18,7 @@ import os
from subprocess import call
import json
from ybd import app, cache, sandbox
+from ybd.utils import log
def deploy(target):
@@ -41,12 +42,12 @@ def deploy_system(system_spec, parent_location=''):
'''
system = app.defs.get(system_spec['path'])
if not cache.get_cache(system):
- app.log('DEPLOY', 'System is not built, cannot deploy:\n', system,
+ log('DEPLOY', 'System is not built, cannot deploy:\n', system,
exit=True)
deploy_defaults = system_spec.get('deploy-defaults')
with sandbox.setup(system):
- app.log(system, 'Extracting system artifact into', system['sandbox'])
+ log(system, 'Extracting system artifact into', system['sandbox'])
with open(cache.get_cache(system), 'r') as artifact:
call(['tar', 'x', '--directory', system['sandbox']],
stdin=artifact)
@@ -70,7 +71,7 @@ def deploy_system(system_spec, parent_location=''):
try:
sandbox.run_extension(system, deployment, 'check', method)
except KeyError:
- app.log(system, "Couldn't find a check extension for", method)
+ log(system, "Couldn't find a check extension for", method)
for ext in system.get('configuration-extensions', []):
sandbox.run_extension(system, deployment, 'configure',
@@ -80,7 +81,7 @@ def deploy_system(system_spec, parent_location=''):
def do_deployment_manifest(system, configuration):
- app.log(system, "Creating deployment manifest in", system['sandbox'])
+ log(system, "Creating deployment manifest in", system['sandbox'])
data = {'configuration': configuration}
metafile = os.path.join(system['sandbox'], 'baserock', 'deployment.meta')
with app.chdir(system['sandbox']), open(metafile, "w") as f:
diff --git a/ybd/morphs.py b/ybd/morphs.py
index 5c6b6cb..6c9df9f 100644
--- a/ybd/morphs.py
+++ b/ybd/morphs.py
@@ -17,8 +17,10 @@
import yaml
import glob
import os
-from ybd.app import chdir, config, log
+from ybd.app import chdir, config
+from ybd.config import config
from ybd.defaults import Defaults
+from ybd.utils import log
class Morphs(object):
diff --git a/ybd/pots.py b/ybd/pots.py
index 2444b70..7e1ad09 100644
--- a/ybd/pots.py
+++ b/ybd/pots.py
@@ -17,9 +17,10 @@
import os
import yaml
from ybd import app
-from ybd.app import config, log
+from ybd.app import config
from ybd.defaults import Defaults
from ybd.morphs import Morphs
+from ybd.utils import log
# copied from http://stackoverflow.com/questions/21016220
diff --git a/ybd/release_note.py b/ybd/release_note.py
index 68d20fd..4fbfa39 100644
--- a/ybd/release_note.py
+++ b/ybd/release_note.py
@@ -18,10 +18,11 @@ import os
from subprocess import check_output
import tempfile
from ybd import app
-from ybd.app import chdir, config, log
+from ybd.app import chdir, config
from ybd.morphs import Morphs
from ybd.repos import explore, get_last_tag, get_repo_name
from ybd.repos import mirror, mirror_has_ref
+from ybd.utils import log
def do_release_note(release_note):
diff --git a/ybd/repos.py b/ybd/repos.py
index 62337a6..5c56eba 100644
--- a/ybd/repos.py
+++ b/ybd/repos.py
@@ -23,6 +23,7 @@ from subprocess import call, check_output
import sys
import requests
from ybd import utils
+from ybd.utils import log
import tempfile
@@ -92,7 +93,7 @@ def get_tree(dn):
if dn['repo'].startswith('file://') or dn['repo'].startswith('/'):
gitdir = dn['repo'].replace('file://', '')
if not os.path.isdir(gitdir):
- app.log(dn, 'Git repo not found:', dn['repo'], exit=True)
+ log(dn, 'Git repo not found:', dn['repo'], exit=True)
if not os.path.exists(gitdir):
try:
@@ -101,7 +102,7 @@ def get_tree(dn):
return r.json()['tree']
except:
if app.config.get('tree-server'):
- app.log(dn, 'WARNING: no tree from tree-server for', ref)
+ log(dn, 'WARNING: no tree from tree-server for', ref)
mirror(dn['name'], dn['repo'])
@@ -109,7 +110,7 @@ def get_tree(dn):
if call(['git', 'rev-parse', ref + '^{object}'], stdout=fnull,
stderr=fnull):
# can't resolve ref. is it upstream?
- app.log(dn, 'Fetching from upstream to resolve %s' % ref)
+ log(dn, 'Fetching from upstream to resolve %s' % ref)
update_mirror(dn['name'], dn['repo'], gitdir)
try:
@@ -120,7 +121,7 @@ def get_tree(dn):
except:
# either we don't have a git dir, or ref is not unique
# or ref does not exist
- app.log(dn, 'No tree for ref', (ref, gitdir), exit=True)
+ log(dn, 'No tree for ref', (ref, gitdir), exit=True)
def mirror(name, repo):
@@ -129,7 +130,7 @@ def mirror(name, repo):
repo_url = get_repo_url(repo)
try:
tar_file = get_repo_name(repo_url) + '.tar'
- app.log(name, 'Try fetching tarball %s' % tar_file)
+ log(name, 'Try fetching tarball %s' % tar_file)
# try tarball first
with app.chdir(tmpdir), open(os.devnull, "w") as fnull:
call(['wget', os.path.join(app.config['tar-url'], tar_file)],
@@ -138,19 +139,19 @@ def mirror(name, repo):
os.remove(tar_file)
update_mirror(name, repo, tmpdir)
except:
- app.log(name, 'Try git clone from', repo_url)
+ log(name, 'Try git clone from', repo_url)
with open(os.devnull, "w") as fnull:
if call(['git', 'clone', '--mirror', '-n', repo_url, tmpdir]):
- app.log(name, 'Failed to clone', repo, exit=True)
+ log(name, 'Failed to clone', repo, exit=True)
with app.chdir(tmpdir):
if call(['git', 'rev-parse']):
- app.log(name, 'Problem mirroring git repo at', tmpdir, exit=True)
+ log(name, 'Problem mirroring git repo at', tmpdir, exit=True)
gitdir = os.path.join(app.config['gits'], get_repo_name(repo))
try:
shutil.move(tmpdir, gitdir)
- app.log(name, 'Git repo is mirrored at', gitdir)
+ log(name, 'Git repo is mirrored at', gitdir)
except:
pass
@@ -168,11 +169,11 @@ def mirror_has_ref(gitdir, ref):
def update_mirror(name, repo, gitdir):
with app.chdir(gitdir), open(os.devnull, "w") as fnull:
- app.log(name, 'Refreshing mirror for %s' % repo)
+ log(name, 'Refreshing mirror for %s' % repo)
repo_url = get_repo_url(repo)
if call(['git', 'fetch', repo_url, '+refs/*:refs/*', '--prune'],
stdout=fnull, stderr=fnull):
- app.log(name, 'Git update mirror failed', repo, exit=True)
+ log(name, 'Git update mirror failed', repo, exit=True)
def checkout(dn):
@@ -200,15 +201,15 @@ def _checkout(name, repo, ref, checkout):
# removed --no-hardlinks, though.
if call(['git', 'clone', '--no-hardlinks', gitdir, checkout],
stdout=fnull, stderr=fnull):
- app.log(name, 'Git clone failed for', ref, exit=True)
+ log(name, 'Git clone failed for', ref, exit=True)
with app.chdir(checkout):
if call(['git', 'checkout', '--force', ref], stdout=fnull,
stderr=fnull):
- app.log(name, 'Git checkout failed for', ref, exit=True)
+ log(name, 'Git checkout failed for', ref, exit=True)
- app.log(name, 'Git checkout %s in %s' % (repo, checkout))
- app.log(name, 'Upstream version %s' % get_version(checkout, ref))
+ log(name, 'Git checkout %s in %s' % (repo, checkout))
+ log(name, 'Upstream version %s' % get_version(checkout, ref))
def source_date_epoch(checkout):
@@ -233,19 +234,19 @@ def extract_commit(name, repo, ref, target_dir):
git_env['GIT_INDEX_FILE'] = git_index_file.name
git_env['GIT_WORK_TREE'] = target_dir
- app.log(name, 'Extracting commit', ref)
+ log(name, 'Extracting commit', ref)
if call(['git', 'read-tree', ref], env=git_env, cwd=gitdir):
- app.log(name, 'git read-tree failed for', ref, exit=True)
- app.log(name, 'Then checkout index', ref)
+ log(name, 'git read-tree failed for', ref, exit=True)
+ log(name, 'Then checkout index', ref)
if call(['git', 'checkout-index', '--all'], env=git_env, cwd=gitdir):
- app.log(name, 'Git checkout-index failed for', ref, exit=True)
- app.log(name, 'Done', ref)
+ log(name, 'Git checkout-index failed for', ref, exit=True)
+ log(name, 'Done', ref)
utils.set_mtime_recursively(target_dir)
def checkout_submodules(dn):
- app.log(dn, 'Checking git submodules')
+ log(dn, 'Checking git submodules')
with open('.gitmodules', "r") as gitfile:
# drop indentation in sections, as RawConfigParser cannot handle it
content = '\n'.join([l.strip() for l in gitfile.read().splitlines()])
@@ -259,10 +260,10 @@ def checkout_submodules(dn):
path = parser.get(section, 'path')
try:
url = dn['submodules'][path]['url']
- app.log(dn, 'Processing submodule %s from' % path, url)
+ log(dn, 'Processing submodule %s from' % path, url)
except:
url = parser.get(section, 'url')
- app.log(dn, 'WARNING: fallback to submodule %s from' % path, url)
+ log(dn, 'WARNING: fallback to submodule %s from' % path, url)
try:
# list objects in the parent repo tree to find the commit
@@ -282,11 +283,11 @@ def checkout_submodules(dn):
_checkout(dn['name'], url, submodule_commit, fulldir)
else:
- app.log(dn, 'Skipping submodule %s, not a commit:' % path,
+ log(dn, 'Skipping submodule %s, not a commit:' % path,
fields)
except:
- app.log(dn, "Git submodules problem", exit=True)
+ log(dn, "Git submodules problem", exit=True)
@contextlib.contextmanager
diff --git a/ybd/sandbox.py b/ybd/sandbox.py
index 8d869c2..1f489c0 100644
--- a/ybd/sandbox.py
+++ b/ybd/sandbox.py
@@ -26,7 +26,7 @@ from subprocess import call, PIPE
from ybd import app, cache, utils
from ybd.repos import get_repo_url
-
+from ybd.utils import log
# This must be set to a sandboxlib backend before the run_sandboxed() function
# can be used.
@@ -59,13 +59,13 @@ def setup(dn):
raise e
except:
import traceback
- app.log(dn, 'ERROR: surprise exception in sandbox', '')
+ log(dn, 'ERROR: surprise exception in sandbox', '')
traceback.print_exc()
- app.log(dn, 'Sandbox debris is at', dn['sandbox'], exit=True)
+ log(dn, 'Sandbox debris is at', dn['sandbox'], exit=True)
finally:
pass
- app.log(dn, "Removing sandbox dir", dn['sandbox'], verbose=True)
+ log(dn, "Removing sandbox dir", dn['sandbox'], verbose=True)
app.remove_dir(dn['sandbox'])
@@ -74,9 +74,9 @@ def install(dn, component):
if os.path.exists(os.path.join(dn['sandbox'], 'baserock',
component['name'] + '.meta')):
return
- app.log(dn, 'Sandbox: installing %s' % component['cache'], verbose=True)
+ log(dn, 'Sandbox: installing %s' % component['cache'], verbose=True)
if cache.get_cache(component) is False:
- app.log(dn, 'Unable to get cache for', component['name'], exit=True)
+ log(dn, 'Unable to get cache for', component['name'], exit=True)
unpackdir = cache.get_cache(component) + '.unpacked'
if dn.get('kind') is 'system':
utils.copy_all_files(unpackdir, dn['sandbox'])
@@ -93,7 +93,7 @@ def ldconfig(dn):
run_logged(dn, cmd_list)
os.environ['PATH'] = path
else:
- app.log(dn, 'No %s, not running ldconfig' % conf)
+ log(dn, 'No %s, not running ldconfig' % conf)
def argv_to_string(argv):
@@ -103,7 +103,7 @@ def argv_to_string(argv):
def run_sandboxed(dn, command, env=None, allow_parallel=False):
global executor
- app.log(dn, 'Running command:\n%s' % command)
+ log(dn, 'Running command:\n%s' % command)
with open(dn['log'], "a") as logfile:
logfile.write("# # %s\n" % command)
@@ -177,15 +177,15 @@ def run_sandboxed(dn, command, env=None, allow_parallel=False):
except:
import traceback
traceback.print_exc()
- app.log('SANDBOX', 'ERROR: in run_sandbox_with_redirection',
+ log('SANDBOX', 'ERROR: in run_sandbox_with_redirection',
exit_code)
if exit_code != 0:
- app.log(dn, 'ERROR: command failed in directory %s:\n\n' %
+ log(dn, 'ERROR: command failed in directory %s:\n\n' %
os.getcwd(), argv_to_string(argv))
call(['tail', '-n', '200', dn['log']])
- app.log(dn, 'ERROR: log file is at', dn['log'])
- app.log(dn, 'Sandbox debris is at', dn['sandbox'], exit=True)
+ log(dn, 'ERROR: log file is at', dn['log'])
+ log(dn, 'Sandbox debris is at', dn['sandbox'], exit=True)
finally:
if cur_makeflags is not None:
env['MAKEFLAGS'] = cur_makeflags
@@ -195,14 +195,14 @@ def run_logged(dn, cmd_list):
app.log_env(dn['log'], os.environ, argv_to_string(cmd_list))
with open(dn['log'], "a") as logfile:
if call(cmd_list, stdin=PIPE, stdout=logfile, stderr=logfile):
- app.log(dn, 'ERROR: command failed in directory %s:\n\n' %
+ log(dn, 'ERROR: command failed in directory %s:\n\n' %
os.getcwd(), argv_to_string(cmd_list))
call(['tail', '-n', '200', dn['log']])
- app.log(dn, 'Log file is at', dn['log'], exit=True)
+ log(dn, 'Log file is at', dn['log'], exit=True)
def run_extension(dn, deployment, step, method):
- app.log(dn, 'Running %s extension:' % step, method)
+ log(dn, 'Running %s extension:' % step, method)
extensions = utils.find_extensions()
tempfile.tempdir = app.config['tmp']
cmd_tmp = tempfile.NamedTemporaryFile(delete=False)
@@ -237,7 +237,7 @@ def run_extension(dn, deployment, step, method):
os.chmod(cmd_tmp.name, 0o700)
if call(command):
- app.log(dn, 'ERROR: %s extension failed:' % step, cmd_bin)
+ log(dn, 'ERROR: %s extension failed:' % step, cmd_bin)
raise SystemExit
finally:
os.remove(cmd_tmp.name)
@@ -336,18 +336,18 @@ def create_devices(dn):
raise IOError('Cannot create device node %s,'
'unrecognized device type "%s"'
% (destfile, device['type']))
- app.log(dn, "Creating device node", destfile)
+ log(dn, "Creating device node", destfile)
os.mknod(destfile, mode, os.makedev(device['major'], device['minor']))
os.chown(destfile, device['uid'], device['gid'])
def list_files(component):
- app.log(component, 'Sandbox %s contains\n' % component['sandbox'],
+ log(component, 'Sandbox %s contains\n' % component['sandbox'],
os.listdir(component['sandbox']))
try:
files = os.listdir(os.path.join(component['sandbox'], 'baserock'))
- app.log(component,
+ log(component,
'Baserock directory contains %s items\n' % len(files),
sorted(files))
except:
- app.log(component, 'No baserock directory in', component['sandbox'])
+ log(component, 'No baserock directory in', component['sandbox'])
diff --git a/ybd/splitting.py b/ybd/splitting.py
index efd819c..1f11615 100644
--- a/ybd/splitting.py
+++ b/ybd/splitting.py
@@ -15,12 +15,12 @@
# =*= License: GPL-2 =*=
from ybd import app
-from ybd.app import config, log, chdir
+from ybd.app import config, chdir
from ybd.cache import get_cache
import os
import re
import yaml
-from ybd.utils import copy_file_list
+from ybd.utils import copy_file_list, log
def install_split_artifacts(dn):
diff --git a/ybd/utils.py b/ybd/utils.py
index fa5b96e..62d3910 100644
--- a/ybd/utils.py
+++ b/ybd/utils.py
@@ -14,12 +14,14 @@
#
# =*= License: GPL-2 =*=
+import datetime
import gzip
import tarfile
import contextlib
import os
import shutil
import stat
+import sys
from fs.osfs import OSFS
from fs.multifs import MultiFS
import calendar
@@ -132,9 +134,16 @@ def hardlink_all_files(srcpath, destpath):
_process_tree(destpath, srcpath, destpath, os.link)
+def elapsed(starttime):
+ td = datetime.datetime.now() - starttime
+ hours, remainder = divmod(int(td.total_seconds()), 60*60)
+ minutes, seconds = divmod(remainder, 60)
+ return "%02d:%02d:%02d" % (hours, minutes, seconds)
+
+
def _process_tree(root, srcpath, destpath, actionfunc):
if os.path.lexists(destpath):
- app.log('OVERLAPS', 'WARNING: overlap at', destpath, verbose=True)
+ log('OVERLAPS', 'WARNING: overlap at', destpath, verbose=True)
file_stat = os.lstat(srcpath)
mode = file_stat.st_mode
@@ -152,7 +161,7 @@ def _process_tree(root, srcpath, destpath, actionfunc):
print('destpath is', destpath)
print('realpath is', realpath)
- app.log('UTILS', 'ERROR: file operation failed', exit=True)
+ log('UTILS', 'ERROR: file operation failed', exit=True)
if not stat.S_ISDIR(dest_stat.st_mode):
raise IOError('Destination not a directory: source has %s'
@@ -262,8 +271,8 @@ def _process_list(srcdir, destdir, filelist, actionfunc):
file_stat = os.lstat(srcpath)
mode = file_stat.st_mode
except UnicodeEncodeError as ue:
- app.log("UnicodeErr",
- "Couldn't get lstat info for '%s'." % srcpath)
+ log("UnicodeErr",
+ "Couldn't get lstat info for '%s'." % srcpath)
raise ue
if stat.S_ISDIR(mode):
@@ -409,3 +418,45 @@ def monkeypatch(obj, attr, new_value):
setattr(obj, attr, new_value)
yield
setattr(obj, attr, old_value)
+
+
+def log(dn, message='', data='', verbose=False, exit=False):
+ ''' Print a timestamped log. '''
+
+ if exit:
+ print('\n\n')
+ message = 'ERROR: ' + message.replace('WARNING: ', '')
+
+ if verbose is True and config.get('log-verbose', False) is False:
+ return
+
+ name = dn['name'] if type(dn) is dict else dn
+
+ timestamp = datetime.datetime.now().strftime('%y-%m-%d %H:%M:%S ')
+ if config.get('log-timings') == 'elapsed':
+ timestamp = timestamp[:9] + elapsed(config['start-time']) + ' '
+ if config.get('log-timings', 'omit') == 'omit':
+ timestamp = ''
+ progress = ''
+ if config.get('counter'):
+ count = config['counter'].get()
+ progress = '[%s/%s/%s] ' % (count, config['tasks'], config['total'])
+ entry = '%s%s[%s] %s %s\n' % (timestamp, progress, name, message, data)
+ if config.get('instances'):
+ entry = str(config.get('fork', 0)) + ' ' + entry
+
+ print(entry),
+ sys.stdout.flush()
+
+ if exit:
+ print('\n\n')
+ os._exit(1)
+
+
+def log_env(log, env, message=''):
+ with open(log, "a") as logfile:
+ for key in sorted(env):
+ msg = env[key] if 'PASSWORD' not in key else '(hidden)'
+ logfile.write('%s=%s\n' % (key, msg))
+ logfile.write(message + '\n\n')
+ logfile.flush()