From 72bcdbd0a0c8cc6aa2a7433169aa49c7fc19b55b Mon Sep 17 00:00:00 2001 From: Sebastian Thiel Date: Thu, 4 Mar 2010 09:06:03 +0100 Subject: Converted all tabs to 4 space characters each to comply with pep8 --- ez_setup.py | 322 ++--- lib/git/__init__.py | 2 +- lib/git/actor.py | 106 +- lib/git/cmd.py | 742 ++++++------ lib/git/config.py | 784 ++++++------- lib/git/diff.py | 668 +++++------ lib/git/errors.py | 34 +- lib/git/index.py | 2644 +++++++++++++++++++++--------------------- lib/git/objects/__init__.py | 2 +- lib/git/objects/base.py | 416 +++---- lib/git/objects/blob.py | 42 +- lib/git/objects/commit.py | 698 +++++------ lib/git/objects/tag.py | 124 +- lib/git/objects/tree.py | 454 ++++---- lib/git/objects/utils.py | 296 ++--- lib/git/refs.py | 1826 ++++++++++++++--------------- lib/git/remote.py | 1520 ++++++++++++------------ lib/git/repo.py | 1522 ++++++++++++------------ lib/git/stats.py | 104 +- lib/git/utils.py | 716 ++++++------ setup.py | 106 +- test/git/test_actor.py | 44 +- test/git/test_base.py | 160 +-- test/git/test_blob.py | 36 +- test/git/test_commit.py | 304 ++--- test/git/test_config.py | 182 +-- test/git/test_diff.py | 148 +-- test/git/test_git.py | 128 +- test/git/test_index.py | 1022 ++++++++-------- test/git/test_performance.py | 74 +- test/git/test_refs.py | 722 ++++++------ test/git/test_remote.py | 844 +++++++------- test/git/test_repo.py | 508 ++++---- test/git/test_stats.py | 30 +- test/git/test_tree.py | 106 +- test/git/test_utils.py | 204 ++-- test/testlib/__init__.py | 2 +- test/testlib/asserts.py | 46 +- test/testlib/helper.py | 468 ++++---- 39 files changed, 9078 insertions(+), 9078 deletions(-) diff --git a/ez_setup.py b/ez_setup.py index 50d0075b..3031ad0d 100644 --- a/ez_setup.py +++ b/ez_setup.py @@ -4,8 +4,8 @@ If you want to use setuptools in your package's setup.py, just include this file in the same directory with it, and add this to the top of your setup.py:: - from ez_setup import use_setuptools - use_setuptools() + from ez_setup import use_setuptools + use_setuptools() If you want to require a specific version of setuptools, set a download mirror, or use an alternate download directory, you can do so by supplying @@ -15,106 +15,106 @@ This file can also be run as a script to install or upgrade setuptools. 
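For illustration, the script entry points defined at the bottom of this file are commonly invoked like this (grounded in the __main__ block further down; the egg file names are placeholders)::

    python ez_setup.py                         # install or upgrade setuptools and EasyInstall
    python ez_setup.py -U setuptools           # reinstall/upgrade, as suggested by the script's own hint
    python ez_setup.py --md5update dist/*.egg  # maintainer helper: refresh the built-in md5 registry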
""" import sys DEFAULT_VERSION = "0.6c3" -DEFAULT_URL = "http://cheeseshop.python.org/packages/%s/s/setuptools/" % sys.version[:3] +DEFAULT_URL = "http://cheeseshop.python.org/packages/%s/s/setuptools/" % sys.version[:3] md5_data = { - 'setuptools-0.6b1-py2.3.egg': '8822caf901250d848b996b7f25c6e6ca', - 'setuptools-0.6b1-py2.4.egg': 'b79a8a403e4502fbb85ee3f1941735cb', - 'setuptools-0.6b2-py2.3.egg': '5657759d8a6d8fc44070a9d07272d99b', - 'setuptools-0.6b2-py2.4.egg': '4996a8d169d2be661fa32a6e52e4f82a', - 'setuptools-0.6b3-py2.3.egg': 'bb31c0fc7399a63579975cad9f5a0618', - 'setuptools-0.6b3-py2.4.egg': '38a8c6b3d6ecd22247f179f7da669fac', - 'setuptools-0.6b4-py2.3.egg': '62045a24ed4e1ebc77fe039aa4e6f7e5', - 'setuptools-0.6b4-py2.4.egg': '4cb2a185d228dacffb2d17f103b3b1c4', - 'setuptools-0.6c1-py2.3.egg': 'b3f2b5539d65cb7f74ad79127f1a908c', - 'setuptools-0.6c1-py2.4.egg': 'b45adeda0667d2d2ffe14009364f2a4b', - 'setuptools-0.6c2-py2.3.egg': 'f0064bf6aa2b7d0f3ba0b43f20817c27', - 'setuptools-0.6c2-py2.4.egg': '616192eec35f47e8ea16cd6a122b7277', - 'setuptools-0.6c3-py2.3.egg': 'f181fa125dfe85a259c9cd6f1d7b78fa', - 'setuptools-0.6c3-py2.4.egg': 'e0ed74682c998bfb73bf803a50e7b71e', - 'setuptools-0.6c3-py2.5.egg': 'abef16fdd61955514841c7c6bd98965e', + 'setuptools-0.6b1-py2.3.egg': '8822caf901250d848b996b7f25c6e6ca', + 'setuptools-0.6b1-py2.4.egg': 'b79a8a403e4502fbb85ee3f1941735cb', + 'setuptools-0.6b2-py2.3.egg': '5657759d8a6d8fc44070a9d07272d99b', + 'setuptools-0.6b2-py2.4.egg': '4996a8d169d2be661fa32a6e52e4f82a', + 'setuptools-0.6b3-py2.3.egg': 'bb31c0fc7399a63579975cad9f5a0618', + 'setuptools-0.6b3-py2.4.egg': '38a8c6b3d6ecd22247f179f7da669fac', + 'setuptools-0.6b4-py2.3.egg': '62045a24ed4e1ebc77fe039aa4e6f7e5', + 'setuptools-0.6b4-py2.4.egg': '4cb2a185d228dacffb2d17f103b3b1c4', + 'setuptools-0.6c1-py2.3.egg': 'b3f2b5539d65cb7f74ad79127f1a908c', + 'setuptools-0.6c1-py2.4.egg': 'b45adeda0667d2d2ffe14009364f2a4b', + 'setuptools-0.6c2-py2.3.egg': 'f0064bf6aa2b7d0f3ba0b43f20817c27', + 'setuptools-0.6c2-py2.4.egg': '616192eec35f47e8ea16cd6a122b7277', + 'setuptools-0.6c3-py2.3.egg': 'f181fa125dfe85a259c9cd6f1d7b78fa', + 'setuptools-0.6c3-py2.4.egg': 'e0ed74682c998bfb73bf803a50e7b71e', + 'setuptools-0.6c3-py2.5.egg': 'abef16fdd61955514841c7c6bd98965e', } import sys, os def _validate_md5(egg_name, data): - if egg_name in md5_data: - from md5 import md5 - digest = md5(data).hexdigest() - if digest != md5_data[egg_name]: - print >>sys.stderr, ( - "md5 validation of %s failed! (Possible download problem?)" - % egg_name - ) - sys.exit(2) - return data + if egg_name in md5_data: + from md5 import md5 + digest = md5(data).hexdigest() + if digest != md5_data[egg_name]: + print >>sys.stderr, ( + "md5 validation of %s failed! (Possible download problem?)" + % egg_name + ) + sys.exit(2) + return data def use_setuptools( - version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir, - download_delay=15 + version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir, + download_delay=15 ): - """Automatically find/download setuptools and make it available on sys.path - - `version` should be a valid setuptools version number that is available - as an egg for download under the `download_base` URL (which should end with - a '/'). `to_dir` is the directory where setuptools will be downloaded, if - it is not already available. If `download_delay` is specified, it should - be the number of seconds that will be paused before initiating a download, - should one be required. 
If an older version of setuptools is installed, - this routine will print a message to ``sys.stderr`` and raise SystemExit in - an attempt to abort the calling script. - """ - try: - import setuptools - if setuptools.__version__ == '0.0.1': - print >>sys.stderr, ( - "You have an obsolete version of setuptools installed. Please\n" - "remove it from your system entirely before rerunning this script." - ) - sys.exit(2) - except ImportError: - egg = download_setuptools(version, download_base, to_dir, download_delay) - sys.path.insert(0, egg) - import setuptools; setuptools.bootstrap_install_from = egg - - import pkg_resources - try: - pkg_resources.require("setuptools>="+version) - - except pkg_resources.VersionConflict, e: - # XXX could we install in a subprocess here? - print >>sys.stderr, ( - "The required version of setuptools (>=%s) is not available, and\n" - "can't be installed while this script is running. Please install\n" - " a more recent version first.\n\n(Currently using %r)" - ) % (version, e.args[0]) - sys.exit(2) + """Automatically find/download setuptools and make it available on sys.path + + `version` should be a valid setuptools version number that is available + as an egg for download under the `download_base` URL (which should end with + a '/'). `to_dir` is the directory where setuptools will be downloaded, if + it is not already available. If `download_delay` is specified, it should + be the number of seconds that will be paused before initiating a download, + should one be required. If an older version of setuptools is installed, + this routine will print a message to ``sys.stderr`` and raise SystemExit in + an attempt to abort the calling script. + """ + try: + import setuptools + if setuptools.__version__ == '0.0.1': + print >>sys.stderr, ( + "You have an obsolete version of setuptools installed. Please\n" + "remove it from your system entirely before rerunning this script." + ) + sys.exit(2) + except ImportError: + egg = download_setuptools(version, download_base, to_dir, download_delay) + sys.path.insert(0, egg) + import setuptools; setuptools.bootstrap_install_from = egg + + import pkg_resources + try: + pkg_resources.require("setuptools>="+version) + + except pkg_resources.VersionConflict, e: + # XXX could we install in a subprocess here? + print >>sys.stderr, ( + "The required version of setuptools (>=%s) is not available, and\n" + "can't be installed while this script is running. Please install\n" + " a more recent version first.\n\n(Currently using %r)" + ) % (version, e.args[0]) + sys.exit(2) def download_setuptools( - version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir, - delay = 15 + version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir, + delay = 15 ): - """Download setuptools from a specified location and return its filename - - `version` should be a valid setuptools version number that is available - as an egg for download under the `download_base` URL (which should end - with a '/'). `to_dir` is the directory where the egg will be downloaded. - `delay` is the number of seconds to pause before an actual download attempt. 
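For illustration, a minimal sketch of calling the two bootstrap helpers documented above; the version, directory and delay values are assumptions chosen for the example::

    from ez_setup import use_setuptools, download_setuptools

    use_setuptools(version="0.6c3", download_delay=0)                 # bootstrap setuptools for a setup.py run
    egg = download_setuptools(version="0.6c3", to_dir=".", delay=0)   # only fetch the egg; returns its path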
- """ - import urllib2, shutil - egg_name = "setuptools-%s-py%s.egg" % (version,sys.version[:3]) - url = download_base + egg_name - saveto = os.path.join(to_dir, egg_name) - src = dst = None - if not os.path.exists(saveto): # Avoid repeated downloads - try: - from distutils import log - if delay: - log.warn(""" + """Download setuptools from a specified location and return its filename + + `version` should be a valid setuptools version number that is available + as an egg for download under the `download_base` URL (which should end + with a '/'). `to_dir` is the directory where the egg will be downloaded. + `delay` is the number of seconds to pause before an actual download attempt. + """ + import urllib2, shutil + egg_name = "setuptools-%s-py%s.egg" % (version,sys.version[:3]) + url = download_base + egg_name + saveto = os.path.join(to_dir, egg_name) + src = dst = None + if not os.path.exists(saveto): # Avoid repeated downloads + try: + from distutils import log + if delay: + log.warn(""" --------------------------------------------------------------------------- This script requires setuptools version %s to run (even to display -help). I will attempt to download it for you (from +help). I will attempt to download it for you (from %s), but you may need to enable firewall access for this script first. I will start the download in %d seconds. @@ -125,96 +125,96 @@ I will start the download in %d seconds. and place it in this directory before rerunning this script.) ---------------------------------------------------------------------------""", - version, download_base, delay, url - ); from time import sleep; sleep(delay) - log.warn("Downloading %s", url) - src = urllib2.urlopen(url) - # Read/write all in one block, so we don't create a corrupt file - # if the download is interrupted. - data = _validate_md5(egg_name, src.read()) - dst = open(saveto,"wb"); dst.write(data) - finally: - if src: src.close() - if dst: dst.close() - return os.path.realpath(saveto) + version, download_base, delay, url + ); from time import sleep; sleep(delay) + log.warn("Downloading %s", url) + src = urllib2.urlopen(url) + # Read/write all in one block, so we don't create a corrupt file + # if the download is interrupted. + data = _validate_md5(egg_name, src.read()) + dst = open(saveto,"wb"); dst.write(data) + finally: + if src: src.close() + if dst: dst.close() + return os.path.realpath(saveto) def main(argv, version=DEFAULT_VERSION): - """Install or upgrade setuptools and EasyInstall""" - - try: - import setuptools - except ImportError: - egg = None - try: - egg = download_setuptools(version, delay=0) - sys.path.insert(0,egg) - from setuptools.command.easy_install import main - return main(list(argv)+[egg]) # we're done here - finally: - if egg and os.path.exists(egg): - os.unlink(egg) - else: - if setuptools.__version__ == '0.0.1': - # tell the user to uninstall obsolete version - use_setuptools(version) - - req = "setuptools>="+version - import pkg_resources - try: - pkg_resources.require(req) - except pkg_resources.VersionConflict: - try: - from setuptools.command.easy_install import main - except ImportError: - from easy_install import main - main(list(argv)+[download_setuptools(delay=0)]) - sys.exit(0) # try to force an exit - else: - if argv: - from setuptools.command.easy_install import main - main(argv) - else: - print "Setuptools version",version,"or greater has been installed." 
- print '(Run "ez_setup.py -U setuptools" to reinstall or upgrade.)' + """Install or upgrade setuptools and EasyInstall""" + + try: + import setuptools + except ImportError: + egg = None + try: + egg = download_setuptools(version, delay=0) + sys.path.insert(0,egg) + from setuptools.command.easy_install import main + return main(list(argv)+[egg]) # we're done here + finally: + if egg and os.path.exists(egg): + os.unlink(egg) + else: + if setuptools.__version__ == '0.0.1': + # tell the user to uninstall obsolete version + use_setuptools(version) + + req = "setuptools>="+version + import pkg_resources + try: + pkg_resources.require(req) + except pkg_resources.VersionConflict: + try: + from setuptools.command.easy_install import main + except ImportError: + from easy_install import main + main(list(argv)+[download_setuptools(delay=0)]) + sys.exit(0) # try to force an exit + else: + if argv: + from setuptools.command.easy_install import main + main(argv) + else: + print "Setuptools version",version,"or greater has been installed." + print '(Run "ez_setup.py -U setuptools" to reinstall or upgrade.)' def update_md5(filenames): - """Update our built-in md5 registry""" + """Update our built-in md5 registry""" - import re - from md5 import md5 + import re + from md5 import md5 - for name in filenames: - base = os.path.basename(name) - f = open(name,'rb') - md5_data[base] = md5(f.read()).hexdigest() - f.close() + for name in filenames: + base = os.path.basename(name) + f = open(name,'rb') + md5_data[base] = md5(f.read()).hexdigest() + f.close() - data = [" %r: %r,\n" % it for it in md5_data.items()] - data.sort() - repl = "".join(data) + data = [" %r: %r,\n" % it for it in md5_data.items()] + data.sort() + repl = "".join(data) - import inspect - srcfile = inspect.getsourcefile(sys.modules[__name__]) - f = open(srcfile, 'rb'); src = f.read(); f.close() + import inspect + srcfile = inspect.getsourcefile(sys.modules[__name__]) + f = open(srcfile, 'rb'); src = f.read(); f.close() - match = re.search("\nmd5_data = {\n([^}]+)}", src) - if not match: - print >>sys.stderr, "Internal error!" - sys.exit(2) + match = re.search("\nmd5_data = {\n([^}]+)}", src) + if not match: + print >>sys.stderr, "Internal error!" + sys.exit(2) - src = src[:match.start(1)] + repl + src[match.end(1):] - f = open(srcfile,'w') - f.write(src) - f.close() + src = src[:match.start(1)] + repl + src[match.end(1):] + f = open(srcfile,'w') + f.write(src) + f.close() if __name__=='__main__': - if len(sys.argv)>2 and sys.argv[1]=='--md5update': - update_md5(sys.argv[2:]) - else: - main(sys.argv[1:]) + if len(sys.argv)>2 and sys.argv[1]=='--md5update': + update_md5(sys.argv[2:]) + else: + main(sys.argv[1:]) diff --git a/lib/git/__init__.py b/lib/git/__init__.py index 75eba3c9..aac539eb 100644 --- a/lib/git/__init__.py +++ b/lib/git/__init__.py @@ -23,4 +23,4 @@ from git.index import * from git.utils import LockFile, BlockingLockFile __all__ = [ name for name, obj in locals().items() - if not (name.startswith('_') or inspect.ismodule(obj)) ] + if not (name.startswith('_') or inspect.ismodule(obj)) ] diff --git a/lib/git/actor.py b/lib/git/actor.py index 5de55b8d..b5426f21 100644 --- a/lib/git/actor.py +++ b/lib/git/actor.py @@ -7,56 +7,56 @@ import re class Actor(object): - """Actors hold information about a person acting on the repository. 
They - can be committers and authors or anything with a name and an email as - mentioned in the git log entries.""" - # precompiled regex - name_only_regex = re.compile( r'<(.+)>' ) - name_email_regex = re.compile( r'(.*) <(.+?)>' ) - - def __init__(self, name, email): - self.name = name - self.email = email - - def __eq__(self, other): - return self.name == other.name and self.email == other.email - - def __ne__(self, other): - return not (self == other) - - def __hash__(self): - return hash((self.name, self.email)) - - def __str__(self): - return self.name - - def __repr__(self): - return '">' % (self.name, self.email) - - @classmethod - def _from_string(cls, string): - """ - Create an Actor from a string. - - ``str`` - is the string, which is expected to be in regular git format - - Format - John Doe - - Returns - Actor - """ - m = cls.name_email_regex.search(string) - if m: - name, email = m.groups() - return Actor(name, email) - else: - m = cls.name_only_regex.search(string) - if m: - return Actor(m.group(1), None) - else: - # assume best and use the whole string as name - return Actor(string, None) - # END special case name - # END handle name/email matching + """Actors hold information about a person acting on the repository. They + can be committers and authors or anything with a name and an email as + mentioned in the git log entries.""" + # precompiled regex + name_only_regex = re.compile( r'<(.+)>' ) + name_email_regex = re.compile( r'(.*) <(.+?)>' ) + + def __init__(self, name, email): + self.name = name + self.email = email + + def __eq__(self, other): + return self.name == other.name and self.email == other.email + + def __ne__(self, other): + return not (self == other) + + def __hash__(self): + return hash((self.name, self.email)) + + def __str__(self): + return self.name + + def __repr__(self): + return '">' % (self.name, self.email) + + @classmethod + def _from_string(cls, string): + """ + Create an Actor from a string. + + ``str`` + is the string, which is expected to be in regular git format + + Format + John Doe + + Returns + Actor + """ + m = cls.name_email_regex.search(string) + if m: + name, email = m.groups() + return Actor(name, email) + else: + m = cls.name_only_regex.search(string) + if m: + return Actor(m.group(1), None) + else: + # assume best and use the whole string as name + return Actor(string, None) + # END special case name + # END handle name/email matching diff --git a/lib/git/cmd.py b/lib/git/cmd.py index e361e772..60912142 100644 --- a/lib/git/cmd.py +++ b/lib/git/cmd.py @@ -13,8 +13,8 @@ from errors import GitCommandError GIT_PYTHON_TRACE = os.environ.get("GIT_PYTHON_TRACE", False) execute_kwargs = ('istream', 'with_keep_cwd', 'with_extended_output', - 'with_exceptions', 'as_process', - 'output_stream' ) + 'with_exceptions', 'as_process', + 'output_stream' ) extra = {} # NOTE: Execution through a shell on windows appears to be slightly faster, but in fact @@ -23,401 +23,401 @@ extra = {} # between the OS which is why the shell should not be used ( unless it does not work # otherwise ) #if sys.platform == 'win32': -# extra = {'shell': False} +# extra = {'shell': False} def dashify(string): - return string.replace('_', '-') + return string.replace('_', '-') class Git(object): - """ - The Git class manages communication with the Git binary. 
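For illustration, a minimal sketch of how such dynamic calls end up as git command lines once keyword arguments are transformed (the repository path and options are assumptions)::

    g = Git("/path/to/repo")                        # hypothetical working tree
    g.rev_list("master", max_count=10, header=True)
    # roughly equivalent to running: git rev-list --max-count=10 --header master
    # (keyword options follow dict iteration order, so their order may vary)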
- - It provides a convenient interface to calling the Git binary, such as in:: - - g = Git( git_dir ) - g.init() # calls 'git init' program - rval = g.ls_files() # calls 'git ls-files' program - - ``Debugging`` - Set the GIT_PYTHON_TRACE environment variable print each invocation - of the command to stdout. - Set its value to 'full' to see details about the returned values. - """ - __slots__ = ("_working_dir", "cat_file_all", "cat_file_header") - - class AutoInterrupt(object): - """ - Kill/Interrupt the stored process instance once this instance goes out of scope. It is - used to prevent processes piling up in case iterators stop reading. - Besides all attributes are wired through to the contained process object. - - The wait method was overridden to perform automatic status code checking - and possibly raise. - """ - __slots__= ("proc", "args") - - def __init__(self, proc, args ): - self.proc = proc - self.args = args - - def __del__(self): - # did the process finish already so we have a return code ? - if self.proc.poll() is not None: - return - - # try to kill it - try: - os.kill(self.proc.pid, 2) # interrupt signal - except AttributeError: - # try windows - # for some reason, providing None for stdout/stderr still prints something. This is why - # we simply use the shell and redirect to nul. Its slower than CreateProcess, question - # is whether we really want to see all these messages. Its annoying no matter what. - subprocess.call(("TASKKILL /F /T /PID %s 2>nul 1>nul" % str(self.proc.pid)), shell=True) - # END exception handling - - def __getattr__(self, attr): - return getattr(self.proc, attr) - - def wait(self): - """ - Wait for the process and return its status code. - - Raise - GitCommandError if the return status is not 0 - """ - status = self.proc.wait() - if status != 0: - raise GitCommandError(self.args, status, self.proc.stderr.read()) - # END status handling - return status - - - - def __init__(self, working_dir=None): - """ - Initialize this instance with: - - ``working_dir`` - Git directory we should work in. If None, we always work in the current - directory as returned by os.getcwd(). - It is meant to be the working tree directory if available, or the - .git directory in case of bare repositories. - """ - super(Git, self).__init__() - self._working_dir = working_dir - - # cached command slots - self.cat_file_header = None - self.cat_file_all = None + """ + The Git class manages communication with the Git binary. + + It provides a convenient interface to calling the Git binary, such as in:: + + g = Git( git_dir ) + g.init() # calls 'git init' program + rval = g.ls_files() # calls 'git ls-files' program + + ``Debugging`` + Set the GIT_PYTHON_TRACE environment variable print each invocation + of the command to stdout. + Set its value to 'full' to see details about the returned values. + """ + __slots__ = ("_working_dir", "cat_file_all", "cat_file_header") + + class AutoInterrupt(object): + """ + Kill/Interrupt the stored process instance once this instance goes out of scope. It is + used to prevent processes piling up in case iterators stop reading. + Besides all attributes are wired through to the contained process object. + + The wait method was overridden to perform automatic status code checking + and possibly raise. + """ + __slots__= ("proc", "args") + + def __init__(self, proc, args ): + self.proc = proc + self.args = args + + def __del__(self): + # did the process finish already so we have a return code ? 
+ if self.proc.poll() is not None: + return + + # try to kill it + try: + os.kill(self.proc.pid, 2) # interrupt signal + except AttributeError: + # try windows + # for some reason, providing None for stdout/stderr still prints something. This is why + # we simply use the shell and redirect to nul. Its slower than CreateProcess, question + # is whether we really want to see all these messages. Its annoying no matter what. + subprocess.call(("TASKKILL /F /T /PID %s 2>nul 1>nul" % str(self.proc.pid)), shell=True) + # END exception handling + + def __getattr__(self, attr): + return getattr(self.proc, attr) + + def wait(self): + """ + Wait for the process and return its status code. + + Raise + GitCommandError if the return status is not 0 + """ + status = self.proc.wait() + if status != 0: + raise GitCommandError(self.args, status, self.proc.stderr.read()) + # END status handling + return status + + + + def __init__(self, working_dir=None): + """ + Initialize this instance with: + + ``working_dir`` + Git directory we should work in. If None, we always work in the current + directory as returned by os.getcwd(). + It is meant to be the working tree directory if available, or the + .git directory in case of bare repositories. + """ + super(Git, self).__init__() + self._working_dir = working_dir + + # cached command slots + self.cat_file_header = None + self.cat_file_all = None - def __getattr__(self, name): - """ - A convenience method as it allows to call the command as if it was - an object. - Returns - Callable object that will execute call _call_process with your arguments. - """ - if name[:1] == '_': - raise AttributeError(name) - return lambda *args, **kwargs: self._call_process(name, *args, **kwargs) + def __getattr__(self, name): + """ + A convenience method as it allows to call the command as if it was + an object. + Returns + Callable object that will execute call _call_process with your arguments. + """ + if name[:1] == '_': + raise AttributeError(name) + return lambda *args, **kwargs: self._call_process(name, *args, **kwargs) - @property - def working_dir(self): - """ - Returns - Git directory we are working on - """ - return self._working_dir + @property + def working_dir(self): + """ + Returns + Git directory we are working on + """ + return self._working_dir - def execute(self, command, - istream=None, - with_keep_cwd=False, - with_extended_output=False, - with_exceptions=True, - as_process=False, - output_stream=None - ): - """ - Handles executing the command on the shell and consumes and returns - the returned information (stdout) + def execute(self, command, + istream=None, + with_keep_cwd=False, + with_extended_output=False, + with_exceptions=True, + as_process=False, + output_stream=None + ): + """ + Handles executing the command on the shell and consumes and returns + the returned information (stdout) - ``command`` - The command argument list to execute. - It should be a string, or a sequence of program arguments. The - program to execute is the first item in the args sequence or string. + ``command`` + The command argument list to execute. + It should be a string, or a sequence of program arguments. The + program to execute is the first item in the args sequence or string. - ``istream`` - Standard input filehandle passed to subprocess.Popen. + ``istream`` + Standard input filehandle passed to subprocess.Popen. - ``with_keep_cwd`` - Whether to use the current working directory from os.getcwd(). 
- The cmd otherwise uses its own working_dir that it has been initialized - with if possible. + ``with_keep_cwd`` + Whether to use the current working directory from os.getcwd(). + The cmd otherwise uses its own working_dir that it has been initialized + with if possible. - ``with_extended_output`` - Whether to return a (status, stdout, stderr) tuple. + ``with_extended_output`` + Whether to return a (status, stdout, stderr) tuple. - ``with_exceptions`` - Whether to raise an exception when git returns a non-zero status. + ``with_exceptions`` + Whether to raise an exception when git returns a non-zero status. - ``as_process`` - Whether to return the created process instance directly from which - streams can be read on demand. This will render with_extended_output and - with_exceptions ineffective - the caller will have - to deal with the details himself. - It is important to note that the process will be placed into an AutoInterrupt - wrapper that will interrupt the process once it goes out of scope. If you - use the command in iterators, you should pass the whole process instance - instead of a single stream. - - ``output_stream`` - If set to a file-like object, data produced by the git command will be - output to the given stream directly. - This feature only has any effect if as_process is False. Processes will - always be created with a pipe due to issues with subprocess. - This merely is a workaround as data will be copied from the - output pipe to the given output stream directly. - - - Returns:: - - str(output) # extended_output = False (Default) - tuple(int(status), str(stdout), str(stderr)) # extended_output = True - - if ouput_stream is True, the stdout value will be your output stream: - output_stream # extended_output = False - tuple(int(status), output_stream, str(stderr))# extended_output = True - - Raise - GitCommandError - - NOTE - If you add additional keyword arguments to the signature of this method, - you must update the execute_kwargs tuple housed in this module. - """ - if GIT_PYTHON_TRACE and not GIT_PYTHON_TRACE == 'full': - print ' '.join(command) + ``as_process`` + Whether to return the created process instance directly from which + streams can be read on demand. This will render with_extended_output and + with_exceptions ineffective - the caller will have + to deal with the details himself. + It is important to note that the process will be placed into an AutoInterrupt + wrapper that will interrupt the process once it goes out of scope. If you + use the command in iterators, you should pass the whole process instance + instead of a single stream. + + ``output_stream`` + If set to a file-like object, data produced by the git command will be + output to the given stream directly. + This feature only has any effect if as_process is False. Processes will + always be created with a pipe due to issues with subprocess. + This merely is a workaround as data will be copied from the + output pipe to the given output stream directly. + + + Returns:: + + str(output) # extended_output = False (Default) + tuple(int(status), str(stdout), str(stderr)) # extended_output = True + + if ouput_stream is True, the stdout value will be your output stream: + output_stream # extended_output = False + tuple(int(status), output_stream, str(stderr))# extended_output = True + + Raise + GitCommandError + + NOTE + If you add additional keyword arguments to the signature of this method, + you must update the execute_kwargs tuple housed in this module. 
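For illustration, a sketch of how keyword arguments are split between execute() and the git command line when a call goes through _call_process (the option names are assumptions)::

    g = Git("/path/to/repo")                               # hypothetical instance
    status, out, err = g.ls_files(z=True, with_extended_output=True)
    # 'with_extended_output' is listed in execute_kwargs and stays with execute(),
    # while z=True becomes the '-z' switch of 'git ls-files'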
+ """ + if GIT_PYTHON_TRACE and not GIT_PYTHON_TRACE == 'full': + print ' '.join(command) - # Allow the user to have the command executed in their working dir. - if with_keep_cwd or self._working_dir is None: - cwd = os.getcwd() - else: - cwd=self._working_dir - - # Start the process - proc = subprocess.Popen(command, - cwd=cwd, - stdin=istream, - stderr=subprocess.PIPE, - stdout=subprocess.PIPE, - **extra - ) - if as_process: - return self.AutoInterrupt(proc, command) - - # Wait for the process to return - status = 0 - stdout_value = '' - stderr_value = '' - try: - if output_stream is None: - stdout_value = proc.stdout.read().rstrip() # strip trailing "\n" - else: - max_chunk_size = 1024*64 - while True: - chunk = proc.stdout.read(max_chunk_size) - output_stream.write(chunk) - if len(chunk) < max_chunk_size: - break - # END reading output stream - stdout_value = output_stream - # END stdout handling - stderr_value = proc.stderr.read().rstrip() # strip trailing "\n" - - # waiting here should do nothing as we have finished stream reading - status = proc.wait() - finally: - proc.stdout.close() - proc.stderr.close() + # Allow the user to have the command executed in their working dir. + if with_keep_cwd or self._working_dir is None: + cwd = os.getcwd() + else: + cwd=self._working_dir + + # Start the process + proc = subprocess.Popen(command, + cwd=cwd, + stdin=istream, + stderr=subprocess.PIPE, + stdout=subprocess.PIPE, + **extra + ) + if as_process: + return self.AutoInterrupt(proc, command) + + # Wait for the process to return + status = 0 + stdout_value = '' + stderr_value = '' + try: + if output_stream is None: + stdout_value = proc.stdout.read().rstrip() # strip trailing "\n" + else: + max_chunk_size = 1024*64 + while True: + chunk = proc.stdout.read(max_chunk_size) + output_stream.write(chunk) + if len(chunk) < max_chunk_size: + break + # END reading output stream + stdout_value = output_stream + # END stdout handling + stderr_value = proc.stderr.read().rstrip() # strip trailing "\n" + + # waiting here should do nothing as we have finished stream reading + status = proc.wait() + finally: + proc.stdout.close() + proc.stderr.close() - if with_exceptions and status != 0: - raise GitCommandError(command, status, stderr_value) + if with_exceptions and status != 0: + raise GitCommandError(command, status, stderr_value) - if GIT_PYTHON_TRACE == 'full': - if stderr_value: - print "%s -> %d: '%s' !! '%s'" % (command, status, stdout_value, stderr_value) - elif stdout_value: - print "%s -> %d: '%s'" % (command, status, stdout_value) - else: - print "%s -> %d" % (command, status) + if GIT_PYTHON_TRACE == 'full': + if stderr_value: + print "%s -> %d: '%s' !! '%s'" % (command, status, stdout_value, stderr_value) + elif stdout_value: + print "%s -> %d: '%s'" % (command, status, stdout_value) + else: + print "%s -> %d" % (command, status) - # Allow access to the command's status code - if with_extended_output: - return (status, stdout_value, stderr_value) - else: - return stdout_value + # Allow access to the command's status code + if with_extended_output: + return (status, stdout_value, stderr_value) + else: + return stdout_value - def transform_kwargs(self, **kwargs): - """ - Transforms Python style kwargs into git command line options. 
- """ - args = [] - for k, v in kwargs.items(): - if len(k) == 1: - if v is True: - args.append("-%s" % k) - elif type(v) is not bool: - args.append("-%s%s" % (k, v)) - else: - if v is True: - args.append("--%s" % dashify(k)) - elif type(v) is not bool: - args.append("--%s=%s" % (dashify(k), v)) - return args + def transform_kwargs(self, **kwargs): + """ + Transforms Python style kwargs into git command line options. + """ + args = [] + for k, v in kwargs.items(): + if len(k) == 1: + if v is True: + args.append("-%s" % k) + elif type(v) is not bool: + args.append("-%s%s" % (k, v)) + else: + if v is True: + args.append("--%s" % dashify(k)) + elif type(v) is not bool: + args.append("--%s=%s" % (dashify(k), v)) + return args - @classmethod - def __unpack_args(cls, arg_list): - if not isinstance(arg_list, (list,tuple)): - return [ str(arg_list) ] - - outlist = list() - for arg in arg_list: - if isinstance(arg_list, (list, tuple)): - outlist.extend(cls.__unpack_args( arg )) - # END recursion - else: - outlist.append(str(arg)) - # END for each arg - return outlist + @classmethod + def __unpack_args(cls, arg_list): + if not isinstance(arg_list, (list,tuple)): + return [ str(arg_list) ] + + outlist = list() + for arg in arg_list: + if isinstance(arg_list, (list, tuple)): + outlist.extend(cls.__unpack_args( arg )) + # END recursion + else: + outlist.append(str(arg)) + # END for each arg + return outlist - def _call_process(self, method, *args, **kwargs): - """ - Run the given git command with the specified arguments and return - the result as a String + def _call_process(self, method, *args, **kwargs): + """ + Run the given git command with the specified arguments and return + the result as a String - ``method`` - is the command. Contained "_" characters will be converted to dashes, - such as in 'ls_files' to call 'ls-files'. + ``method`` + is the command. Contained "_" characters will be converted to dashes, + such as in 'ls_files' to call 'ls-files'. - ``args`` - is the list of arguments. If None is included, it will be pruned. - This allows your commands to call git more conveniently as None - is realized as non-existent + ``args`` + is the list of arguments. If None is included, it will be pruned. + This allows your commands to call git more conveniently as None + is realized as non-existent - ``kwargs`` - is a dict of keyword arguments. - This function accepts the same optional keyword arguments - as execute(). + ``kwargs`` + is a dict of keyword arguments. + This function accepts the same optional keyword arguments + as execute(). - Examples:: - git.rev_list('master', max_count=10, header=True) + Examples:: + git.rev_list('master', max_count=10, header=True) - Returns - Same as execute() - """ + Returns + Same as execute() + """ - # Handle optional arguments prior to calling transform_kwargs - # otherwise these'll end up in args, which is bad. - _kwargs = {} - for kwarg in execute_kwargs: - try: - _kwargs[kwarg] = kwargs.pop(kwarg) - except KeyError: - pass + # Handle optional arguments prior to calling transform_kwargs + # otherwise these'll end up in args, which is bad. 
+ _kwargs = {} + for kwarg in execute_kwargs: + try: + _kwargs[kwarg] = kwargs.pop(kwarg) + except KeyError: + pass - # Prepare the argument list - opt_args = self.transform_kwargs(**kwargs) - - ext_args = self.__unpack_args([a for a in args if a is not None]) - args = opt_args + ext_args + # Prepare the argument list + opt_args = self.transform_kwargs(**kwargs) + + ext_args = self.__unpack_args([a for a in args if a is not None]) + args = opt_args + ext_args - call = ["git", dashify(method)] - call.extend(args) + call = ["git", dashify(method)] + call.extend(args) - return self.execute(call, **_kwargs) - - def _parse_object_header(self, header_line): - """ - ``header_line`` - type_string size_as_int - - Returns - (hex_sha, type_string, size_as_int) - - Raises - ValueError if the header contains indication for an error due to incorrect - input sha - """ - tokens = header_line.split() - if len(tokens) != 3: - raise ValueError("SHA named %s could not be resolved, git returned: %r" % (tokens[0], header_line.strip()) ) - if len(tokens[0]) != 40: - raise ValueError("Failed to parse header: %r" % header_line) - return (tokens[0], tokens[1], int(tokens[2])) - - def __prepare_ref(self, ref): - # required for command to separate refs on stdin - refstr = str(ref) # could be ref-object - if refstr.endswith("\n"): - return refstr - return refstr + "\n" - - def __get_persistent_cmd(self, attr_name, cmd_name, *args,**kwargs): - cur_val = getattr(self, attr_name) - if cur_val is not None: - return cur_val - - options = { "istream" : subprocess.PIPE, "as_process" : True } - options.update( kwargs ) - - cmd = self._call_process( cmd_name, *args, **options ) - setattr(self, attr_name, cmd ) - return cmd - - def __get_object_header(self, cmd, ref): - cmd.stdin.write(self.__prepare_ref(ref)) - cmd.stdin.flush() - return self._parse_object_header(cmd.stdout.readline()) - - def get_object_header(self, ref): - """ - Use this method to quickly examine the type and size of the object behind - the given ref. - - NOTE - The method will only suffer from the costs of command invocation - once and reuses the command in subsequent calls. - - Return: - (hexsha, type_string, size_as_int) - """ - cmd = self.__get_persistent_cmd("cat_file_header", "cat_file", batch_check=True) - return self.__get_object_header(cmd, ref) - - def get_object_data(self, ref): - """ - As get_object_header, but returns object data as well - - Return: - (hexsha, type_string, size_as_int,data_string) - """ - cmd = self.__get_persistent_cmd("cat_file_all", "cat_file", batch=True) - hexsha, typename, size = self.__get_object_header(cmd, ref) - data = cmd.stdout.read(size) - cmd.stdout.read(1) # finishing newlines - - return (hexsha, typename, size, data) - - def clear_cache(self): - """ - Clear all kinds of internal caches to release resources. - - Currently persistent commands will be interrupted. 
- - Returns - self - """ - self.cat_file_all = None - self.cat_file_header = None - return self + return self.execute(call, **_kwargs) + + def _parse_object_header(self, header_line): + """ + ``header_line`` + type_string size_as_int + + Returns + (hex_sha, type_string, size_as_int) + + Raises + ValueError if the header contains indication for an error due to incorrect + input sha + """ + tokens = header_line.split() + if len(tokens) != 3: + raise ValueError("SHA named %s could not be resolved, git returned: %r" % (tokens[0], header_line.strip()) ) + if len(tokens[0]) != 40: + raise ValueError("Failed to parse header: %r" % header_line) + return (tokens[0], tokens[1], int(tokens[2])) + + def __prepare_ref(self, ref): + # required for command to separate refs on stdin + refstr = str(ref) # could be ref-object + if refstr.endswith("\n"): + return refstr + return refstr + "\n" + + def __get_persistent_cmd(self, attr_name, cmd_name, *args,**kwargs): + cur_val = getattr(self, attr_name) + if cur_val is not None: + return cur_val + + options = { "istream" : subprocess.PIPE, "as_process" : True } + options.update( kwargs ) + + cmd = self._call_process( cmd_name, *args, **options ) + setattr(self, attr_name, cmd ) + return cmd + + def __get_object_header(self, cmd, ref): + cmd.stdin.write(self.__prepare_ref(ref)) + cmd.stdin.flush() + return self._parse_object_header(cmd.stdout.readline()) + + def get_object_header(self, ref): + """ + Use this method to quickly examine the type and size of the object behind + the given ref. + + NOTE + The method will only suffer from the costs of command invocation + once and reuses the command in subsequent calls. + + Return: + (hexsha, type_string, size_as_int) + """ + cmd = self.__get_persistent_cmd("cat_file_header", "cat_file", batch_check=True) + return self.__get_object_header(cmd, ref) + + def get_object_data(self, ref): + """ + As get_object_header, but returns object data as well + + Return: + (hexsha, type_string, size_as_int,data_string) + """ + cmd = self.__get_persistent_cmd("cat_file_all", "cat_file", batch=True) + hexsha, typename, size = self.__get_object_header(cmd, ref) + data = cmd.stdout.read(size) + cmd.stdout.read(1) # finishing newlines + + return (hexsha, typename, size, data) + + def clear_cache(self): + """ + Clear all kinds of internal caches to release resources. + + Currently persistent commands will be interrupted. + + Returns + self + """ + self.cat_file_all = None + self.cat_file_header = None + return self diff --git a/lib/git/config.py b/lib/git/config.py index 6228ebfa..e5fd9902 100644 --- a/lib/git/config.py +++ b/lib/git/config.py @@ -18,403 +18,403 @@ from git.odict import OrderedDict from git.utils import LockFile class _MetaParserBuilder(type): - """ - Utlity class wrapping base-class methods into decorators that assure read-only properties - """ - def __new__(metacls, name, bases, clsdict): - """ - Equip all base-class methods with a _needs_values decorator, and all non-const methods - with a _set_dirty_and_flush_changes decorator in addition to that. 
- """ - mutating_methods = clsdict['_mutating_methods_'] - for base in bases: - methods = ( t for t in inspect.getmembers(base, inspect.ismethod) if not t[0].startswith("_") ) - for name, method in methods: - if name in clsdict: - continue - method_with_values = _needs_values(method) - if name in mutating_methods: - method_with_values = _set_dirty_and_flush_changes(method_with_values) - # END mutating methods handling - - clsdict[name] = method_with_values - # END for each base - - new_type = super(_MetaParserBuilder, metacls).__new__(metacls, name, bases, clsdict) - return new_type - - + """ + Utlity class wrapping base-class methods into decorators that assure read-only properties + """ + def __new__(metacls, name, bases, clsdict): + """ + Equip all base-class methods with a _needs_values decorator, and all non-const methods + with a _set_dirty_and_flush_changes decorator in addition to that. + """ + mutating_methods = clsdict['_mutating_methods_'] + for base in bases: + methods = ( t for t in inspect.getmembers(base, inspect.ismethod) if not t[0].startswith("_") ) + for name, method in methods: + if name in clsdict: + continue + method_with_values = _needs_values(method) + if name in mutating_methods: + method_with_values = _set_dirty_and_flush_changes(method_with_values) + # END mutating methods handling + + clsdict[name] = method_with_values + # END for each base + + new_type = super(_MetaParserBuilder, metacls).__new__(metacls, name, bases, clsdict) + return new_type + + def _needs_values(func): - """ - Returns method assuring we read values (on demand) before we try to access them - """ - def assure_data_present(self, *args, **kwargs): - self.read() - return func(self, *args, **kwargs) - # END wrapper method - assure_data_present.__name__ = func.__name__ - return assure_data_present - + """ + Returns method assuring we read values (on demand) before we try to access them + """ + def assure_data_present(self, *args, **kwargs): + self.read() + return func(self, *args, **kwargs) + # END wrapper method + assure_data_present.__name__ = func.__name__ + return assure_data_present + def _set_dirty_and_flush_changes(non_const_func): - """ - Return method that checks whether given non constant function may be called. - If so, the instance will be set dirty. - Additionally, we flush the changes right to disk - """ - def flush_changes(self, *args, **kwargs): - rval = non_const_func(self, *args, **kwargs) - self.write() - return rval - # END wrapper method - flush_changes.__name__ = non_const_func.__name__ - return flush_changes - - + """ + Return method that checks whether given non constant function may be called. + If so, the instance will be set dirty. + Additionally, we flush the changes right to disk + """ + def flush_changes(self, *args, **kwargs): + rval = non_const_func(self, *args, **kwargs) + self.write() + return rval + # END wrapper method + flush_changes.__name__ = non_const_func.__name__ + return flush_changes + + class GitConfigParser(cp.RawConfigParser, object): - """ - Implements specifics required to read git style configuration files. - - This variation behaves much like the git.config command such that the configuration - will be read on demand based on the filepath given during initialization. - - The changes will automatically be written once the instance goes out of scope, but - can be triggered manually as well. - - The configuration file will be locked if you intend to change values preventing other - instances to write concurrently. 
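For illustration, a sketch of the on-demand read and write-back behaviour described above; the file path is an assumption, and the read_only flag plus 'set' being a mutating method are assumptions about parts of the class not shown in this hunk::

    cfg = GitConfigParser("/path/to/repo/.git/config", read_only=False)   # assumed constructor arguments
    name = cfg.get("user", "name")           # first access triggers the on-demand read
    cfg.set("user", "name", "Example Name")  # mutating calls are flushed to disk right away
    del cfg                                  # remaining changes are written once the instance goes out of scope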
- - NOTE - The config is case-sensitive even when queried, hence section and option names - must match perfectly. - """ - __metaclass__ = _MetaParserBuilder - - - #{ Configuration - # The lock type determines the type of lock to use in new configuration readers. - # They must be compatible to the LockFile interface. - # A suitable alternative would be the BlockingLockFile - t_lock = LockFile - - #} END configuration - - OPTCRE = re.compile( - r'\s?(?P