summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorSebastian Thiel <sebastian.thiel@icloud.com>2022-05-18 07:43:53 +0800
committerSebastian Thiel <sebastian.thiel@icloud.com>2022-05-18 07:43:53 +0800
commit21ec529987d10e0010badd37f8da3274167d436f (patch)
treea3394cfe902ce7edd07c89420c21c13274a2d295
parentb30720ee4d9762a03eae4fa7cfa4b0190d81784d (diff)
downloadgitpython-21ec529987d10e0010badd37f8da3274167d436f.tar.gz
Run everything through 'black'
That way people who use it won't be deterred, while it unifies style everywhere.
-rw-r--r--doc/source/conf.py92
-rw-r--r--git/__init__.py54
-rw-r--r--git/cmd.py592
-rw-r--r--git/compat.py37
-rw-r--r--git/config.py274
-rw-r--r--git/db.py15
-rw-r--r--git/diff.py377
-rw-r--r--git/exc.py78
m---------git/ext/gitdb0
-rw-r--r--git/index/base.py479
-rw-r--r--git/index/fun.py185
-rw-r--r--git/index/typ.py67
-rw-r--r--git/index/util.py35
-rw-r--r--git/objects/__init__.py10
-rw-r--r--git/objects/base.py61
-rw-r--r--git/objects/blob.py7
-rw-r--r--git/objects/commit.py339
-rw-r--r--git/objects/fun.py75
-rw-r--r--git/objects/submodule/base.py536
-rw-r--r--git/objects/submodule/root.py204
-rw-r--r--git/objects/submodule/util.py40
-rw-r--r--git/objects/tag.py50
-rw-r--r--git/objects/tree.py158
-rw-r--r--git/objects/util.py362
-rw-r--r--git/refs/head.py87
-rw-r--r--git/refs/log.py117
-rw-r--r--git/refs/reference.py58
-rw-r--r--git/refs/remote.py21
-rw-r--r--git/refs/symbolic.py266
-rw-r--r--git/refs/tag.py42
-rw-r--r--git/remote.py538
-rw-r--r--git/repo/base.py582
-rw-r--r--git/repo/fun.py131
-rw-r--r--git/types.py61
-rw-r--r--git/util.py453
-rwxr-xr-xsetup.py34
-rw-r--r--test/lib/__init__.py7
-rw-r--r--test/lib/helper.py146
-rw-r--r--test/performance/lib.py43
-rw-r--r--test/performance/test_commit.py52
-rw-r--r--test/performance/test_odb.py39
-rw-r--r--test/performance/test_streams.py87
-rw-r--r--test/test_actor.py1
-rw-r--r--test/test_base.py55
-rw-r--r--test/test_blob.py9
-rw-r--r--test/test_clone.py17
-rw-r--r--test/test_commit.py252
-rw-r--r--test/test_config.py289
-rw-r--r--test/test_db.py3
-rw-r--r--test/test_diff.py236
-rw-r--r--test/test_docs.py433
-rw-r--r--test/test_exc.py83
-rw-r--r--test/test_fun.py103
-rw-r--r--test/test_git.py173
-rw-r--r--test/test_index.py311
-rw-r--r--test/test_installation.py61
-rw-r--r--test/test_reflog.py37
-rw-r--r--test/test_refs.py137
-rw-r--r--test/test_remote.py218
-rw-r--r--test/test_repo.py477
-rw-r--r--test/test_stats.py24
-rw-r--r--test/test_submodule.py567
-rw-r--r--test/test_tree.py38
-rw-r--r--test/test_util.py172
-rw-r--r--test/tstrunner.py3
65 files changed, 6673 insertions, 3917 deletions
diff --git a/doc/source/conf.py b/doc/source/conf.py
index 286058fd..d2803a82 100644
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -20,38 +20,40 @@ import os
# If your extensions are in another directory, add it here. If the directory
# is relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
-#sys.path.append(os.path.abspath('.'))
-sys.path.insert(0, os.path.abspath('../..'))
+# sys.path.append(os.path.abspath('.'))
+sys.path.insert(0, os.path.abspath("../.."))
# General configuration
# ---------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
-extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest']
+extensions = ["sphinx.ext.autodoc", "sphinx.ext.doctest"]
# Add any paths that contain templates here, relative to this directory.
templates_path = []
# The suffix of source filenames.
-source_suffix = '.rst'
+source_suffix = ".rst"
# The encoding of source files.
-#source_encoding = 'utf-8'
+# source_encoding = 'utf-8'
# The master toctree document.
-master_doc = 'index'
+master_doc = "index"
# General information about the project.
-project = 'GitPython'
-copyright = 'Copyright (C) 2008, 2009 Michael Trier and contributors, 2010-2015 Sebastian Thiel'
+project = "GitPython"
+copyright = (
+ "Copyright (C) 2008, 2009 Michael Trier and contributors, 2010-2015 Sebastian Thiel"
+)
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
-with open(os.path.join(os.path.dirname(__file__), "..", "..", 'VERSION')) as fd:
+with open(os.path.join(os.path.dirname(__file__), "..", "..", "VERSION")) as fd:
VERSION = fd.readline().strip()
version = VERSION
# The full version, including alpha/beta/rc tags.
@@ -59,61 +61,60 @@ release = VERSION
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
-#language = None
+# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
-#today = ''
+# today = ''
# Else, today_fmt is used as the format for a strftime call.
-#today_fmt = '%B %d, %Y'
+# today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
-#unused_docs = []
+# unused_docs = []
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
-exclude_trees = ['build']
+exclude_trees = ["build"]
# The reST default role (used for this markup: `text`) to use for all documents.
-#default_role = None
+# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
-#add_function_parentheses = True
+# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
-#add_module_names = True
+# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
-#show_authors = False
+# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'sphinx'
+pygments_style = "sphinx"
# Options for HTML output
# -----------------------
-html_theme = 'sphinx_rtd_theme'
-html_theme_options = {
-}
+html_theme = "sphinx_rtd_theme"
+html_theme_options = {}
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
-#html_title = None
+# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
-#html_short_title = None
+# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
-#html_logo = None
+# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
-#html_favicon = None
+# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
@@ -122,72 +123,71 @@ html_static_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
-#html_last_updated_fmt = '%b %d, %Y'
+# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
-#html_use_smartypants = True
+# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
-#html_sidebars = {}
+# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
-#html_additional_pages = {}
+# html_additional_pages = {}
# If false, no module index is generated.
-#html_use_modindex = True
+# html_use_modindex = True
# If false, no index is generated.
-#html_use_index = True
+# html_use_index = True
# If true, the index is split into individual pages for each letter.
-#html_split_index = False
+# html_split_index = False
# If true, the reST sources are included in the HTML build as _sources/<name>.
-#html_copy_source = True
+# html_copy_source = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
-#html_use_opensearch = ''
+# html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
-#html_file_suffix = ''
+# html_file_suffix = ''
# Output file base name for HTML help builder.
-htmlhelp_basename = 'gitpythondoc'
+htmlhelp_basename = "gitpythondoc"
# Options for LaTeX output
# ------------------------
# The paper size ('letter' or 'a4').
-#latex_paper_size = 'letter'
+# latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
-#latex_font_size = '10pt'
+# latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, document class [howto/manual]).
latex_documents = [
- ('index', 'GitPython.tex', r'GitPython Documentation',
- r'Michael Trier', 'manual'),
+ ("index", "GitPython.tex", r"GitPython Documentation", r"Michael Trier", "manual"),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
-#latex_logo = None
+# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
-#latex_use_parts = False
+# latex_use_parts = False
# Additional stuff for the LaTeX preamble.
-#latex_preamble = ''
+# latex_preamble = ''
# Documents to append as an appendix to all manuals.
-#latex_appendices = []
+# latex_appendices = []
# If false, no module index is generated.
-#latex_use_modindex = True
+# latex_use_modindex = True
diff --git a/git/__init__.py b/git/__init__.py
index ae9254a2..3f26886f 100644
--- a/git/__init__.py
+++ b/git/__init__.py
@@ -4,8 +4,8 @@
# This module is part of GitPython and is released under
# the BSD License: http://www.opensource.org/licenses/bsd-license.php
# flake8: noqa
-#@PydevCodeAnalysisIgnore
-from git.exc import * # @NoMove @IgnorePep8
+# @PydevCodeAnalysisIgnore
+from git.exc import * # @NoMove @IgnorePep8
import inspect
import os
import sys
@@ -14,14 +14,14 @@ import os.path as osp
from typing import Optional
from git.types import PathLike
-__version__ = 'git'
+__version__ = "git"
-#{ Initialization
+# { Initialization
def _init_externals() -> None:
"""Initialize external projects by putting them into the path"""
- if __version__ == 'git' and 'PYOXIDIZER' not in os.environ:
- sys.path.insert(1, osp.join(osp.dirname(__file__), 'ext', 'gitdb'))
+ if __version__ == "git" and "PYOXIDIZER" not in os.environ:
+ sys.path.insert(1, osp.join(osp.dirname(__file__), "ext", "gitdb"))
try:
import gitdb
@@ -29,26 +29,27 @@ def _init_externals() -> None:
raise ImportError("'gitdb' could not be found in your PYTHONPATH") from e
# END verify import
-#} END initialization
+
+# } END initialization
#################
_init_externals()
#################
-#{ Imports
+# { Imports
try:
from git.config import GitConfigParser # @NoMove @IgnorePep8
- from git.objects import * # @NoMove @IgnorePep8
- from git.refs import * # @NoMove @IgnorePep8
- from git.diff import * # @NoMove @IgnorePep8
- from git.db import * # @NoMove @IgnorePep8
- from git.cmd import Git # @NoMove @IgnorePep8
- from git.repo import Repo # @NoMove @IgnorePep8
- from git.remote import * # @NoMove @IgnorePep8
- from git.index import * # @NoMove @IgnorePep8
- from git.util import ( # @NoMove @IgnorePep8
+ from git.objects import * # @NoMove @IgnorePep8
+ from git.refs import * # @NoMove @IgnorePep8
+ from git.diff import * # @NoMove @IgnorePep8
+ from git.db import * # @NoMove @IgnorePep8
+ from git.cmd import Git # @NoMove @IgnorePep8
+ from git.repo import Repo # @NoMove @IgnorePep8
+ from git.remote import * # @NoMove @IgnorePep8
+ from git.index import * # @NoMove @IgnorePep8
+ from git.util import ( # @NoMove @IgnorePep8
LockFile,
BlockingLockFile,
Stats,
@@ -56,15 +57,18 @@ try:
rmtree,
)
except GitError as exc:
- raise ImportError('%s: %s' % (exc.__class__.__name__, exc)) from exc
+ raise ImportError("%s: %s" % (exc.__class__.__name__, exc)) from exc
-#} END imports
+# } END imports
-__all__ = [name for name, obj in locals().items()
- if not (name.startswith('_') or inspect.ismodule(obj))]
+__all__ = [
+ name
+ for name, obj in locals().items()
+ if not (name.startswith("_") or inspect.ismodule(obj))
+]
-#{ Initialize git executable path
+# { Initialize git executable path
GIT_OK = None
@@ -79,12 +83,14 @@ def refresh(path: Optional[PathLike] = None) -> None:
return
GIT_OK = True
-#} END initialize git executable path
+
+
+# } END initialize git executable path
#################
try:
refresh()
except Exception as exc:
- raise ImportError('Failed to initialize: {0}'.format(exc)) from exc
+ raise ImportError("Failed to initialize: {0}".format(exc)) from exc
#################
diff --git a/git/cmd.py b/git/cmd.py
index 1ddf9e03..12409b0c 100644
--- a/git/cmd.py
+++ b/git/cmd.py
@@ -9,12 +9,7 @@ import io
import logging
import os
import signal
-from subprocess import (
- call,
- Popen,
- PIPE,
- DEVNULL
-)
+from subprocess import call, Popen, PIPE, DEVNULL
import subprocess
import threading
from textwrap import dedent
@@ -29,10 +24,7 @@ from git.compat import (
from git.exc import CommandError
from git.util import is_cygwin_git, cygpath, expand_path, remove_password_if_present
-from .exc import (
- GitCommandError,
- GitCommandNotFound
-)
+from .exc import GitCommandError, GitCommandNotFound
from .util import (
LazyMixin,
stream_copy,
@@ -40,8 +32,24 @@ from .util import (
# typing ---------------------------------------------------------------------------
-from typing import (Any, AnyStr, BinaryIO, Callable, Dict, IO, Iterator, List, Mapping,
- Sequence, TYPE_CHECKING, TextIO, Tuple, Union, cast, overload)
+from typing import (
+ Any,
+ AnyStr,
+ BinaryIO,
+ Callable,
+ Dict,
+ IO,
+ Iterator,
+ List,
+ Mapping,
+ Sequence,
+ TYPE_CHECKING,
+ TextIO,
+ Tuple,
+ Union,
+ cast,
+ overload,
+)
from git.types import PathLike, Literal, TBD
@@ -52,15 +60,26 @@ if TYPE_CHECKING:
# ---------------------------------------------------------------------------------
-execute_kwargs = {'istream', 'with_extended_output',
- 'with_exceptions', 'as_process', 'stdout_as_string',
- 'output_stream', 'with_stdout', 'kill_after_timeout',
- 'universal_newlines', 'shell', 'env', 'max_chunk_size', 'strip_newline_in_stdout'}
+execute_kwargs = {
+ "istream",
+ "with_extended_output",
+ "with_exceptions",
+ "as_process",
+ "stdout_as_string",
+ "output_stream",
+ "with_stdout",
+ "kill_after_timeout",
+ "universal_newlines",
+ "shell",
+ "env",
+ "max_chunk_size",
+ "strip_newline_in_stdout",
+}
log = logging.getLogger(__name__)
log.addHandler(logging.NullHandler())
-__all__ = ('Git',)
+__all__ = ("Git",)
# ==============================================================================
@@ -69,18 +88,24 @@ __all__ = ('Git',)
# Documentation
## @{
-def handle_process_output(process: 'Git.AutoInterrupt' | Popen,
- stdout_handler: Union[None,
- Callable[[AnyStr], None],
- Callable[[List[AnyStr]], None],
- Callable[[bytes, 'Repo', 'DiffIndex'], None]],
- stderr_handler: Union[None,
- Callable[[AnyStr], None],
- Callable[[List[AnyStr]], None]],
- finalizer: Union[None,
- Callable[[Union[subprocess.Popen, 'Git.AutoInterrupt']], None]] = None,
- decode_streams: bool = True,
- kill_after_timeout: Union[None, float] = None) -> None:
+
+def handle_process_output(
+ process: "Git.AutoInterrupt" | Popen,
+ stdout_handler: Union[
+ None,
+ Callable[[AnyStr], None],
+ Callable[[List[AnyStr]], None],
+ Callable[[bytes, "Repo", "DiffIndex"], None],
+ ],
+ stderr_handler: Union[
+ None, Callable[[AnyStr], None], Callable[[List[AnyStr]], None]
+ ],
+ finalizer: Union[
+ None, Callable[[Union[subprocess.Popen, "Git.AutoInterrupt"]], None]
+ ] = None,
+ decode_streams: bool = True,
+ kill_after_timeout: Union[None, float] = None,
+) -> None:
"""Registers for notifications to learn that process output is ready to read, and dispatches lines to
the respective line handlers.
This function returns once the finalizer returns
@@ -101,8 +126,13 @@ def handle_process_output(process: 'Git.AutoInterrupt' | Popen,
should be killed.
"""
# Use 2 "pump" threads and wait for both to finish.
- def pump_stream(cmdline: List[str], name: str, stream: Union[BinaryIO, TextIO], is_decode: bool,
- handler: Union[None, Callable[[Union[bytes, str]], None]]) -> None:
+ def pump_stream(
+ cmdline: List[str],
+ name: str,
+ stream: Union[BinaryIO, TextIO],
+ is_decode: bool,
+ handler: Union[None, Callable[[Union[bytes, str]], None]],
+ ) -> None:
try:
for line in stream:
if handler:
@@ -114,21 +144,25 @@ def handle_process_output(process: 'Git.AutoInterrupt' | Popen,
handler(line)
except Exception as ex:
- log.error(f"Pumping {name!r} of cmd({remove_password_if_present(cmdline)}) failed due to: {ex!r}")
+ log.error(
+ f"Pumping {name!r} of cmd({remove_password_if_present(cmdline)}) failed due to: {ex!r}"
+ )
if "I/O operation on closed file" not in str(ex):
# Only reraise if the error was not due to the stream closing
- raise CommandError([f'<{name}-pump>'] + remove_password_if_present(cmdline), ex) from ex
+ raise CommandError(
+ [f"<{name}-pump>"] + remove_password_if_present(cmdline), ex
+ ) from ex
finally:
stream.close()
- if hasattr(process, 'proc'):
- process = cast('Git.AutoInterrupt', process)
- cmdline: str | Tuple[str, ...] | List[str] = getattr(process.proc, 'args', '')
+ if hasattr(process, "proc"):
+ process = cast("Git.AutoInterrupt", process)
+ cmdline: str | Tuple[str, ...] | List[str] = getattr(process.proc, "args", "")
p_stdout = process.proc.stdout if process.proc else None
p_stderr = process.proc.stderr if process.proc else None
else:
process = cast(Popen, process)
- cmdline = getattr(process, 'args', '')
+ cmdline = getattr(process, "args", "")
p_stdout = process.stdout
p_stderr = process.stderr
@@ -137,15 +171,16 @@ def handle_process_output(process: 'Git.AutoInterrupt' | Popen,
pumps: List[Tuple[str, IO, Callable[..., None] | None]] = []
if p_stdout:
- pumps.append(('stdout', p_stdout, stdout_handler))
+ pumps.append(("stdout", p_stdout, stdout_handler))
if p_stderr:
- pumps.append(('stderr', p_stderr, stderr_handler))
+ pumps.append(("stderr", p_stderr, stderr_handler))
threads: List[threading.Thread] = []
for name, stream, handler in pumps:
- t = threading.Thread(target=pump_stream,
- args=(cmdline, name, stream, decode_streams, handler))
+ t = threading.Thread(
+ target=pump_stream, args=(cmdline, name, stream, decode_streams, handler)
+ )
t.daemon = True
t.start()
threads.append(t)
@@ -158,12 +193,15 @@ def handle_process_output(process: 'Git.AutoInterrupt' | Popen,
if isinstance(process, Git.AutoInterrupt):
process._terminate()
else: # Don't want to deal with the other case
- raise RuntimeError("Thread join() timed out in cmd.handle_process_output()."
- f" kill_after_timeout={kill_after_timeout} seconds")
+ raise RuntimeError(
+ "Thread join() timed out in cmd.handle_process_output()."
+ f" kill_after_timeout={kill_after_timeout} seconds"
+ )
if stderr_handler:
error_str: Union[str, bytes] = (
"error: process killed because it timed out."
- f" kill_after_timeout={kill_after_timeout} seconds")
+ f" kill_after_timeout={kill_after_timeout} seconds"
+ )
if not decode_streams and isinstance(p_stderr, BinaryIO):
# Assume stderr_handler needs binary input
error_str = cast(str, error_str)
@@ -179,19 +217,22 @@ def handle_process_output(process: 'Git.AutoInterrupt' | Popen,
def dashify(string: str) -> str:
- return string.replace('_', '-')
+ return string.replace("_", "-")
def slots_to_dict(self: object, exclude: Sequence[str] = ()) -> Dict[str, Any]:
return {s: getattr(self, s) for s in self.__slots__ if s not in exclude}
-def dict_to_slots_and__excluded_are_none(self: object, d: Mapping[str, Any], excluded: Sequence[str] = ()) -> None:
+def dict_to_slots_and__excluded_are_none(
+ self: object, d: Mapping[str, Any], excluded: Sequence[str] = ()
+) -> None:
for k, v in d.items():
setattr(self, k, v)
for k in excluded:
setattr(self, k, None)
+
## -- End Utilities -- @}
@@ -200,8 +241,11 @@ CREATE_NO_WINDOW = 0x08000000
## CREATE_NEW_PROCESS_GROUP is needed to allow killing it afterwards,
# see https://docs.python.org/3/library/subprocess.html#subprocess.Popen.send_signal
-PROC_CREATIONFLAGS = (CREATE_NO_WINDOW | subprocess.CREATE_NEW_PROCESS_GROUP # type: ignore[attr-defined]
- if is_win else 0) # mypy error if not windows
+PROC_CREATIONFLAGS = (
+ CREATE_NO_WINDOW | subprocess.CREATE_NEW_PROCESS_GROUP # type: ignore[attr-defined]
+ if is_win
+ else 0
+) # mypy error if not windows
class Git(LazyMixin):
@@ -220,10 +264,18 @@ class Git(LazyMixin):
of the command to stdout.
Set its value to 'full' to see details about the returned values.
"""
- __slots__ = ("_working_dir", "cat_file_all", "cat_file_header", "_version_info",
- "_git_options", "_persistent_git_options", "_environment")
- _excluded_ = ('cat_file_all', 'cat_file_header', '_version_info')
+ __slots__ = (
+ "_working_dir",
+ "cat_file_all",
+ "cat_file_header",
+ "_version_info",
+ "_git_options",
+ "_persistent_git_options",
+ "_environment",
+ )
+
+ _excluded_ = ("cat_file_all", "cat_file_header", "_version_info")
def __getstate__(self) -> Dict[str, Any]:
return slots_to_dict(self, exclude=self._excluded_)
@@ -233,7 +285,7 @@ class Git(LazyMixin):
# CONFIGURATION
- git_exec_name = "git" # default that should work on linux and windows
+ git_exec_name = "git" # default that should work on linux and windows
# Enables debugging of GitPython's git commands
GIT_PYTHON_TRACE = os.environ.get("GIT_PYTHON_TRACE", False)
@@ -282,13 +334,18 @@ class Git(LazyMixin):
# warn or raise exception if test failed
if not has_git:
- err = dedent("""\
+ err = (
+ dedent(
+ """\
Bad git executable.
The git executable must be specified in one of the following ways:
- be included in your $PATH
- be set via $%s
- explicitly set via git.refresh()
- """) % cls._git_exec_env_var
+ """
+ )
+ % cls._git_exec_env_var
+ )
# revert to whatever the old_git was
cls.GIT_PYTHON_GIT_EXECUTABLE = old_git
@@ -314,7 +371,9 @@ class Git(LazyMixin):
if mode in quiet:
pass
elif mode in warn or mode in error:
- err = dedent("""\
+ err = (
+ dedent(
+ """\
%s
All git commands will error until this is rectified.
@@ -326,32 +385,42 @@ class Git(LazyMixin):
Example:
export %s=%s
- """) % (
- err,
- cls._refresh_env_var,
- "|".join(quiet),
- "|".join(warn),
- "|".join(error),
- cls._refresh_env_var,
- quiet[0])
+ """
+ )
+ % (
+ err,
+ cls._refresh_env_var,
+ "|".join(quiet),
+ "|".join(warn),
+ "|".join(error),
+ cls._refresh_env_var,
+ quiet[0],
+ )
+ )
if mode in warn:
print("WARNING: %s" % err)
else:
raise ImportError(err)
else:
- err = dedent("""\
+ err = (
+ dedent(
+ """\
%s environment variable has been set but it has been set with an invalid value.
Use only the following values:
- %s: for no warning or exception
- %s: for a printed warning
- %s: for a raised exception
- """) % (
- cls._refresh_env_var,
- "|".join(quiet),
- "|".join(warn),
- "|".join(error))
+ """
+ )
+ % (
+ cls._refresh_env_var,
+ "|".join(quiet),
+ "|".join(warn),
+ "|".join(error),
+ )
+ )
raise ImportError(err)
# we get here if this was the init refresh and the refresh mode
@@ -395,7 +464,7 @@ class Git(LazyMixin):
Hence we undo the escaping just to be sure.
"""
url = os.path.expandvars(url)
- if url.startswith('~'):
+ if url.startswith("~"):
url = os.path.expanduser(url)
url = url.replace("\\\\", "\\").replace("\\", "/")
return url
@@ -441,7 +510,7 @@ class Git(LazyMixin):
log.info("Ignored error after process had died: %r", ex)
# can be that nothing really exists anymore ...
- if os is None or getattr(os, 'kill', None) is None:
+ if os is None or getattr(os, "kill", None) is None:
return None
# try to kill it
@@ -458,7 +527,10 @@ class Git(LazyMixin):
# we simply use the shell and redirect to nul. Its slower than CreateProcess, question
# is whether we really want to see all these messages. Its annoying no matter what.
if is_win:
- call(("TASKKILL /F /T /PID %s 2>nul 1>nul" % str(proc.pid)), shell=True)
+ call(
+ ("TASKKILL /F /T /PID %s 2>nul 1>nul" % str(proc.pid)),
+ shell=True,
+ )
# END exception handling
def __del__(self) -> None:
@@ -468,15 +540,15 @@ class Git(LazyMixin):
return getattr(self.proc, attr)
# TODO: Bad choice to mimic `proc.wait()` but with different args.
- def wait(self, stderr: Union[None, str, bytes] = b'') -> int:
+ def wait(self, stderr: Union[None, str, bytes] = b"") -> int:
"""Wait for the process and return its status code.
:param stderr: Previously read value of stderr, in case stderr is already closed.
:warn: may deadlock if output or error pipes are used and not handled separately.
:raise GitCommandError: if the return status is not 0"""
if stderr is None:
- stderr_b = b''
- stderr_b = force_bytes(data=stderr, encoding='utf-8')
+ stderr_b = b""
+ stderr_b = force_bytes(data=stderr, encoding="utf-8")
status: Union[int, None]
if self.proc is not None:
status = self.proc.wait()
@@ -485,21 +557,25 @@ class Git(LazyMixin):
status = self.status
p_stderr = None
- def read_all_from_possibly_closed_stream(stream: Union[IO[bytes], None]) -> bytes:
+ def read_all_from_possibly_closed_stream(
+ stream: Union[IO[bytes], None]
+ ) -> bytes:
if stream:
try:
return stderr_b + force_bytes(stream.read())
except ValueError:
- return stderr_b or b''
+ return stderr_b or b""
else:
- return stderr_b or b''
+ return stderr_b or b""
# END status handling
if status != 0:
errstr = read_all_from_possibly_closed_stream(p_stderr)
- log.debug('AutoInterrupt wait stderr: %r' % (errstr,))
- raise GitCommandError(remove_password_if_present(self.args), status, errstr)
+ log.debug("AutoInterrupt wait stderr: %r" % (errstr,))
+ raise GitCommandError(
+ remove_password_if_present(self.args), status, errstr
+ )
return status
# END auto interrupt
@@ -513,12 +589,12 @@ class Git(LazyMixin):
If not all data is read to the end of the objects's lifetime, we read the
rest to assure the underlying stream continues to work"""
- __slots__: Tuple[str, ...] = ('_stream', '_nbr', '_size')
+ __slots__: Tuple[str, ...] = ("_stream", "_nbr", "_size")
def __init__(self, size: int, stream: IO[bytes]) -> None:
self._stream = stream
self._size = size
- self._nbr = 0 # num bytes read
+ self._nbr = 0 # num bytes read
# special case: if the object is empty, has null bytes, get the
# final newline right away.
@@ -529,7 +605,7 @@ class Git(LazyMixin):
def read(self, size: int = -1) -> bytes:
bytes_left = self._size - self._nbr
if bytes_left == 0:
- return b''
+ return b""
if size > -1:
# assure we don't try to read past our limit
size = min(bytes_left, size)
@@ -542,13 +618,13 @@ class Git(LazyMixin):
# check for depletion, read our final byte to make the stream usable by others
if self._size - self._nbr == 0:
- self._stream.read(1) # final newline
+ self._stream.read(1) # final newline
# END finish reading
return data
def readline(self, size: int = -1) -> bytes:
if self._nbr == self._size:
- return b''
+ return b""
# clamp size to lowest allowed value
bytes_left = self._size - self._nbr
@@ -589,7 +665,7 @@ class Git(LazyMixin):
return out
# skipcq: PYL-E0301
- def __iter__(self) -> 'Git.CatFileContentStream':
+ def __iter__(self) -> "Git.CatFileContentStream":
return self
def __next__(self) -> bytes:
@@ -634,7 +710,7 @@ class Git(LazyMixin):
"""A convenience method as it allows to call the command as if it was
an object.
:return: Callable object that will execute call _call_process with your arguments."""
- if name[0] == '_':
+ if name[0] == "_":
return LazyMixin.__getattr__(self, name)
return lambda *args, **kwargs: self._call_process(name, *args, **kwargs)
@@ -650,27 +726,31 @@ class Git(LazyMixin):
"""
self._persistent_git_options = self.transform_kwargs(
- split_single_char_options=True, **kwargs)
+ split_single_char_options=True, **kwargs
+ )
def _set_cache_(self, attr: str) -> None:
- if attr == '_version_info':
+ if attr == "_version_info":
# We only use the first 4 numbers, as everything else could be strings in fact (on windows)
- process_version = self._call_process('version') # should be as default *args and **kwargs used
- version_numbers = process_version.split(' ')[2]
-
- self._version_info = cast(Tuple[int, int, int, int],
- tuple(int(n) for n in version_numbers.split('.')[:4] if n.isdigit())
- )
+ process_version = self._call_process(
+ "version"
+ ) # should be as default *args and **kwargs used
+ version_numbers = process_version.split(" ")[2]
+
+ self._version_info = cast(
+ Tuple[int, int, int, int],
+ tuple(int(n) for n in version_numbers.split(".")[:4] if n.isdigit()),
+ )
else:
super(Git, self)._set_cache_(attr)
# END handle version info
- @ property
+ @property
def working_dir(self) -> Union[None, PathLike]:
""":return: Git directory we are working on"""
return self._working_dir
- @ property
+ @property
def version_info(self) -> Tuple[int, int, int, int]:
"""
:return: tuple(int, int, int, int) tuple with integers representing the major, minor
@@ -678,69 +758,72 @@ class Git(LazyMixin):
This value is generated on demand and is cached"""
return self._version_info
- @ overload
- def execute(self,
- command: Union[str, Sequence[Any]],
- *,
- as_process: Literal[True]
- ) -> 'AutoInterrupt':
+ @overload
+ def execute(
+ self, command: Union[str, Sequence[Any]], *, as_process: Literal[True]
+ ) -> "AutoInterrupt":
...
- @ overload
- def execute(self,
- command: Union[str, Sequence[Any]],
- *,
- as_process: Literal[False] = False,
- stdout_as_string: Literal[True]
- ) -> Union[str, Tuple[int, str, str]]:
+ @overload
+ def execute(
+ self,
+ command: Union[str, Sequence[Any]],
+ *,
+ as_process: Literal[False] = False,
+ stdout_as_string: Literal[True],
+ ) -> Union[str, Tuple[int, str, str]]:
...
- @ overload
- def execute(self,
- command: Union[str, Sequence[Any]],
- *,
- as_process: Literal[False] = False,
- stdout_as_string: Literal[False] = False
- ) -> Union[bytes, Tuple[int, bytes, str]]:
+ @overload
+ def execute(
+ self,
+ command: Union[str, Sequence[Any]],
+ *,
+ as_process: Literal[False] = False,
+ stdout_as_string: Literal[False] = False,
+ ) -> Union[bytes, Tuple[int, bytes, str]]:
...
- @ overload
- def execute(self,
- command: Union[str, Sequence[Any]],
- *,
- with_extended_output: Literal[False],
- as_process: Literal[False],
- stdout_as_string: Literal[True]
- ) -> str:
+ @overload
+ def execute(
+ self,
+ command: Union[str, Sequence[Any]],
+ *,
+ with_extended_output: Literal[False],
+ as_process: Literal[False],
+ stdout_as_string: Literal[True],
+ ) -> str:
...
- @ overload
- def execute(self,
- command: Union[str, Sequence[Any]],
- *,
- with_extended_output: Literal[False],
- as_process: Literal[False],
- stdout_as_string: Literal[False]
- ) -> bytes:
+ @overload
+ def execute(
+ self,
+ command: Union[str, Sequence[Any]],
+ *,
+ with_extended_output: Literal[False],
+ as_process: Literal[False],
+ stdout_as_string: Literal[False],
+ ) -> bytes:
...
- def execute(self,
- command: Union[str, Sequence[Any]],
- istream: Union[None, BinaryIO] = None,
- with_extended_output: bool = False,
- with_exceptions: bool = True,
- as_process: bool = False,
- output_stream: Union[None, BinaryIO] = None,
- stdout_as_string: bool = True,
- kill_after_timeout: Union[None, float] = None,
- with_stdout: bool = True,
- universal_newlines: bool = False,
- shell: Union[None, bool] = None,
- env: Union[None, Mapping[str, str]] = None,
- max_chunk_size: int = io.DEFAULT_BUFFER_SIZE,
- strip_newline_in_stdout: bool = True,
- **subprocess_kwargs: Any
- ) -> Union[str, bytes, Tuple[int, Union[str, bytes], str], AutoInterrupt]:
+ def execute(
+ self,
+ command: Union[str, Sequence[Any]],
+ istream: Union[None, BinaryIO] = None,
+ with_extended_output: bool = False,
+ with_exceptions: bool = True,
+ as_process: bool = False,
+ output_stream: Union[None, BinaryIO] = None,
+ stdout_as_string: bool = True,
+ kill_after_timeout: Union[None, float] = None,
+ with_stdout: bool = True,
+ universal_newlines: bool = False,
+ shell: Union[None, bool] = None,
+ env: Union[None, Mapping[str, str]] = None,
+ max_chunk_size: int = io.DEFAULT_BUFFER_SIZE,
+ strip_newline_in_stdout: bool = True,
+ **subprocess_kwargs: Any,
+ ) -> Union[str, bytes, Tuple[int, Union[str, bytes], str], AutoInterrupt]:
"""Handles executing the command on the shell and consumes and returns
the returned information (stdout)
@@ -831,8 +914,8 @@ class Git(LazyMixin):
you must update the execute_kwargs tuple housed in this module."""
# Remove password for the command if present
redacted_command = remove_password_if_present(command)
- if self.GIT_PYTHON_TRACE and (self.GIT_PYTHON_TRACE != 'full' or as_process):
- log.info(' '.join(redacted_command))
+ if self.GIT_PYTHON_TRACE and (self.GIT_PYTHON_TRACE != "full" or as_process):
+ log.info(" ".join(redacted_command))
# Allow the user to have the command executed in their working dir.
try:
@@ -858,33 +941,47 @@ class Git(LazyMixin):
if is_win:
cmd_not_found_exception = OSError
if kill_after_timeout is not None:
- raise GitCommandError(redacted_command, '"kill_after_timeout" feature is not supported on Windows.')
+ raise GitCommandError(
+ redacted_command,
+ '"kill_after_timeout" feature is not supported on Windows.',
+ )
else:
- cmd_not_found_exception = FileNotFoundError # NOQA # exists, flake8 unknown @UndefinedVariable
+ cmd_not_found_exception = (
+ FileNotFoundError # NOQA # exists, flake8 unknown @UndefinedVariable
+ )
# end handle
- stdout_sink = (PIPE
- if with_stdout
- else getattr(subprocess, 'DEVNULL', None) or open(os.devnull, 'wb'))
+ stdout_sink = (
+ PIPE
+ if with_stdout
+ else getattr(subprocess, "DEVNULL", None) or open(os.devnull, "wb")
+ )
istream_ok = "None"
if istream:
istream_ok = "<valid stream>"
- log.debug("Popen(%s, cwd=%s, universal_newlines=%s, shell=%s, istream=%s)",
- redacted_command, cwd, universal_newlines, shell, istream_ok)
+ log.debug(
+ "Popen(%s, cwd=%s, universal_newlines=%s, shell=%s, istream=%s)",
+ redacted_command,
+ cwd,
+ universal_newlines,
+ shell,
+ istream_ok,
+ )
try:
- proc = Popen(command,
- env=env,
- cwd=cwd,
- bufsize=-1,
- stdin=istream or DEVNULL,
- stderr=PIPE,
- stdout=stdout_sink,
- shell=shell is not None and shell or self.USE_SHELL,
- close_fds=is_posix, # unsupported on windows
- universal_newlines=universal_newlines,
- creationflags=PROC_CREATIONFLAGS,
- **subprocess_kwargs
- )
+ proc = Popen(
+ command,
+ env=env,
+ cwd=cwd,
+ bufsize=-1,
+ stdin=istream or DEVNULL,
+ stderr=PIPE,
+ stdout=stdout_sink,
+ shell=shell is not None and shell or self.USE_SHELL,
+ close_fds=is_posix, # unsupported on windows
+ universal_newlines=universal_newlines,
+ creationflags=PROC_CREATIONFLAGS,
+ **subprocess_kwargs,
+ )
except cmd_not_found_exception as err:
raise GitCommandNotFound(redacted_command, err) from err
@@ -897,9 +994,12 @@ class Git(LazyMixin):
return self.AutoInterrupt(proc, command)
def _kill_process(pid: int) -> None:
- """ Callback method to kill a process. """
- p = Popen(['ps', '--ppid', str(pid)], stdout=PIPE,
- creationflags=PROC_CREATIONFLAGS)
+ """Callback method to kill a process."""
+ p = Popen(
+ ["ps", "--ppid", str(pid)],
+ stdout=PIPE,
+ creationflags=PROC_CREATIONFLAGS,
+ )
child_pids = []
if p.stdout is not None:
for line in p.stdout:
@@ -909,29 +1009,32 @@ class Git(LazyMixin):
child_pids.append(int(local_pid))
try:
# Windows does not have SIGKILL, so use SIGTERM instead
- sig = getattr(signal, 'SIGKILL', signal.SIGTERM)
+ sig = getattr(signal, "SIGKILL", signal.SIGTERM)
os.kill(pid, sig)
for child_pid in child_pids:
try:
os.kill(child_pid, sig)
except OSError:
pass
- kill_check.set() # tell the main routine that the process was killed
+ kill_check.set() # tell the main routine that the process was killed
except OSError:
# It is possible that the process gets completed in the duration after timeout
# happens and before we try to kill the process.
pass
return
+
# end
if kill_after_timeout is not None:
kill_check = threading.Event()
- watchdog = threading.Timer(kill_after_timeout, _kill_process, args=(proc.pid,))
+ watchdog = threading.Timer(
+ kill_after_timeout, _kill_process, args=(proc.pid,)
+ )
# Wait for the process to return
status = 0
- stdout_value: Union[str, bytes] = b''
- stderr_value: Union[str, bytes] = b''
+ stdout_value: Union[str, bytes] = b""
+ stderr_value: Union[str, bytes] = b""
newline = "\n" if universal_newlines else b"\n"
try:
if output_stream is None:
@@ -941,8 +1044,10 @@ class Git(LazyMixin):
if kill_after_timeout is not None:
watchdog.cancel()
if kill_check.is_set():
- stderr_value = ('Timeout: the command "%s" did not complete in %d '
- 'secs.' % (" ".join(redacted_command), kill_after_timeout))
+ stderr_value = (
+ 'Timeout: the command "%s" did not complete in %d '
+ "secs." % (" ".join(redacted_command), kill_after_timeout)
+ )
if not universal_newlines:
stderr_value = stderr_value.encode(defenc)
# strip trailing "\n"
@@ -953,12 +1058,16 @@ class Git(LazyMixin):
status = proc.returncode
else:
- max_chunk_size = max_chunk_size if max_chunk_size and max_chunk_size > 0 else io.DEFAULT_BUFFER_SIZE
+ max_chunk_size = (
+ max_chunk_size
+ if max_chunk_size and max_chunk_size > 0
+ else io.DEFAULT_BUFFER_SIZE
+ )
stream_copy(proc.stdout, output_stream, max_chunk_size)
stdout_value = proc.stdout.read()
stderr_value = proc.stderr.read()
# strip trailing "\n"
- if stderr_value.endswith(newline): # type: ignore
+ if stderr_value.endswith(newline): # type: ignore
stderr_value = stderr_value[:-1]
status = proc.wait()
# END stdout handling
@@ -966,18 +1075,28 @@ class Git(LazyMixin):
proc.stdout.close()
proc.stderr.close()
- if self.GIT_PYTHON_TRACE == 'full':
+ if self.GIT_PYTHON_TRACE == "full":
cmdstr = " ".join(redacted_command)
def as_text(stdout_value: Union[bytes, str]) -> str:
- return not output_stream and safe_decode(stdout_value) or '<OUTPUT_STREAM>'
+ return (
+ not output_stream and safe_decode(stdout_value) or "<OUTPUT_STREAM>"
+ )
+
# end
if stderr_value:
- log.info("%s -> %d; stdout: '%s'; stderr: '%s'",
- cmdstr, status, as_text(stdout_value), safe_decode(stderr_value))
+ log.info(
+ "%s -> %d; stdout: '%s'; stderr: '%s'",
+ cmdstr,
+ status,
+ as_text(stdout_value),
+ safe_decode(stderr_value),
+ )
elif stdout_value:
- log.info("%s -> %d; stdout: '%s'", cmdstr, status, as_text(stdout_value))
+ log.info(
+ "%s -> %d; stdout: '%s'", cmdstr, status, as_text(stdout_value)
+ )
else:
log.info("%s -> %d", cmdstr, status)
# END handle debug printing
@@ -985,7 +1104,9 @@ class Git(LazyMixin):
if with_exceptions and status != 0:
raise GitCommandError(redacted_command, status, stderr_value, stdout_value)
- if isinstance(stdout_value, bytes) and stdout_as_string: # could also be output_stream
+ if (
+ isinstance(stdout_value, bytes) and stdout_as_string
+ ): # could also be output_stream
stdout_value = safe_decode(stdout_value)
# Allow access to the command's status code
@@ -1042,7 +1163,9 @@ class Git(LazyMixin):
finally:
self.update_environment(**old_env)
- def transform_kwarg(self, name: str, value: Any, split_single_char_options: bool) -> List[str]:
+ def transform_kwarg(
+ self, name: str, value: Any, split_single_char_options: bool
+ ) -> List[str]:
if len(name) == 1:
if value is True:
return ["-%s" % name]
@@ -1058,7 +1181,9 @@ class Git(LazyMixin):
return ["--%s=%s" % (dashify(name), value)]
return []
- def transform_kwargs(self, split_single_char_options: bool = True, **kwargs: Any) -> List[str]:
+ def transform_kwargs(
+ self, split_single_char_options: bool = True, **kwargs: Any
+ ) -> List[str]:
"""Transforms Python style kwargs into git command line options."""
args = []
for k, v in kwargs.items():
@@ -1081,7 +1206,7 @@ class Git(LazyMixin):
return outlist
- def __call__(self, **kwargs: Any) -> 'Git':
+ def __call__(self, **kwargs: Any) -> "Git":
"""Specify command line options to the git executable
for a subcommand call
@@ -1094,28 +1219,34 @@ class Git(LazyMixin):
``Examples``::
git(work_tree='/tmp').difftool()"""
self._git_options = self.transform_kwargs(
- split_single_char_options=True, **kwargs)
+ split_single_char_options=True, **kwargs
+ )
return self
@overload
- def _call_process(self, method: str, *args: None, **kwargs: None
- ) -> str:
+ def _call_process(self, method: str, *args: None, **kwargs: None) -> str:
... # if no args given, execute called with all defaults
@overload
- def _call_process(self, method: str,
- istream: int,
- as_process: Literal[True],
- *args: Any, **kwargs: Any
- ) -> 'Git.AutoInterrupt': ...
+ def _call_process(
+ self,
+ method: str,
+ istream: int,
+ as_process: Literal[True],
+ *args: Any,
+ **kwargs: Any,
+ ) -> "Git.AutoInterrupt":
+ ...
@overload
- def _call_process(self, method: str, *args: Any, **kwargs: Any
- ) -> Union[str, bytes, Tuple[int, Union[str, bytes], str], 'Git.AutoInterrupt']:
+ def _call_process(
+ self, method: str, *args: Any, **kwargs: Any
+ ) -> Union[str, bytes, Tuple[int, Union[str, bytes], str], "Git.AutoInterrupt"]:
...
- def _call_process(self, method: str, *args: Any, **kwargs: Any
- ) -> Union[str, bytes, Tuple[int, Union[str, bytes], str], 'Git.AutoInterrupt']:
+ def _call_process(
+ self, method: str, *args: Any, **kwargs: Any
+ ) -> Union[str, bytes, Tuple[int, Union[str, bytes], str], "Git.AutoInterrupt"]:
"""Run the given git command with the specified arguments and return
the result as a String
@@ -1145,13 +1276,13 @@ class Git(LazyMixin):
:return: Same as ``execute``
if no args given used execute default (esp. as_process = False, stdout_as_string = True)
- and return str """
+ and return str"""
# Handle optional arguments prior to calling transform_kwargs
# otherwise these'll end up in args, which is bad.
exec_kwargs = {k: v for k, v in kwargs.items() if k in execute_kwargs}
opts_kwargs = {k: v for k, v in kwargs.items() if k not in execute_kwargs}
- insert_after_this_arg = opts_kwargs.pop('insert_kwargs_after', None)
+ insert_after_this_arg = opts_kwargs.pop("insert_kwargs_after", None)
# Prepare the argument list
@@ -1164,10 +1295,12 @@ class Git(LazyMixin):
try:
index = ext_args.index(insert_after_this_arg)
except ValueError as err:
- raise ValueError("Couldn't find argument '%s' in args %s to insert cmd options after"
- % (insert_after_this_arg, str(ext_args))) from err
+ raise ValueError(
+ "Couldn't find argument '%s' in args %s to insert cmd options after"
+ % (insert_after_this_arg, str(ext_args))
+ ) from err
# end handle error
- args_list = ext_args[:index + 1] + opt_args + ext_args[index + 1:]
+ args_list = ext_args[: index + 1] + opt_args + ext_args[index + 1 :]
# end handle opts_kwargs
call = [self.GIT_PYTHON_GIT_EXECUTABLE]
@@ -1197,9 +1330,15 @@ class Git(LazyMixin):
tokens = header_line.split()
if len(tokens) != 3:
if not tokens:
- raise ValueError("SHA could not be resolved, git returned: %r" % (header_line.strip()))
+ raise ValueError(
+ "SHA could not be resolved, git returned: %r"
+ % (header_line.strip())
+ )
else:
- raise ValueError("SHA %s could not be resolved, git returned: %r" % (tokens[0], header_line.strip()))
+ raise ValueError(
+ "SHA %s could not be resolved, git returned: %r"
+ % (tokens[0], header_line.strip())
+ )
# END handle actual return value
# END error handling
@@ -1211,9 +1350,9 @@ class Git(LazyMixin):
# required for command to separate refs on stdin, as bytes
if isinstance(ref, bytes):
# Assume 40 bytes hexsha - bin-to-ascii for some reason returns bytes, not text
- refstr: str = ref.decode('ascii')
+ refstr: str = ref.decode("ascii")
elif not isinstance(ref, str):
- refstr = str(ref) # could be ref-object
+ refstr = str(ref) # could be ref-object
else:
refstr = ref
@@ -1221,8 +1360,9 @@ class Git(LazyMixin):
refstr += "\n"
return refstr.encode(defenc)
- def _get_persistent_cmd(self, attr_name: str, cmd_name: str, *args: Any, **kwargs: Any
- ) -> 'Git.AutoInterrupt':
+ def _get_persistent_cmd(
+ self, attr_name: str, cmd_name: str, *args: Any, **kwargs: Any
+ ) -> "Git.AutoInterrupt":
cur_val = getattr(self, attr_name)
if cur_val is not None:
return cur_val
@@ -1232,10 +1372,12 @@ class Git(LazyMixin):
cmd = self._call_process(cmd_name, *args, **options)
setattr(self, attr_name, cmd)
- cmd = cast('Git.AutoInterrupt', cmd)
+ cmd = cast("Git.AutoInterrupt", cmd)
return cmd
- def __get_object_header(self, cmd: 'Git.AutoInterrupt', ref: AnyStr) -> Tuple[str, str, int]:
+ def __get_object_header(
+ self, cmd: "Git.AutoInterrupt", ref: AnyStr
+ ) -> Tuple[str, str, int]:
if cmd.stdin and cmd.stdout:
cmd.stdin.write(self._prepare_ref(ref))
cmd.stdin.flush()
@@ -1244,7 +1386,7 @@ class Git(LazyMixin):
raise ValueError("cmd stdin was empty")
def get_object_header(self, ref: str) -> Tuple[str, str, int]:
- """ Use this method to quickly examine the type and size of the object behind
+ """Use this method to quickly examine the type and size of the object behind
the given ref.
:note: The method will only suffer from the costs of command invocation
@@ -1255,16 +1397,18 @@ class Git(LazyMixin):
return self.__get_object_header(cmd, ref)
def get_object_data(self, ref: str) -> Tuple[str, str, int, bytes]:
- """ As get_object_header, but returns object data as well
+ """As get_object_header, but returns object data as well
:return: (hexsha, type_string, size_as_int,data_string)
:note: not threadsafe"""
hexsha, typename, size, stream = self.stream_object_data(ref)
data = stream.read(size)
- del(stream)
+ del stream
return (hexsha, typename, size, data)
- def stream_object_data(self, ref: str) -> Tuple[str, str, int, 'Git.CatFileContentStream']:
- """ As get_object_header, but returns the data as a stream
+ def stream_object_data(
+ self, ref: str
+ ) -> Tuple[str, str, int, "Git.CatFileContentStream"]:
+ """As get_object_header, but returns the data as a stream
:return: (hexsha, type_string, size_as_int, stream)
:note: This method is not threadsafe, you need one independent Command instance per thread to be safe !"""
@@ -1273,7 +1417,7 @@ class Git(LazyMixin):
cmd_stdout = cmd.stdout if cmd.stdout is not None else io.BytesIO()
return (hexsha, typename, size, self.CatFileContentStream(size, cmd_stdout))
- def clear_cache(self) -> 'Git':
+ def clear_cache(self) -> "Git":
"""Clear all kinds of internal caches to release resources.
Currently persistent commands will be interrupted.
diff --git a/git/compat.py b/git/compat.py
index 988c04ef..e7ef28c3 100644
--- a/git/compat.py
+++ b/git/compat.py
@@ -12,8 +12,8 @@ import os
import sys
from gitdb.utils.encoding import (
- force_bytes, # @UnusedImport
- force_text # @UnusedImport
+ force_bytes, # @UnusedImport
+ force_text, # @UnusedImport
)
# typing --------------------------------------------------------------------
@@ -29,21 +29,24 @@ from typing import (
Union,
overload,
)
+
# ---------------------------------------------------------------------------
-is_win: bool = (os.name == 'nt')
-is_posix = (os.name == 'posix')
-is_darwin = (os.name == 'darwin')
+is_win: bool = os.name == "nt"
+is_posix = os.name == "posix"
+is_darwin = os.name == "darwin"
defenc = sys.getfilesystemencoding()
@overload
-def safe_decode(s: None) -> None: ...
+def safe_decode(s: None) -> None:
+ ...
@overload
-def safe_decode(s: AnyStr) -> str: ...
+def safe_decode(s: AnyStr) -> str:
+ ...
def safe_decode(s: Union[AnyStr, None]) -> Optional[str]:
@@ -51,19 +54,21 @@ def safe_decode(s: Union[AnyStr, None]) -> Optional[str]:
if isinstance(s, str):
return s
elif isinstance(s, bytes):
- return s.decode(defenc, 'surrogateescape')
+ return s.decode(defenc, "surrogateescape")
elif s is None:
return None
else:
- raise TypeError('Expected bytes or text, but got %r' % (s,))
+ raise TypeError("Expected bytes or text, but got %r" % (s,))
@overload
-def safe_encode(s: None) -> None: ...
+def safe_encode(s: None) -> None:
+ ...
@overload
-def safe_encode(s: AnyStr) -> bytes: ...
+def safe_encode(s: AnyStr) -> bytes:
+ ...
def safe_encode(s: Optional[AnyStr]) -> Optional[bytes]:
@@ -75,15 +80,17 @@ def safe_encode(s: Optional[AnyStr]) -> Optional[bytes]:
elif s is None:
return None
else:
- raise TypeError('Expected bytes or text, but got %r' % (s,))
+ raise TypeError("Expected bytes or text, but got %r" % (s,))
@overload
-def win_encode(s: None) -> None: ...
+def win_encode(s: None) -> None:
+ ...
@overload
-def win_encode(s: AnyStr) -> bytes: ...
+def win_encode(s: AnyStr) -> bytes:
+ ...
def win_encode(s: Optional[AnyStr]) -> Optional[bytes]:
@@ -93,5 +100,5 @@ def win_encode(s: Optional[AnyStr]) -> Optional[bytes]:
elif isinstance(s, bytes):
return s
elif s is not None:
- raise TypeError('Expected bytes or text, but got %r' % (s,))
+ raise TypeError("Expected bytes or text, but got %r" % (s,))
return None
diff --git a/git/config.py b/git/config.py
index 1ac3c9ce..24c2b201 100644
--- a/git/config.py
+++ b/git/config.py
@@ -30,8 +30,20 @@ import configparser as cp
# typing-------------------------------------------------------
-from typing import (Any, Callable, Generic, IO, List, Dict, Sequence,
- TYPE_CHECKING, Tuple, TypeVar, Union, cast)
+from typing import (
+ Any,
+ Callable,
+ Generic,
+ IO,
+ List,
+ Dict,
+ Sequence,
+ TYPE_CHECKING,
+ Tuple,
+ TypeVar,
+ Union,
+ cast,
+)
from git.types import Lit_config_levels, ConfigLevels_Tup, PathLike, assert_never, _T
@@ -39,23 +51,25 @@ if TYPE_CHECKING:
from git.repo.base import Repo
from io import BytesIO
-T_ConfigParser = TypeVar('T_ConfigParser', bound='GitConfigParser')
-T_OMD_value = TypeVar('T_OMD_value', str, bytes, int, float, bool)
+T_ConfigParser = TypeVar("T_ConfigParser", bound="GitConfigParser")
+T_OMD_value = TypeVar("T_OMD_value", str, bytes, int, float, bool)
if sys.version_info[:3] < (3, 7, 2):
# typing.Ordereddict not added until py 3.7.2
from collections import OrderedDict
+
OrderedDict_OMD = OrderedDict
else:
from typing import OrderedDict
+
OrderedDict_OMD = OrderedDict[str, List[T_OMD_value]] # type: ignore[assignment, misc]
# -------------------------------------------------------------
-__all__ = ('GitConfigParser', 'SectionConstraint')
+__all__ = ("GitConfigParser", "SectionConstraint")
-log = logging.getLogger('git.config')
+log = logging.getLogger("git.config")
log.addHandler(logging.NullHandler())
# invariants
@@ -67,26 +81,37 @@ CONFIG_LEVELS: ConfigLevels_Tup = ("system", "user", "global", "repository")
# Section pattern to detect conditional includes.
# https://git-scm.com/docs/git-config#_conditional_includes
-CONDITIONAL_INCLUDE_REGEXP = re.compile(r"(?<=includeIf )\"(gitdir|gitdir/i|onbranch):(.+)\"")
+CONDITIONAL_INCLUDE_REGEXP = re.compile(
+ r"(?<=includeIf )\"(gitdir|gitdir/i|onbranch):(.+)\""
+)
class MetaParserBuilder(abc.ABCMeta):
"""Utility class wrapping base-class methods into decorators that assure read-only properties"""
- def __new__(cls, name: str, bases: Tuple, clsdict: Dict[str, Any]) -> 'MetaParserBuilder':
+
+ def __new__(
+ cls, name: str, bases: Tuple, clsdict: Dict[str, Any]
+ ) -> "MetaParserBuilder":
"""
Equip all base-class methods with a needs_values decorator, and all non-const methods
with a set_dirty_and_flush_changes decorator in addition to that."""
- kmm = '_mutating_methods_'
+ kmm = "_mutating_methods_"
if kmm in clsdict:
mutating_methods = clsdict[kmm]
for base in bases:
- methods = (t for t in inspect.getmembers(base, inspect.isroutine) if not t[0].startswith("_"))
+ methods = (
+ t
+ for t in inspect.getmembers(base, inspect.isroutine)
+ if not t[0].startswith("_")
+ )
for name, method in methods:
if name in clsdict:
continue
method_with_values = needs_values(method)
if name in mutating_methods:
- method_with_values = set_dirty_and_flush_changes(method_with_values)
+ method_with_values = set_dirty_and_flush_changes(
+ method_with_values
+ )
# END mutating methods handling
clsdict[name] = method_with_values
@@ -102,9 +127,10 @@ def needs_values(func: Callable[..., _T]) -> Callable[..., _T]:
"""Returns method assuring we read values (on demand) before we try to access them"""
@wraps(func)
- def assure_data_present(self: 'GitConfigParser', *args: Any, **kwargs: Any) -> _T:
+ def assure_data_present(self: "GitConfigParser", *args: Any, **kwargs: Any) -> _T:
self.read()
return func(self, *args, **kwargs)
+
# END wrapper method
return assure_data_present
@@ -114,11 +140,12 @@ def set_dirty_and_flush_changes(non_const_func: Callable[..., _T]) -> Callable[.
If so, the instance will be set dirty.
Additionally, we flush the changes right to disk"""
- def flush_changes(self: 'GitConfigParser', *args: Any, **kwargs: Any) -> _T:
+ def flush_changes(self: "GitConfigParser", *args: Any, **kwargs: Any) -> _T:
rval = non_const_func(self, *args, **kwargs)
self._dirty = True
self.write()
return rval
+
# END wrapper method
flush_changes.__name__ = non_const_func.__name__
return flush_changes
@@ -133,9 +160,21 @@ class SectionConstraint(Generic[T_ConfigParser]):
:note:
If used as a context manager, will release the wrapped ConfigParser."""
+
__slots__ = ("_config", "_section_name")
- _valid_attrs_ = ("get_value", "set_value", "get", "set", "getint", "getfloat", "getboolean", "has_option",
- "remove_section", "remove_option", "options")
+ _valid_attrs_ = (
+ "get_value",
+ "set_value",
+ "get",
+ "set",
+ "getint",
+ "getfloat",
+ "getboolean",
+ "has_option",
+ "remove_section",
+ "remove_option",
+ "options",
+ )
def __init__(self, config: T_ConfigParser, section: str) -> None:
self._config = config
@@ -166,11 +205,13 @@ class SectionConstraint(Generic[T_ConfigParser]):
"""Equivalent to GitConfigParser.release(), which is called on our underlying parser instance"""
return self._config.release()
- def __enter__(self) -> 'SectionConstraint[T_ConfigParser]':
+ def __enter__(self) -> "SectionConstraint[T_ConfigParser]":
self._config.__enter__()
return self
- def __exit__(self, exception_type: str, exception_value: str, traceback: str) -> None:
+ def __exit__(
+ self, exception_type: str, exception_value: str, traceback: str
+ ) -> None:
self._config.__exit__(exception_type, exception_value, traceback)
@@ -228,16 +269,22 @@ def get_config_path(config_level: Lit_config_levels) -> str:
if config_level == "system":
return "/etc/gitconfig"
elif config_level == "user":
- config_home = os.environ.get("XDG_CONFIG_HOME") or osp.join(os.environ.get("HOME", '~'), ".config")
+ config_home = os.environ.get("XDG_CONFIG_HOME") or osp.join(
+ os.environ.get("HOME", "~"), ".config"
+ )
return osp.normpath(osp.expanduser(osp.join(config_home, "git", "config")))
elif config_level == "global":
return osp.normpath(osp.expanduser("~/.gitconfig"))
elif config_level == "repository":
- raise ValueError("No repo to get repository configuration from. Use Repo._get_config_path")
+ raise ValueError(
+ "No repo to get repository configuration from. Use Repo._get_config_path"
+ )
else:
# Should not reach here. Will raise ValueError if does. Static typing will warn missing elifs
- assert_never(config_level, # type: ignore[unreachable]
- ValueError(f"Invalid configuration level: {config_level!r}"))
+ assert_never(
+ config_level, # type: ignore[unreachable]
+ ValueError(f"Invalid configuration level: {config_level!r}"),
+ )
class GitConfigParser(cp.RawConfigParser, metaclass=MetaParserBuilder):
@@ -258,30 +305,36 @@ class GitConfigParser(cp.RawConfigParser, metaclass=MetaParserBuilder):
must match perfectly.
If used as a context manager, will release the locked file."""
- #{ Configuration
+ # { Configuration
# The lock type determines the type of lock to use in new configuration readers.
# They must be compatible to the LockFile interface.
# A suitable alternative would be the BlockingLockFile
t_lock = LockFile
- re_comment = re.compile(r'^\s*[#;]')
+ re_comment = re.compile(r"^\s*[#;]")
- #} END configuration
+ # } END configuration
- optvalueonly_source = r'\s*(?P<option>[^:=\s][^:=]*)'
+ optvalueonly_source = r"\s*(?P<option>[^:=\s][^:=]*)"
OPTVALUEONLY = re.compile(optvalueonly_source)
- OPTCRE = re.compile(optvalueonly_source + r'\s*(?P<vi>[:=])\s*' + r'(?P<value>.*)$')
+ OPTCRE = re.compile(optvalueonly_source + r"\s*(?P<vi>[:=])\s*" + r"(?P<value>.*)$")
del optvalueonly_source
# list of RawConfigParser methods able to change the instance
_mutating_methods_ = ("add_section", "remove_section", "remove_option", "set")
- def __init__(self, file_or_files: Union[None, PathLike, 'BytesIO', Sequence[Union[PathLike, 'BytesIO']]] = None,
- read_only: bool = True, merge_includes: bool = True,
- config_level: Union[Lit_config_levels, None] = None,
- repo: Union['Repo', None] = None) -> None:
+ def __init__(
+ self,
+ file_or_files: Union[
+ None, PathLike, "BytesIO", Sequence[Union[PathLike, "BytesIO"]]
+ ] = None,
+ read_only: bool = True,
+ merge_includes: bool = True,
+ config_level: Union[Lit_config_levels, None] = None,
+ repo: Union["Repo", None] = None,
+ ) -> None:
"""Initialize a configuration reader to read the given file_or_files and to
possibly allow changes to it by setting read_only False
@@ -303,22 +356,28 @@ class GitConfigParser(cp.RawConfigParser, metaclass=MetaParserBuilder):
cp.RawConfigParser.__init__(self, dict_type=_OMD)
self._dict: Callable[..., _OMD] # type: ignore # mypy/typeshed bug?
self._defaults: _OMD
- self._sections: _OMD # type: ignore # mypy/typeshed bug?
+ self._sections: _OMD # type: ignore # mypy/typeshed bug?
# Used in python 3, needs to stay in sync with sections for underlying implementation to work
- if not hasattr(self, '_proxies'):
+ if not hasattr(self, "_proxies"):
self._proxies = self._dict()
if file_or_files is not None:
- self._file_or_files: Union[PathLike, 'BytesIO', Sequence[Union[PathLike, 'BytesIO']]] = file_or_files
+ self._file_or_files: Union[
+ PathLike, "BytesIO", Sequence[Union[PathLike, "BytesIO"]]
+ ] = file_or_files
else:
if config_level is None:
if read_only:
- self._file_or_files = [get_config_path(cast(Lit_config_levels, f))
- for f in CONFIG_LEVELS
- if f != 'repository']
+ self._file_or_files = [
+ get_config_path(cast(Lit_config_levels, f))
+ for f in CONFIG_LEVELS
+ if f != "repository"
+ ]
else:
- raise ValueError("No configuration level or configuration files specified")
+ raise ValueError(
+ "No configuration level or configuration files specified"
+ )
else:
self._file_or_files = [get_config_path(config_level)]
@@ -327,7 +386,7 @@ class GitConfigParser(cp.RawConfigParser, metaclass=MetaParserBuilder):
self._is_initialized = False
self._merge_includes = merge_includes
self._repo = repo
- self._lock: Union['LockFile', None] = None
+ self._lock: Union["LockFile", None] = None
self._acquire_lock()
def _acquire_lock(self) -> None:
@@ -337,7 +396,8 @@ class GitConfigParser(cp.RawConfigParser, metaclass=MetaParserBuilder):
file_or_files = self._file_or_files
elif isinstance(self._file_or_files, (tuple, list, Sequence)):
raise ValueError(
- "Write-ConfigParsers can operate on a single file only, multiple files have been passed")
+ "Write-ConfigParsers can operate on a single file only, multiple files have been passed"
+ )
else:
file_or_files = self._file_or_files.name
@@ -354,7 +414,7 @@ class GitConfigParser(cp.RawConfigParser, metaclass=MetaParserBuilder):
# NOTE: only consistent in PY2
self.release()
- def __enter__(self) -> 'GitConfigParser':
+ def __enter__(self) -> "GitConfigParser":
self._acquire_lock()
return self
@@ -374,7 +434,9 @@ class GitConfigParser(cp.RawConfigParser, metaclass=MetaParserBuilder):
try:
self.write()
except IOError:
- log.error("Exception during destruction of GitConfigParser", exc_info=True)
+ log.error(
+ "Exception during destruction of GitConfigParser", exc_info=True
+ )
except ReferenceError:
# This happens in PY3 ... and usually means that some state cannot be written
# as the sections dict cannot be iterated
@@ -398,19 +460,20 @@ class GitConfigParser(cp.RawConfigParser, metaclass=MetaParserBuilder):
Removed big comments to make it more compact.
Made sure it ignores initial whitespace as git uses tabs"""
- cursect = None # None, or a dictionary
+ cursect = None # None, or a dictionary
optname = None
lineno = 0
is_multi_line = False
- e = None # None, or an exception
+ e = None # None, or an exception
def string_decode(v: str) -> str:
- if v[-1] == '\\':
+ if v[-1] == "\\":
v = v[:-1]
# end cut trailing escapes to prevent decode error
- return v.encode(defenc).decode('unicode_escape')
+ return v.encode(defenc).decode("unicode_escape")
# end
+
# end
while True:
@@ -420,22 +483,22 @@ class GitConfigParser(cp.RawConfigParser, metaclass=MetaParserBuilder):
break
lineno = lineno + 1
# comment or blank line?
- if line.strip() == '' or self.re_comment.match(line):
+ if line.strip() == "" or self.re_comment.match(line):
continue
- if line.split(None, 1)[0].lower() == 'rem' and line[0] in "rR":
+ if line.split(None, 1)[0].lower() == "rem" and line[0] in "rR":
# no leading whitespace
continue
# is it a section header?
mo = self.SECTCRE.match(line.strip())
if not is_multi_line and mo:
- sectname: str = mo.group('header').strip()
+ sectname: str = mo.group("header").strip()
if sectname in self._sections:
cursect = self._sections[sectname]
elif sectname == cp.DEFAULTSECT:
cursect = self._defaults
else:
- cursect = self._dict((('__name__', sectname),))
+ cursect = self._dict((("__name__", sectname),))
self._sections[sectname] = cursect
self._proxies[sectname] = None
# So sections can't start with a continuation line
@@ -448,14 +511,18 @@ class GitConfigParser(cp.RawConfigParser, metaclass=MetaParserBuilder):
mo = self.OPTCRE.match(line)
if mo:
# We might just have handled the last line, which could contain a quotation we want to remove
- optname, vi, optval = mo.group('option', 'vi', 'value')
- if vi in ('=', ':') and ';' in optval and not optval.strip().startswith('"'):
- pos = optval.find(';')
+ optname, vi, optval = mo.group("option", "vi", "value")
+ if (
+ vi in ("=", ":")
+ and ";" in optval
+ and not optval.strip().startswith('"')
+ ):
+ pos = optval.find(";")
if pos != -1 and optval[pos - 1].isspace():
optval = optval[:pos]
optval = optval.strip()
if optval == '""':
- optval = ''
+ optval = ""
# end handle empty string
optname = self.optionxform(optname.rstrip())
if len(optval) > 1 and optval[0] == '"' and optval[-1] != '"':
@@ -518,11 +585,8 @@ class GitConfigParser(cp.RawConfigParser, metaclass=MetaParserBuilder):
if keyword.endswith("/i"):
value = re.sub(
r"[a-zA-Z]",
- lambda m: "[{}{}]".format(
- m.group().lower(),
- m.group().upper()
- ),
- value
+ lambda m: "[{}{}]".format(m.group().lower(), m.group().upper()),
+ value,
)
if self._repo.git_dir:
if fnmatch.fnmatchcase(str(self._repo.git_dir), value):
@@ -557,7 +621,7 @@ class GitConfigParser(cp.RawConfigParser, metaclass=MetaParserBuilder):
elif not isinstance(self._file_or_files, (tuple, list, Sequence)):
# could merge with above isinstance once runtime type known
files_to_read = [self._file_or_files]
- else: # for lists or tuples
+ else: # for lists or tuples
files_to_read = list(self._file_or_files)
# end assure we have a copy of the paths to handle
@@ -569,13 +633,15 @@ class GitConfigParser(cp.RawConfigParser, metaclass=MetaParserBuilder):
if hasattr(file_path, "seek"):
# must be a file objectfile-object
- file_path = cast(IO[bytes], file_path) # replace with assert to narrow type, once sure
+ file_path = cast(
+ IO[bytes], file_path
+ ) # replace with assert to narrow type, once sure
self._read(file_path, file_path.name)
else:
# assume a path if it is not a file-object
file_path = cast(PathLike, file_path)
try:
- with open(file_path, 'rb') as fp:
+ with open(file_path, "rb") as fp:
file_ok = True
self._read(fp, fp.name)
except IOError:
@@ -585,14 +651,16 @@ class GitConfigParser(cp.RawConfigParser, metaclass=MetaParserBuilder):
# We expect all paths to be normalized and absolute (and will assure that is the case)
if self._has_includes():
for _, include_path in self._included_paths():
- if include_path.startswith('~'):
+ if include_path.startswith("~"):
include_path = osp.expanduser(include_path)
if not osp.isabs(include_path):
if not file_ok:
continue
# end ignore relative paths if we don't know the configuration file path
file_path = cast(PathLike, file_path)
- assert osp.isabs(file_path), "Need absolute paths to be sure our cycle checks will work"
+ assert osp.isabs(
+ file_path
+ ), "Need absolute paths to be sure our cycle checks will work"
include_path = osp.join(osp.dirname(file_path), include_path)
# end make include path absolute
include_path = osp.normpath(include_path)
@@ -615,18 +683,27 @@ class GitConfigParser(cp.RawConfigParser, metaclass=MetaParserBuilder):
def _write(self, fp: IO) -> None:
"""Write an .ini-format representation of the configuration state in
git compatible format"""
+
def write_section(name: str, section_dict: _OMD) -> None:
fp.write(("[%s]\n" % name).encode(defenc))
- values: Sequence[str] # runtime only gets str in tests, but should be whatever _OMD stores
+ values: Sequence[
+ str
+ ] # runtime only gets str in tests, but should be whatever _OMD stores
v: str
for (key, values) in section_dict.items_all():
if key == "__name__":
continue
for v in values:
- fp.write(("\t%s = %s\n" % (key, self._value_to_string(v).replace('\n', '\n\t'))).encode(defenc))
+ fp.write(
+ (
+ "\t%s = %s\n"
+ % (key, self._value_to_string(v).replace("\n", "\n\t"))
+ ).encode(defenc)
+ )
# END if key is not __name__
+
# END section writing
if self._defaults:
@@ -636,16 +713,20 @@ class GitConfigParser(cp.RawConfigParser, metaclass=MetaParserBuilder):
for name, value in self._sections.items():
write_section(name, value)
- def items(self, section_name: str) -> List[Tuple[str, str]]: # type: ignore[override]
+ def items(self, section_name: str) -> List[Tuple[str, str]]: # type: ignore[override]
""":return: list((option, value), ...) pairs of all items in the given section"""
- return [(k, v) for k, v in super(GitConfigParser, self).items(section_name) if k != '__name__']
+ return [
+ (k, v)
+ for k, v in super(GitConfigParser, self).items(section_name)
+ if k != "__name__"
+ ]
def items_all(self, section_name: str) -> List[Tuple[str, List[str]]]:
""":return: list((option, [values...]), ...) pairs of all items in the given section"""
rv = _OMD(self._defaults)
for k, vs in self._sections[section_name].items_all():
- if k == '__name__':
+ if k == "__name__":
continue
if k in rv and rv.getall(k) == vs:
@@ -667,20 +748,26 @@ class GitConfigParser(cp.RawConfigParser, metaclass=MetaParserBuilder):
return None
if isinstance(self._file_or_files, (list, tuple)):
- raise AssertionError("Cannot write back if there is not exactly a single file to write to, have %i files"
- % len(self._file_or_files))
+ raise AssertionError(
+ "Cannot write back if there is not exactly a single file to write to, have %i files"
+ % len(self._file_or_files)
+ )
# end assert multiple files
if self._has_includes():
- log.debug("Skipping write-back of configuration file as include files were merged in." +
- "Set merge_includes=False to prevent this.")
+ log.debug(
+ "Skipping write-back of configuration file as include files were merged in."
+ + "Set merge_includes=False to prevent this."
+ )
return None
# end
fp = self._file_or_files
# we have a physical file on disk, so get a lock
- is_file_lock = isinstance(fp, (str, os.PathLike, IOBase)) # can't use Pathlike until 3.5 dropped
+ is_file_lock = isinstance(
+ fp, (str, os.PathLike, IOBase)
+ ) # can't use Pathlike until 3.5 dropped
if is_file_lock and self._lock is not None: # else raise Error?
self._lock._obtain_lock()
@@ -689,16 +776,18 @@ class GitConfigParser(cp.RawConfigParser, metaclass=MetaParserBuilder):
with open(fp, "wb") as fp_open:
self._write(fp_open)
else:
- fp = cast('BytesIO', fp)
+ fp = cast("BytesIO", fp)
fp.seek(0)
# make sure we do not overwrite into an existing file
- if hasattr(fp, 'truncate'):
+ if hasattr(fp, "truncate"):
fp.truncate()
self._write(fp)
def _assure_writable(self, method_name: str) -> None:
if self.read_only:
- raise IOError("Cannot execute non-constant method %s.%s" % (self, method_name))
+ raise IOError(
+ "Cannot execute non-constant method %s.%s" % (self, method_name)
+ )
def add_section(self, section: str) -> None:
"""Assures added options will stay in order"""
@@ -709,8 +798,12 @@ class GitConfigParser(cp.RawConfigParser, metaclass=MetaParserBuilder):
""":return: True if this instance may change the configuration file"""
return self._read_only
- def get_value(self, section: str, option: str, default: Union[int, float, str, bool, None] = None
- ) -> Union[int, float, str, bool]:
+ def get_value(
+ self,
+ section: str,
+ option: str,
+ default: Union[int, float, str, bool, None] = None,
+ ) -> Union[int, float, str, bool]:
# can default or return type include bool?
"""Get an option's value.
@@ -733,8 +826,12 @@ class GitConfigParser(cp.RawConfigParser, metaclass=MetaParserBuilder):
return self._string_to_value(valuestr)
- def get_values(self, section: str, option: str, default: Union[int, float, str, bool, None] = None
- ) -> List[Union[int, float, str, bool]]:
+ def get_values(
+ self,
+ section: str,
+ option: str,
+ default: Union[int, float, str, bool, None] = None,
+ ) -> List[Union[int, float, str, bool]]:
"""Get an option's values.
If multiple values are specified for this option in the section, all are
@@ -771,15 +868,16 @@ class GitConfigParser(cp.RawConfigParser, metaclass=MetaParserBuilder):
# try boolean values as git uses them
vl = valuestr.lower()
- if vl == 'false':
+ if vl == "false":
return False
- if vl == 'true':
+ if vl == "true":
return True
if not isinstance(valuestr, str):
raise TypeError(
"Invalid value type: only int, long, float and str are allowed",
- valuestr)
+ valuestr,
+ )
return valuestr
@@ -790,7 +888,9 @@ class GitConfigParser(cp.RawConfigParser, metaclass=MetaParserBuilder):
@needs_values
@set_dirty_and_flush_changes
- def set_value(self, section: str, option: str, value: Union[str, bytes, int, float, bool]) -> 'GitConfigParser':
+ def set_value(
+ self, section: str, option: str, value: Union[str, bytes, int, float, bool]
+ ) -> "GitConfigParser":
"""Sets the given option in section to the given value.
It will create the section if required, and will not throw as opposed to the default
ConfigParser 'set' method.
@@ -808,7 +908,9 @@ class GitConfigParser(cp.RawConfigParser, metaclass=MetaParserBuilder):
@needs_values
@set_dirty_and_flush_changes
- def add_value(self, section: str, option: str, value: Union[str, bytes, int, float, bool]) -> 'GitConfigParser':
+ def add_value(
+ self, section: str, option: str, value: Union[str, bytes, int, float, bool]
+ ) -> "GitConfigParser":
"""Adds a value for the given option in section.
It will create the section if required, and will not throw as opposed to the default
ConfigParser 'set' method. The value becomes the new value of the option as returned
@@ -825,7 +927,7 @@ class GitConfigParser(cp.RawConfigParser, metaclass=MetaParserBuilder):
self._sections[section].add(option, self._value_to_string(value))
return self
- def rename_section(self, section: str, new_name: str) -> 'GitConfigParser':
+ def rename_section(self, section: str, new_name: str) -> "GitConfigParser":
"""rename the given section to new_name
:raise ValueError: if section doesn't exit
:raise ValueError: if a section with new_name does already exist
diff --git a/git/db.py b/git/db.py
index 3a7adc7d..a119f4ea 100644
--- a/git/db.py
+++ b/git/db.py
@@ -1,9 +1,6 @@
"""Module with our own gitdb implementation - it uses the git command"""
from git.util import bin_to_hex, hex_to_bin
-from gitdb.base import (
- OInfo,
- OStream
-)
+from gitdb.base import OInfo, OStream
from gitdb.db import GitDB # @UnusedImport
from gitdb.db import LooseObjectDB
@@ -21,7 +18,7 @@ if TYPE_CHECKING:
# --------------------------------------------------------
-__all__ = ('GitCmdObjectDB', 'GitDB')
+__all__ = ("GitCmdObjectDB", "GitDB")
class GitCmdObjectDB(LooseObjectDB):
@@ -34,7 +31,7 @@ class GitCmdObjectDB(LooseObjectDB):
have packs and the other implementations
"""
- def __init__(self, root_path: PathLike, git: 'Git') -> None:
+ def __init__(self, root_path: PathLike, git: "Git") -> None:
"""Initialize this instance with the root and a git command"""
super(GitCmdObjectDB, self).__init__(root_path)
self._git = git
@@ -45,7 +42,9 @@ class GitCmdObjectDB(LooseObjectDB):
def stream(self, binsha: bytes) -> OStream:
"""For now, all lookup is done by git itself"""
- hexsha, typename, size, stream = self._git.stream_object_data(bin_to_hex(binsha))
+ hexsha, typename, size, stream = self._git.stream_object_data(
+ bin_to_hex(binsha)
+ )
return OStream(hex_to_bin(hexsha), typename, size, stream)
# { Interface
@@ -63,4 +62,4 @@ class GitCmdObjectDB(LooseObjectDB):
raise BadObject(partial_hexsha) from e
# END handle exceptions
- #} END interface
+ # } END interface
diff --git a/git/diff.py b/git/diff.py
index cea66d7e..6526ed68 100644
--- a/git/diff.py
+++ b/git/diff.py
@@ -15,7 +15,19 @@ from .objects.util import mode_str_to_int
# typing ------------------------------------------------------------------
-from typing import Any, Iterator, List, Match, Optional, Tuple, Type, TypeVar, Union, TYPE_CHECKING, cast
+from typing import (
+ Any,
+ Iterator,
+ List,
+ Match,
+ Optional,
+ Tuple,
+ Type,
+ TypeVar,
+ Union,
+ TYPE_CHECKING,
+ cast,
+)
from git.types import PathLike, Literal
if TYPE_CHECKING:
@@ -26,7 +38,7 @@ if TYPE_CHECKING:
from subprocess import Popen
from git import Git
-Lit_change_type = Literal['A', 'D', 'C', 'M', 'R', 'T', 'U']
+Lit_change_type = Literal["A", "D", "C", "M", "R", "T", "U"]
# def is_change_type(inp: str) -> TypeGuard[Lit_change_type]:
@@ -36,12 +48,12 @@ Lit_change_type = Literal['A', 'D', 'C', 'M', 'R', 'T', 'U']
# ------------------------------------------------------------------------
-__all__ = ('Diffable', 'DiffIndex', 'Diff', 'NULL_TREE')
+__all__ = ("Diffable", "DiffIndex", "Diff", "NULL_TREE")
# Special object to compare against the empty tree in diffs
NULL_TREE = object()
-_octal_byte_re = re.compile(b'\\\\([0-9]{3})')
+_octal_byte_re = re.compile(b"\\\\([0-9]{3})")
def _octal_repl(matchobj: Match) -> bytes:
@@ -52,19 +64,22 @@ def _octal_repl(matchobj: Match) -> bytes:
def decode_path(path: bytes, has_ab_prefix: bool = True) -> Optional[bytes]:
- if path == b'/dev/null':
+ if path == b"/dev/null":
return None
if path.startswith(b'"') and path.endswith(b'"'):
- path = (path[1:-1].replace(b'\\n', b'\n')
- .replace(b'\\t', b'\t')
- .replace(b'\\"', b'"')
- .replace(b'\\\\', b'\\'))
+ path = (
+ path[1:-1]
+ .replace(b"\\n", b"\n")
+ .replace(b"\\t", b"\t")
+ .replace(b'\\"', b'"')
+ .replace(b"\\\\", b"\\")
+ )
path = _octal_byte_re.sub(_octal_repl, path)
if has_ab_prefix:
- assert path.startswith(b'a/') or path.startswith(b'b/')
+ assert path.startswith(b"a/") or path.startswith(b"b/")
path = path[2:]
return path
@@ -77,14 +92,16 @@ class Diffable(object):
:note:
Subclasses require a repo member as it is the case for Object instances, for practical
reasons we do not derive from Object."""
+
__slots__ = ()
# standin indicating you want to diff against the index
class Index(object):
pass
- def _process_diff_args(self, args: List[Union[str, 'Diffable', Type['Diffable.Index'], object]]
- ) -> List[Union[str, 'Diffable', Type['Diffable.Index'], object]]:
+ def _process_diff_args(
+ self, args: List[Union[str, "Diffable", Type["Diffable.Index"], object]]
+ ) -> List[Union[str, "Diffable", Type["Diffable.Index"], object]]:
"""
:return:
possibly altered version of the given args list.
@@ -92,9 +109,13 @@ class Diffable(object):
Subclasses can use it to alter the behaviour of the superclass"""
return args
- def diff(self, other: Union[Type['Index'], 'Tree', 'Commit', None, str, object] = Index,
- paths: Union[PathLike, List[PathLike], Tuple[PathLike, ...], None] = None,
- create_patch: bool = False, **kwargs: Any) -> 'DiffIndex':
+ def diff(
+ self,
+ other: Union[Type["Index"], "Tree", "Commit", None, str, object] = Index,
+ paths: Union[PathLike, List[PathLike], Tuple[PathLike, ...], None] = None,
+ create_patch: bool = False,
+ **kwargs: Any
+ ) -> "DiffIndex":
"""Creates diffs between two items being trees, trees and index or an
index and the working tree. It will detect renames automatically.
@@ -125,11 +146,11 @@ class Diffable(object):
:note:
On a bare repository, 'other' needs to be provided as Index or as
as Tree/Commit, or a git command error will occur"""
- args: List[Union[PathLike, Diffable, Type['Diffable.Index'], object]] = []
- args.append("--abbrev=40") # we need full shas
- args.append("--full-index") # get full index paths, not only filenames
+ args: List[Union[PathLike, Diffable, Type["Diffable.Index"], object]] = []
+ args.append("--abbrev=40") # we need full shas
+ args.append("--full-index") # get full index paths, not only filenames
- args.append("-M") # check for renames, in both formats
+ args.append("-M") # check for renames, in both formats
if create_patch:
args.append("-p")
else:
@@ -138,23 +159,23 @@ class Diffable(object):
# in any way, assure we don't see colored output,
# fixes https://github.com/gitpython-developers/GitPython/issues/172
- args.append('--no-color')
+ args.append("--no-color")
if paths is not None and not isinstance(paths, (tuple, list)):
paths = [paths]
- if hasattr(self, 'Has_Repo'):
- self.repo: 'Repo' = self.repo
+ if hasattr(self, "Has_Repo"):
+ self.repo: "Repo" = self.repo
diff_cmd = self.repo.git.diff
if other is self.Index:
- args.insert(0, '--cached')
+ args.insert(0, "--cached")
elif other is NULL_TREE:
- args.insert(0, '-r') # recursive diff-tree
- args.insert(0, '--root')
+ args.insert(0, "-r") # recursive diff-tree
+ args.insert(0, "--root")
diff_cmd = self.repo.git.diff_tree
elif other is not None:
- args.insert(0, '-r') # recursive diff-tree
+ args.insert(0, "-r") # recursive diff-tree
args.insert(0, other)
diff_cmd = self.repo.git.diff_tree
@@ -166,19 +187,21 @@ class Diffable(object):
args.extend(paths)
# END paths handling
- kwargs['as_process'] = True
+ kwargs["as_process"] = True
proc = diff_cmd(*self._process_diff_args(args), **kwargs)
- diff_method = (Diff._index_from_patch_format
- if create_patch
- else Diff._index_from_raw_format)
+ diff_method = (
+ Diff._index_from_patch_format
+ if create_patch
+ else Diff._index_from_raw_format
+ )
index = diff_method(self.repo, proc)
proc.wait()
return index
-T_Diff = TypeVar('T_Diff', bound='Diff')
+T_Diff = TypeVar("T_Diff", bound="Diff")
class DiffIndex(List[T_Diff]):
@@ -187,6 +210,7 @@ class DiffIndex(List[T_Diff]):
the diff properties.
The class improves the diff handling convenience"""
+
# change type invariant identifying possible ways a blob can have changed
# A = Added
# D = Deleted
@@ -208,7 +232,7 @@ class DiffIndex(List[T_Diff]):
* 'R' for renamed paths
* 'M' for paths with modified data
* 'T' for changed in the type paths
- """
+ """
if change_type not in self.change_type:
raise ValueError("Invalid change type: %s" % change_type)
@@ -223,7 +247,12 @@ class DiffIndex(List[T_Diff]):
yield diffidx
elif change_type == "R" and diffidx.renamed:
yield diffidx
- elif change_type == "M" and diffidx.a_blob and diffidx.b_blob and diffidx.a_blob != diffidx.b_blob:
+ elif (
+ change_type == "M"
+ and diffidx.a_blob
+ and diffidx.b_blob
+ and diffidx.a_blob != diffidx.b_blob
+ ):
yield diffidx
# END for each diff
@@ -261,7 +290,8 @@ class Diff(object):
be different to the version in the index or tree, and hence has been modified."""
# precompiled regex
- re_header = re.compile(br"""
+ re_header = re.compile(
+ rb"""
^diff[ ]--git
[ ](?P<a_path_fallback>"?[ab]/.+?"?)[ ](?P<b_path_fallback>"?[ab]/.+?"?)\n
(?:^old[ ]mode[ ](?P<old_mode>\d+)\n
@@ -278,22 +308,48 @@ class Diff(object):
\.\.(?P<b_blob_id>[0-9A-Fa-f]+)[ ]?(?P<b_mode>.+)?(?:\n|$))?
(?:^---[ ](?P<a_path>[^\t\n\r\f\v]*)[\t\r\f\v]*(?:\n|$))?
(?:^\+\+\+[ ](?P<b_path>[^\t\n\r\f\v]*)[\t\r\f\v]*(?:\n|$))?
- """, re.VERBOSE | re.MULTILINE)
+ """,
+ re.VERBOSE | re.MULTILINE,
+ )
# can be used for comparisons
NULL_HEX_SHA = "0" * 40
NULL_BIN_SHA = b"\0" * 20
- __slots__ = ("a_blob", "b_blob", "a_mode", "b_mode", "a_rawpath", "b_rawpath",
- "new_file", "deleted_file", "copied_file", "raw_rename_from",
- "raw_rename_to", "diff", "change_type", "score")
-
- def __init__(self, repo: 'Repo',
- a_rawpath: Optional[bytes], b_rawpath: Optional[bytes],
- a_blob_id: Union[str, bytes, None], b_blob_id: Union[str, bytes, None],
- a_mode: Union[bytes, str, None], b_mode: Union[bytes, str, None],
- new_file: bool, deleted_file: bool, copied_file: bool,
- raw_rename_from: Optional[bytes], raw_rename_to: Optional[bytes],
- diff: Union[str, bytes, None], change_type: Optional[Lit_change_type], score: Optional[int]) -> None:
+ __slots__ = (
+ "a_blob",
+ "b_blob",
+ "a_mode",
+ "b_mode",
+ "a_rawpath",
+ "b_rawpath",
+ "new_file",
+ "deleted_file",
+ "copied_file",
+ "raw_rename_from",
+ "raw_rename_to",
+ "diff",
+ "change_type",
+ "score",
+ )
+
+ def __init__(
+ self,
+ repo: "Repo",
+ a_rawpath: Optional[bytes],
+ b_rawpath: Optional[bytes],
+ a_blob_id: Union[str, bytes, None],
+ b_blob_id: Union[str, bytes, None],
+ a_mode: Union[bytes, str, None],
+ b_mode: Union[bytes, str, None],
+ new_file: bool,
+ deleted_file: bool,
+ copied_file: bool,
+ raw_rename_from: Optional[bytes],
+ raw_rename_to: Optional[bytes],
+ diff: Union[str, bytes, None],
+ change_type: Optional[Lit_change_type],
+ score: Optional[int],
+ ) -> None:
assert a_rawpath is None or isinstance(a_rawpath, bytes)
assert b_rawpath is None or isinstance(b_rawpath, bytes)
@@ -307,22 +363,26 @@ class Diff(object):
# we need to overwrite "repo" to the corresponding submodule's repo instead
if repo and a_rawpath:
for submodule in repo.submodules:
- if submodule.path == a_rawpath.decode(defenc, 'replace'):
+ if submodule.path == a_rawpath.decode(defenc, "replace"):
if submodule.module_exists():
repo = submodule.module()
break
- self.a_blob: Union['IndexObject', None]
+ self.a_blob: Union["IndexObject", None]
if a_blob_id is None or a_blob_id == self.NULL_HEX_SHA:
self.a_blob = None
else:
- self.a_blob = Blob(repo, hex_to_bin(a_blob_id), mode=self.a_mode, path=self.a_path)
+ self.a_blob = Blob(
+ repo, hex_to_bin(a_blob_id), mode=self.a_mode, path=self.a_path
+ )
- self.b_blob: Union['IndexObject', None]
+ self.b_blob: Union["IndexObject", None]
if b_blob_id is None or b_blob_id == self.NULL_HEX_SHA:
self.b_blob = None
else:
- self.b_blob = Blob(repo, hex_to_bin(b_blob_id), mode=self.b_mode, path=self.b_path)
+ self.b_blob = Blob(
+ repo, hex_to_bin(b_blob_id), mode=self.b_mode, path=self.b_path
+ )
self.new_file: bool = new_file
self.deleted_file: bool = deleted_file
@@ -358,10 +418,10 @@ class Diff(object):
elif self.b_blob:
h %= self.b_blob.path
- msg: str = ''
- line = None # temp line
- line_length = 0 # line length
- for b, n in zip((self.a_blob, self.b_blob), ('lhs', 'rhs')):
+ msg: str = ""
+ line = None # temp line
+ line_length = 0 # line length
+ for b, n in zip((self.a_blob, self.b_blob), ("lhs", "rhs")):
if b:
line = "\n%s: %o | %s" % (n, b.mode, b.hexsha)
else:
@@ -372,26 +432,30 @@ class Diff(object):
# END for each blob
# add headline
- h += '\n' + '=' * line_length
+ h += "\n" + "=" * line_length
if self.deleted_file:
- msg += '\nfile deleted in rhs'
+ msg += "\nfile deleted in rhs"
if self.new_file:
- msg += '\nfile added in rhs'
+ msg += "\nfile added in rhs"
if self.copied_file:
- msg += '\nfile %r copied from %r' % (self.b_path, self.a_path)
+ msg += "\nfile %r copied from %r" % (self.b_path, self.a_path)
if self.rename_from:
- msg += '\nfile renamed from %r' % self.rename_from
+ msg += "\nfile renamed from %r" % self.rename_from
if self.rename_to:
- msg += '\nfile renamed to %r' % self.rename_to
+ msg += "\nfile renamed to %r" % self.rename_to
if self.diff:
- msg += '\n---'
+ msg += "\n---"
try:
- msg += self.diff.decode(defenc) if isinstance(self.diff, bytes) else self.diff
+ msg += (
+ self.diff.decode(defenc)
+ if isinstance(self.diff, bytes)
+ else self.diff
+ )
except UnicodeDecodeError:
- msg += 'OMITTED BINARY DATA'
+ msg += "OMITTED BINARY DATA"
# end handle encoding
- msg += '\n---'
+ msg += "\n---"
# END diff info
# Python2 silliness: have to assure we convert our likely to be unicode object to a string with the
@@ -400,37 +464,44 @@ class Diff(object):
# end
return res
- @ property
+ @property
def a_path(self) -> Optional[str]:
- return self.a_rawpath.decode(defenc, 'replace') if self.a_rawpath else None
+ return self.a_rawpath.decode(defenc, "replace") if self.a_rawpath else None
- @ property
+ @property
def b_path(self) -> Optional[str]:
- return self.b_rawpath.decode(defenc, 'replace') if self.b_rawpath else None
+ return self.b_rawpath.decode(defenc, "replace") if self.b_rawpath else None
- @ property
+ @property
def rename_from(self) -> Optional[str]:
- return self.raw_rename_from.decode(defenc, 'replace') if self.raw_rename_from else None
+ return (
+ self.raw_rename_from.decode(defenc, "replace")
+ if self.raw_rename_from
+ else None
+ )
- @ property
+ @property
def rename_to(self) -> Optional[str]:
- return self.raw_rename_to.decode(defenc, 'replace') if self.raw_rename_to else None
+ return (
+ self.raw_rename_to.decode(defenc, "replace") if self.raw_rename_to else None
+ )
- @ property
+ @property
def renamed(self) -> bool:
""":returns: True if the blob of our diff has been renamed
:note: This property is deprecated, please use ``renamed_file`` instead.
"""
return self.renamed_file
- @ property
+ @property
def renamed_file(self) -> bool:
- """:returns: True if the blob of our diff has been renamed
- """
+ """:returns: True if the blob of our diff has been renamed"""
return self.rename_from != self.rename_to
- @ classmethod
- def _pick_best_path(cls, path_match: bytes, rename_match: bytes, path_fallback_match: bytes) -> Optional[bytes]:
+ @classmethod
+ def _pick_best_path(
+ cls, path_match: bytes, rename_match: bytes, path_fallback_match: bytes
+ ) -> Optional[bytes]:
if path_match:
return decode_path(path_match)
@@ -442,34 +513,51 @@ class Diff(object):
return None
- @ classmethod
- def _index_from_patch_format(cls, repo: 'Repo', proc: Union['Popen', 'Git.AutoInterrupt']) -> DiffIndex:
+ @classmethod
+ def _index_from_patch_format(
+ cls, repo: "Repo", proc: Union["Popen", "Git.AutoInterrupt"]
+ ) -> DiffIndex:
"""Create a new DiffIndex from the given text which must be in patch format
:param repo: is the repository we are operating on - it is required
:param stream: result of 'git diff' as a stream (supporting file protocol)
- :return: git.DiffIndex """
+ :return: git.DiffIndex"""
## FIXME: Here SLURPING raw, need to re-phrase header-regexes linewise.
text_list: List[bytes] = []
- handle_process_output(proc, text_list.append, None, finalize_process, decode_streams=False)
+ handle_process_output(
+ proc, text_list.append, None, finalize_process, decode_streams=False
+ )
# for now, we have to bake the stream
- text = b''.join(text_list)
- index: 'DiffIndex' = DiffIndex()
+ text = b"".join(text_list)
+ index: "DiffIndex" = DiffIndex()
previous_header: Union[Match[bytes], None] = None
header: Union[Match[bytes], None] = None
a_path, b_path = None, None # for mypy
a_mode, b_mode = None, None # for mypy
for _header in cls.re_header.finditer(text):
- a_path_fallback, b_path_fallback, \
- old_mode, new_mode, \
- rename_from, rename_to, \
- new_file_mode, deleted_file_mode, copied_file_name, \
- a_blob_id, b_blob_id, b_mode, \
- a_path, b_path = _header.groups()
-
- new_file, deleted_file, copied_file = \
- bool(new_file_mode), bool(deleted_file_mode), bool(copied_file_name)
+ (
+ a_path_fallback,
+ b_path_fallback,
+ old_mode,
+ new_mode,
+ rename_from,
+ rename_to,
+ new_file_mode,
+ deleted_file_mode,
+ copied_file_name,
+ a_blob_id,
+ b_blob_id,
+ b_mode,
+ a_path,
+ b_path,
+ ) = _header.groups()
+
+ new_file, deleted_file, copied_file = (
+ bool(new_file_mode),
+ bool(deleted_file_mode),
+ bool(copied_file_name),
+ )
a_path = cls._pick_best_path(a_path, rename_from, a_path_fallback)
b_path = cls._pick_best_path(b_path, rename_to, b_path_fallback)
@@ -477,41 +565,53 @@ class Diff(object):
# Our only means to find the actual text is to see what has not been matched by our regex,
# and then retro-actively assign it to our index
if previous_header is not None:
- index[-1].diff = text[previous_header.end():_header.start()]
+ index[-1].diff = text[previous_header.end() : _header.start()]
# end assign actual diff
# Make sure the mode is set if the path is set. Otherwise the resulting blob is invalid
# We just use the one mode we should have parsed
- a_mode = old_mode or deleted_file_mode or (a_path and (b_mode or new_mode or new_file_mode))
+ a_mode = (
+ old_mode
+ or deleted_file_mode
+ or (a_path and (b_mode or new_mode or new_file_mode))
+ )
b_mode = b_mode or new_mode or new_file_mode or (b_path and a_mode)
- index.append(Diff(repo,
- a_path,
- b_path,
- a_blob_id and a_blob_id.decode(defenc),
- b_blob_id and b_blob_id.decode(defenc),
- a_mode and a_mode.decode(defenc),
- b_mode and b_mode.decode(defenc),
- new_file, deleted_file, copied_file,
- rename_from,
- rename_to,
- None, None, None))
+ index.append(
+ Diff(
+ repo,
+ a_path,
+ b_path,
+ a_blob_id and a_blob_id.decode(defenc),
+ b_blob_id and b_blob_id.decode(defenc),
+ a_mode and a_mode.decode(defenc),
+ b_mode and b_mode.decode(defenc),
+ new_file,
+ deleted_file,
+ copied_file,
+ rename_from,
+ rename_to,
+ None,
+ None,
+ None,
+ )
+ )
previous_header = _header
header = _header
# end for each header we parse
if index and header:
- index[-1].diff = text[header.end():]
+ index[-1].diff = text[header.end() :]
# end assign last diff
return index
- @ staticmethod
- def _handle_diff_line(lines_bytes: bytes, repo: 'Repo', index: DiffIndex) -> None:
+ @staticmethod
+ def _handle_diff_line(lines_bytes: bytes, repo: "Repo", index: DiffIndex) -> None:
lines = lines_bytes.decode(defenc)
- for line in lines.split(':')[1:]:
- meta, _, path = line.partition('\x00')
- path = path.rstrip('\x00')
+ for line in lines.split(":")[1:]:
+ meta, _, path = line.partition("\x00")
+ path = path.rstrip("\x00")
a_blob_id: Optional[str]
b_blob_id: Optional[str]
old_mode, new_mode, a_blob_id, b_blob_id, _change_type = meta.split(None, 4)
@@ -520,7 +620,7 @@ class Diff(object):
# 100: score (in case of copy and rename)
# assert is_change_type(_change_type[0]), f"Unexpected value for change_type received: {_change_type[0]}"
change_type: Lit_change_type = cast(Lit_change_type, _change_type[0])
- score_str = ''.join(_change_type[1:])
+ score_str = "".join(_change_type[1:])
score = int(score_str) if score_str.isdigit() else None
path = path.strip()
a_path = path.encode(defenc)
@@ -533,41 +633,60 @@ class Diff(object):
# NOTE: We cannot conclude from the existence of a blob to change type
# as diffs with the working do not have blobs yet
- if change_type == 'D':
+ if change_type == "D":
b_blob_id = None # Optional[str]
deleted_file = True
- elif change_type == 'A':
+ elif change_type == "A":
a_blob_id = None
new_file = True
- elif change_type == 'C':
+ elif change_type == "C":
copied_file = True
- a_path_str, b_path_str = path.split('\x00', 1)
+ a_path_str, b_path_str = path.split("\x00", 1)
a_path = a_path_str.encode(defenc)
b_path = b_path_str.encode(defenc)
- elif change_type == 'R':
- a_path_str, b_path_str = path.split('\x00', 1)
+ elif change_type == "R":
+ a_path_str, b_path_str = path.split("\x00", 1)
a_path = a_path_str.encode(defenc)
b_path = b_path_str.encode(defenc)
rename_from, rename_to = a_path, b_path
- elif change_type == 'T':
+ elif change_type == "T":
# Nothing to do
pass
# END add/remove handling
- diff = Diff(repo, a_path, b_path, a_blob_id, b_blob_id, old_mode, new_mode,
- new_file, deleted_file, copied_file, rename_from, rename_to,
- '', change_type, score)
+ diff = Diff(
+ repo,
+ a_path,
+ b_path,
+ a_blob_id,
+ b_blob_id,
+ old_mode,
+ new_mode,
+ new_file,
+ deleted_file,
+ copied_file,
+ rename_from,
+ rename_to,
+ "",
+ change_type,
+ score,
+ )
index.append(diff)
- @ classmethod
- def _index_from_raw_format(cls, repo: 'Repo', proc: 'Popen') -> 'DiffIndex':
+ @classmethod
+ def _index_from_raw_format(cls, repo: "Repo", proc: "Popen") -> "DiffIndex":
"""Create a new DiffIndex from the given stream which must be in raw format.
:return: git.DiffIndex"""
# handles
# :100644 100644 687099101... 37c5e30c8... M .gitignore
- index: 'DiffIndex' = DiffIndex()
- handle_process_output(proc, lambda byt: cls._handle_diff_line(byt, repo, index),
- None, finalize_process, decode_streams=False)
+ index: "DiffIndex" = DiffIndex()
+ handle_process_output(
+ proc,
+ lambda byt: cls._handle_diff_line(byt, repo, index),
+ None,
+ finalize_process,
+ decode_streams=False,
+ )
return index
diff --git a/git/exc.py b/git/exc.py
index 045ea9d2..487ce179 100644
--- a/git/exc.py
+++ b/git/exc.py
@@ -6,7 +6,7 @@
""" Module containing all exceptions thrown throughout the git package, """
from gitdb.exc import BadName # NOQA @UnusedWildImport skipcq: PYL-W0401, PYL-W0614
-from gitdb.exc import * # NOQA @UnusedWildImport skipcq: PYL-W0401, PYL-W0614
+from gitdb.exc import * # NOQA @UnusedWildImport skipcq: PYL-W0401, PYL-W0614
from git.compat import safe_decode
from git.util import remove_password_if_present
@@ -22,19 +22,19 @@ if TYPE_CHECKING:
class GitError(Exception):
- """ Base class for all package exceptions """
+ """Base class for all package exceptions"""
class InvalidGitRepositoryError(GitError):
- """ Thrown if the given repository appears to have an invalid format. """
+ """Thrown if the given repository appears to have an invalid format."""
class WorkTreeRepositoryUnsupported(InvalidGitRepositoryError):
- """ Thrown to indicate we can't handle work tree repositories """
+ """Thrown to indicate we can't handle work tree repositories"""
class NoSuchPathError(GitError, OSError):
- """ Thrown if a path could not be access by the system. """
+ """Thrown if a path could not be access by the system."""
class CommandError(GitError):
@@ -49,10 +49,13 @@ class CommandError(GitError):
#: "'%s' failed%s"
_msg = "Cmd('%s') failed%s"
- def __init__(self, command: Union[List[str], Tuple[str, ...], str],
- status: Union[str, int, None, Exception] = None,
- stderr: Union[bytes, str, None] = None,
- stdout: Union[bytes, str, None] = None) -> None:
+ def __init__(
+ self,
+ command: Union[List[str], Tuple[str, ...], str],
+ status: Union[str, int, None, Exception] = None,
+ stderr: Union[bytes, str, None] = None,
+ stdout: Union[bytes, str, None] = None,
+ ) -> None:
if not isinstance(command, (tuple, list)):
command = command.split()
self.command = remove_password_if_present(command)
@@ -62,41 +65,50 @@ class CommandError(GitError):
status = "%s('%s')" % (type(status).__name__, safe_decode(str(status)))
else:
try:
- status = 'exit code(%s)' % int(status)
+ status = "exit code(%s)" % int(status)
except (ValueError, TypeError):
s = safe_decode(str(status))
status = "'%s'" % s if isinstance(status, str) else s
self._cmd = safe_decode(self.command[0])
- self._cmdline = ' '.join(safe_decode(i) for i in self.command)
+ self._cmdline = " ".join(safe_decode(i) for i in self.command)
self._cause = status and " due to: %s" % status or "!"
stdout_decode = safe_decode(stdout)
stderr_decode = safe_decode(stderr)
- self.stdout = stdout_decode and "\n stdout: '%s'" % stdout_decode or ''
- self.stderr = stderr_decode and "\n stderr: '%s'" % stderr_decode or ''
+ self.stdout = stdout_decode and "\n stdout: '%s'" % stdout_decode or ""
+ self.stderr = stderr_decode and "\n stderr: '%s'" % stderr_decode or ""
def __str__(self) -> str:
return (self._msg + "\n cmdline: %s%s%s") % (
- self._cmd, self._cause, self._cmdline, self.stdout, self.stderr)
+ self._cmd,
+ self._cause,
+ self._cmdline,
+ self.stdout,
+ self.stderr,
+ )
class GitCommandNotFound(CommandError):
"""Thrown if we cannot find the `git` executable in the PATH or at the path given by
the GIT_PYTHON_GIT_EXECUTABLE environment variable"""
- def __init__(self, command: Union[List[str], Tuple[str], str], cause: Union[str, Exception]) -> None:
+ def __init__(
+ self, command: Union[List[str], Tuple[str], str], cause: Union[str, Exception]
+ ) -> None:
super(GitCommandNotFound, self).__init__(command, cause)
self._msg = "Cmd('%s') not found%s"
class GitCommandError(CommandError):
- """ Thrown if execution of the git command fails with non-zero status code. """
-
- def __init__(self, command: Union[List[str], Tuple[str, ...], str],
- status: Union[str, int, None, Exception] = None,
- stderr: Union[bytes, str, None] = None,
- stdout: Union[bytes, str, None] = None,
- ) -> None:
+ """Thrown if execution of the git command fails with non-zero status code."""
+
+ def __init__(
+ self,
+ command: Union[List[str], Tuple[str, ...], str],
+ status: Union[str, int, None, Exception] = None,
+ stderr: Union[bytes, str, None] = None,
+ stdout: Union[bytes, str, None] = None,
+ ) -> None:
super(GitCommandError, self).__init__(command, status, stderr, stdout)
@@ -114,8 +126,13 @@ class CheckoutError(GitError):
were checked out successfully and hence match the version stored in the
index"""
- def __init__(self, message: str, failed_files: Sequence[PathLike], valid_files: Sequence[PathLike],
- failed_reasons: List[str]) -> None:
+ def __init__(
+ self,
+ message: str,
+ failed_files: Sequence[PathLike],
+ valid_files: Sequence[PathLike],
+ failed_reasons: List[str],
+ ) -> None:
Exception.__init__(self, message)
self.failed_files = failed_files
@@ -140,10 +157,13 @@ class HookExecutionError(CommandError):
"""Thrown if a hook exits with a non-zero exit code. It provides access to the exit code and the string returned
via standard output"""
- def __init__(self, command: Union[List[str], Tuple[str, ...], str],
- status: Union[str, int, None, Exception],
- stderr: Union[bytes, str, None] = None,
- stdout: Union[bytes, str, None] = None) -> None:
+ def __init__(
+ self,
+ command: Union[List[str], Tuple[str, ...], str],
+ status: Union[str, int, None, Exception],
+ stderr: Union[bytes, str, None] = None,
+ stdout: Union[bytes, str, None] = None,
+ ) -> None:
super(HookExecutionError, self).__init__(command, status, stderr, stdout)
self._msg = "Hook('%s') failed%s"
@@ -152,7 +172,7 @@ class HookExecutionError(CommandError):
class RepositoryDirtyError(GitError):
"""Thrown whenever an operation on a repository fails as it has uncommitted changes that would be overwritten"""
- def __init__(self, repo: 'Repo', message: str) -> None:
+ def __init__(self, repo: "Repo", message: str) -> None:
self.repo = repo
self.message = message
diff --git a/git/ext/gitdb b/git/ext/gitdb
-Subproject 1c976835c5d1779a28b9e11afd1656152db26a6
+Subproject 4762d99d978586fcdf08ade552f4712bfde6ef2
diff --git a/git/index/base.py b/git/index/base.py
index 00e51bf5..48894833 100644
--- a/git/index/base.py
+++ b/git/index/base.py
@@ -15,12 +15,7 @@ from git.compat import (
force_bytes,
defenc,
)
-from git.exc import (
- GitCommandError,
- CheckoutError,
- GitError,
- InvalidGitRepositoryError
-)
+from git.exc import GitCommandError, CheckoutError, GitError, InvalidGitRepositoryError
from git.objects import (
Blob,
Submodule,
@@ -36,7 +31,7 @@ from git.util import (
file_contents_ro,
to_native_path_linux,
unbare_repo,
- to_bin_sha
+ to_bin_sha,
)
from gitdb.base import IStream
from gitdb.db import MemoryDB
@@ -52,23 +47,32 @@ from .fun import (
write_tree_from_cache,
stat_mode_to_index_mode,
S_IFGITLINK,
- run_commit_hook
+ run_commit_hook,
)
from .typ import (
BaseIndexEntry,
IndexEntry,
)
-from .util import (
- TemporaryFileSwap,
- post_clear_cache,
- default_index,
- git_working_dir
-)
+from .util import TemporaryFileSwap, post_clear_cache, default_index, git_working_dir
# typing -----------------------------------------------------------------------------
-from typing import (Any, BinaryIO, Callable, Dict, IO, Iterable, Iterator, List, NoReturn,
- Sequence, TYPE_CHECKING, Tuple, Type, Union)
+from typing import (
+ Any,
+ BinaryIO,
+ Callable,
+ Dict,
+ IO,
+ Iterable,
+ Iterator,
+ List,
+ NoReturn,
+ Sequence,
+ TYPE_CHECKING,
+ Tuple,
+ Type,
+ Union,
+)
from git.types import Commit_ish, PathLike
@@ -85,7 +89,7 @@ Treeish = Union[Tree, Commit, str, bytes]
# ------------------------------------------------------------------------------------
-__all__ = ('IndexFile', 'CheckoutError')
+__all__ = ("IndexFile", "CheckoutError")
class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
@@ -110,11 +114,12 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
Make sure you use index.write() once you are done manipulating the index directly
before operating on it using the git command"""
+
__slots__ = ("repo", "version", "entries", "_extension_data", "_file_path")
- _VERSION = 2 # latest version we support
+ _VERSION = 2 # latest version we support
S_IFGITLINK = S_IFGITLINK # a submodule
- def __init__(self, repo: 'Repo', file_path: Union[PathLike, None] = None) -> None:
+ def __init__(self, repo: "Repo", file_path: Union[PathLike, None] = None) -> None:
"""Initialize this Index instance, optionally from the given ``file_path``.
If no file_path is given, we will be created from the current index file.
@@ -122,7 +127,7 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
repository's index on demand."""
self.repo = repo
self.version = self._VERSION
- self._extension_data = b''
+ self._extension_data = b""
self._file_path: PathLike = file_path or self._index_path()
def _set_cache_(self, attr: str) -> None:
@@ -152,40 +157,48 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
@property
def path(self) -> PathLike:
- """ :return: Path to the index file we are representing """
+ """:return: Path to the index file we are representing"""
return self._file_path
def _delete_entries_cache(self) -> None:
"""Safely clear the entries cache so it can be recreated"""
try:
- del(self.entries)
+ del self.entries
except AttributeError:
# fails in python 2.6.5 with this exception
pass
# END exception handling
- #{ Serializable Interface
+ # { Serializable Interface
- def _deserialize(self, stream: IO) -> 'IndexFile':
+ def _deserialize(self, stream: IO) -> "IndexFile":
"""Initialize this instance with index values read from the given stream"""
- self.version, self.entries, self._extension_data, _conten_sha = read_cache(stream)
+ self.version, self.entries, self._extension_data, _conten_sha = read_cache(
+ stream
+ )
return self
def _entries_sorted(self) -> List[IndexEntry]:
""":return: list of entries, in a sorted fashion, first by path, then by stage"""
return sorted(self.entries.values(), key=lambda e: (e.path, e.stage))
- def _serialize(self, stream: IO, ignore_extension_data: bool = False) -> 'IndexFile':
+ def _serialize(
+ self, stream: IO, ignore_extension_data: bool = False
+ ) -> "IndexFile":
entries = self._entries_sorted()
- extension_data = self._extension_data # type: Union[None, bytes]
+ extension_data = self._extension_data # type: Union[None, bytes]
if ignore_extension_data:
extension_data = None
write_cache(entries, stream, extension_data)
return self
- #} END serializable interface
+ # } END serializable interface
- def write(self, file_path: Union[None, PathLike] = None, ignore_extension_data: bool = False) -> None:
+ def write(
+ self,
+ file_path: Union[None, PathLike] = None,
+ ignore_extension_data: bool = False,
+ ) -> None:
"""Write the current state to our file path or to the given one
:param file_path:
@@ -229,7 +242,9 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
@post_clear_cache
@default_index
- def merge_tree(self, rhs: Treeish, base: Union[None, Treeish] = None) -> 'IndexFile':
+ def merge_tree(
+ self, rhs: Treeish, base: Union[None, Treeish] = None
+ ) -> "IndexFile":
"""Merge the given rhs treeish into the current index, possibly taking
a common base treeish into account.
@@ -252,7 +267,7 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
be raised at the first conflicting path. If you want to have proper
merge resolution to be done by yourself, you have to commit the changed
index ( or make a valid tree from it ) and retry with a three-way
- index.from_tree call. """
+ index.from_tree call."""
# -i : ignore working tree status
# --aggressive : handle more merge cases
# -m : do an actual merge
@@ -265,8 +280,8 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
return self
@classmethod
- def new(cls, repo: 'Repo', *tree_sha: Union[str, Tree]) -> 'IndexFile':
- """ Merge the given treeish revisions into a new index which is returned.
+ def new(cls, repo: "Repo", *tree_sha: Union[str, Tree]) -> "IndexFile":
+ """Merge the given treeish revisions into a new index which is returned.
This method behaves like git-read-tree --aggressive when doing the merge.
:param repo: The repository treeish are located in.
@@ -283,15 +298,18 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
inst = cls(repo)
# convert to entries dict
- entries: Dict[Tuple[PathLike, int], IndexEntry] = dict(zip(
- ((e.path, e.stage) for e in base_entries),
- (IndexEntry.from_base(e) for e in base_entries)))
+ entries: Dict[Tuple[PathLike, int], IndexEntry] = dict(
+ zip(
+ ((e.path, e.stage) for e in base_entries),
+ (IndexEntry.from_base(e) for e in base_entries),
+ )
+ )
inst.entries = entries
return inst
@classmethod
- def from_tree(cls, repo: 'Repo', *treeish: Treeish, **kwargs: Any) -> 'IndexFile':
+ def from_tree(cls, repo: "Repo", *treeish: Treeish, **kwargs: Any) -> "IndexFile":
"""Merge the given treeish revisions into a new index which is returned.
The original index will remain unaltered
@@ -326,7 +344,9 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
it will be temporarily moved out of the way to assure there are no unsuspected
interferences."""
if len(treeish) == 0 or len(treeish) > 3:
- raise ValueError("Please specify between 1 and 3 treeish, got %i" % len(treeish))
+ raise ValueError(
+ "Please specify between 1 and 3 treeish, got %i" % len(treeish)
+ )
arg_list: List[Union[Treeish, str]] = []
# ignore that working tree and index possibly are out of date
@@ -339,7 +359,7 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
# tmp file created in git home directory to be sure renaming
# works - /tmp/ dirs could be on another device
- tmp_index = tempfile.mktemp('', '', repo.git_dir)
+ tmp_index = tempfile.mktemp("", "", repo.git_dir)
arg_list.append("--index-output=%s" % tmp_index)
arg_list.extend(treeish)
@@ -348,12 +368,12 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
# Unfortunately there is no 'soft' way to do it.
# The TemporaryFileSwap assure the original file get put back
if repo.git_dir:
- index_handler = TemporaryFileSwap(join_path_native(repo.git_dir, 'index'))
+ index_handler = TemporaryFileSwap(join_path_native(repo.git_dir, "index"))
try:
repo.git.read_tree(*arg_list, **kwargs)
index = cls(repo, tmp_index)
- index.entries # force it to read the file as we will delete the temp-file
- del(index_handler) # release as soon as possible
+ index.entries # force it to read the file as we will delete the temp-file
+ del index_handler # release as soon as possible
finally:
if osp.exists(tmp_index):
os.remove(tmp_index)
@@ -363,14 +383,18 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
# UTILITIES
@unbare_repo
- def _iter_expand_paths(self: 'IndexFile', paths: Sequence[PathLike]) -> Iterator[PathLike]:
+ def _iter_expand_paths(
+ self: "IndexFile", paths: Sequence[PathLike]
+ ) -> Iterator[PathLike]:
"""Expand the directories in list of paths to the corresponding paths accordingly,
Note: git will add items multiple times even if a glob overlapped
with manually specified paths or if paths where specified multiple
times - we respect that and do not prune"""
+
def raise_exc(e: Exception) -> NoReturn:
raise e
+
r = str(self.repo.working_tree_dir)
rs = r + os.sep
for path in paths:
@@ -380,18 +404,20 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
# END make absolute path
try:
- st = os.lstat(abs_path) # handles non-symlinks as well
+ st = os.lstat(abs_path) # handles non-symlinks as well
except OSError:
# the lstat call may fail as the path may contain globs as well
pass
else:
if S_ISLNK(st.st_mode):
- yield abs_path.replace(rs, '')
+ yield abs_path.replace(rs, "")
continue
# end check symlink
# if the path is not already pointing to an existing file, resolve globs if possible
- if not os.path.exists(abs_path) and ('?' in abs_path or '*' in abs_path or '[' in abs_path):
+ if not os.path.exists(abs_path) and (
+ "?" in abs_path or "*" in abs_path or "[" in abs_path
+ ):
resolved_paths = glob.glob(abs_path)
# not abs_path in resolved_paths:
# a glob() resolving to the same path we are feeding it with
@@ -401,25 +427,31 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
# whose name contains wildcard characters.
if abs_path not in resolved_paths:
for f in self._iter_expand_paths(glob.glob(abs_path)):
- yield str(f).replace(rs, '')
+ yield str(f).replace(rs, "")
continue
# END glob handling
try:
for root, _dirs, files in os.walk(abs_path, onerror=raise_exc):
for rela_file in files:
# add relative paths only
- yield osp.join(root.replace(rs, ''), rela_file)
+ yield osp.join(root.replace(rs, ""), rela_file)
# END for each file in subdir
# END for each subdirectory
except OSError:
# was a file or something that could not be iterated
- yield abs_path.replace(rs, '')
+ yield abs_path.replace(rs, "")
# END path exception handling
# END for each path
- def _write_path_to_stdin(self, proc: 'Popen', filepath: PathLike, item: PathLike, fmakeexc: Callable[..., GitError],
- fprogress: Callable[[PathLike, bool, PathLike], None],
- read_from_stdout: bool = True) -> Union[None, str]:
+ def _write_path_to_stdin(
+ self,
+ proc: "Popen",
+ filepath: PathLike,
+ item: PathLike,
+ fmakeexc: Callable[..., GitError],
+ fprogress: Callable[[PathLike, bool, PathLike], None],
+ read_from_stdout: bool = True,
+ ) -> Union[None, str]:
"""Write path to proc.stdin and make sure it processes the item, including progress.
:return: stdout string
@@ -451,15 +483,16 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
fprogress(filepath, True, item)
return rval
- def iter_blobs(self, predicate: Callable[[Tuple[StageType, Blob]], bool] = lambda t: True
- ) -> Iterator[Tuple[StageType, Blob]]:
+ def iter_blobs(
+ self, predicate: Callable[[Tuple[StageType, Blob]], bool] = lambda t: True
+ ) -> Iterator[Tuple[StageType, Blob]]:
"""
:return: Iterator yielding tuples of Blob objects and stages, tuple(stage, Blob)
:param predicate:
Function(t) returning True if tuple(stage, Blob) should be yielded by the
iterator. A default filter, the BlobFilter, allows you to yield blobs
- only if they match a given list of paths. """
+ only if they match a given list of paths."""
for entry in self.entries.values():
blob = entry.to_blob(self.repo)
blob.size = entry.size
@@ -491,11 +524,13 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
return path_map
- @ classmethod
- def entry_key(cls, *entry: Union[BaseIndexEntry, PathLike, StageType]) -> Tuple[PathLike, StageType]:
+ @classmethod
+ def entry_key(
+ cls, *entry: Union[BaseIndexEntry, PathLike, StageType]
+ ) -> Tuple[PathLike, StageType]:
return entry_key(*entry)
- def resolve_blobs(self, iter_blobs: Iterator[Blob]) -> 'IndexFile':
+ def resolve_blobs(self, iter_blobs: Iterator[Blob]) -> "IndexFile":
"""Resolve the blobs given in blob iterator. This will effectively remove the
index entries of the respective path at all non-null stages and add the given
blob as new stage null blob.
@@ -519,7 +554,7 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
# delete all possible stages
for stage in (1, 2, 3):
try:
- del(self.entries[(blob.path, stage)])
+ del self.entries[(blob.path, stage)]
except KeyError:
pass
# END ignore key errors
@@ -530,7 +565,7 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
return self
- def update(self) -> 'IndexFile':
+ def update(self) -> "IndexFile":
"""Reread the contents of our index file, discarding all cached information
we might have.
@@ -550,7 +585,7 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
does not yet exist in the object database. This could happen if you added
Entries to the index directly.
:raise ValueError: if there are no entries in the cache
- :raise UnmergedEntriesError: """
+ :raise UnmergedEntriesError:"""
# we obtain no lock as we just flush our contents to disk as tree
# If we are a new index, the entries access will load our data accordingly
mdb = MemoryDB()
@@ -562,13 +597,14 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
# note: additional deserialization could be saved if write_tree_from_cache
# would return sorted tree entries
- root_tree = Tree(self.repo, binsha, path='')
+ root_tree = Tree(self.repo, binsha, path="")
root_tree._cache = tree_items
return root_tree
- def _process_diff_args(self, # type: ignore[override]
- args: List[Union[str, 'git_diff.Diffable', Type['git_diff.Diffable.Index']]]
- ) -> List[Union[str, 'git_diff.Diffable', Type['git_diff.Diffable.Index']]]:
+ def _process_diff_args(
+ self, # type: ignore[override]
+ args: List[Union[str, "git_diff.Diffable", Type["git_diff.Diffable.Index"]]],
+ ) -> List[Union[str, "git_diff.Diffable", Type["git_diff.Diffable.Index"]]]:
try:
args.pop(args.index(self))
except IndexError:
@@ -585,12 +621,16 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
if self.repo.bare:
raise InvalidGitRepositoryError("require non-bare repository")
if not str(path).startswith(str(self.repo.working_tree_dir)):
- raise ValueError("Absolute path %r is not in git repository at %r" % (path, self.repo.working_tree_dir))
+ raise ValueError(
+ "Absolute path %r is not in git repository at %r"
+ % (path, self.repo.working_tree_dir)
+ )
return os.path.relpath(path, self.repo.working_tree_dir)
- def _preprocess_add_items(self, items: Sequence[Union[PathLike, Blob, BaseIndexEntry, 'Submodule']]
- ) -> Tuple[List[PathLike], List[BaseIndexEntry]]:
- """ Split the items into two lists of path strings and BaseEntries. """
+ def _preprocess_add_items(
+ self, items: Sequence[Union[PathLike, Blob, BaseIndexEntry, "Submodule"]]
+ ) -> Tuple[List[PathLike], List[BaseIndexEntry]]:
+ """Split the items into two lists of path strings and BaseEntries."""
paths = []
entries = []
# if it is a string put in list
@@ -612,43 +652,58 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
def _store_path(self, filepath: PathLike, fprogress: Callable) -> BaseIndexEntry:
"""Store file at filepath in the database and return the base index entry
Needs the git_working_dir decorator active ! This must be assured in the calling code"""
- st = os.lstat(filepath) # handles non-symlinks as well
+ st = os.lstat(filepath) # handles non-symlinks as well
if S_ISLNK(st.st_mode):
# in PY3, readlink is string, but we need bytes. In PY2, it's just OS encoded bytes, we assume UTF-8
- open_stream: Callable[[], BinaryIO] = lambda: BytesIO(force_bytes(os.readlink(filepath),
- encoding=defenc))
+ open_stream: Callable[[], BinaryIO] = lambda: BytesIO(
+ force_bytes(os.readlink(filepath), encoding=defenc)
+ )
else:
- open_stream = lambda: open(filepath, 'rb')
+ open_stream = lambda: open(filepath, "rb")
with open_stream() as stream:
fprogress(filepath, False, filepath)
istream = self.repo.odb.store(IStream(Blob.type, st.st_size, stream))
fprogress(filepath, True, filepath)
- return BaseIndexEntry((stat_mode_to_index_mode(st.st_mode),
- istream.binsha, 0, to_native_path_linux(filepath)))
+ return BaseIndexEntry(
+ (
+ stat_mode_to_index_mode(st.st_mode),
+ istream.binsha,
+ 0,
+ to_native_path_linux(filepath),
+ )
+ )
- @ unbare_repo
- @ git_working_dir
- def _entries_for_paths(self, paths: List[str], path_rewriter: Callable, fprogress: Callable,
- entries: List[BaseIndexEntry]) -> List[BaseIndexEntry]:
+ @unbare_repo
+ @git_working_dir
+ def _entries_for_paths(
+ self,
+ paths: List[str],
+ path_rewriter: Callable,
+ fprogress: Callable,
+ entries: List[BaseIndexEntry],
+ ) -> List[BaseIndexEntry]:
entries_added: List[BaseIndexEntry] = []
if path_rewriter:
for path in paths:
if osp.isabs(path):
abspath = path
- gitrelative_path = path[len(str(self.repo.working_tree_dir)) + 1:]
+ gitrelative_path = path[len(str(self.repo.working_tree_dir)) + 1 :]
else:
gitrelative_path = path
if self.repo.working_tree_dir:
abspath = osp.join(self.repo.working_tree_dir, gitrelative_path)
# end obtain relative and absolute paths
- blob = Blob(self.repo, Blob.NULL_BIN_SHA,
- stat_mode_to_index_mode(os.stat(abspath).st_mode),
- to_native_path_linux(gitrelative_path))
+ blob = Blob(
+ self.repo,
+ Blob.NULL_BIN_SHA,
+ stat_mode_to_index_mode(os.stat(abspath).st_mode),
+ to_native_path_linux(gitrelative_path),
+ )
# TODO: variable undefined
entries.append(BaseIndexEntry.from_blob(blob))
# END for each path
- del(paths[:])
+ del paths[:]
# END rewrite paths
# HANDLE PATHS
@@ -659,9 +714,15 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
# END path handling
return entries_added
- def add(self, items: Sequence[Union[PathLike, Blob, BaseIndexEntry, 'Submodule']], force: bool = True,
- fprogress: Callable = lambda *args: None, path_rewriter: Union[Callable[..., PathLike], None] = None,
- write: bool = True, write_extension_data: bool = False) -> List[BaseIndexEntry]:
+ def add(
+ self,
+ items: Sequence[Union[PathLike, Blob, BaseIndexEntry, "Submodule"]],
+ force: bool = True,
+ fprogress: Callable = lambda *args: None,
+ path_rewriter: Union[Callable[..., PathLike], None] = None,
+ write: bool = True,
+ write_extension_data: bool = False,
+ ) -> List[BaseIndexEntry]:
"""Add files from the working tree, specific blobs or BaseIndexEntries
to the index.
@@ -769,30 +830,43 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
# That way, we are OK on a bare repository as well.
# If there are no paths, the rewriter has nothing to do either
if paths:
- entries_added.extend(self._entries_for_paths(paths, path_rewriter, fprogress, entries))
+ entries_added.extend(
+ self._entries_for_paths(paths, path_rewriter, fprogress, entries)
+ )
# HANDLE ENTRIES
if entries:
null_mode_entries = [e for e in entries if e.mode == 0]
if null_mode_entries:
raise ValueError(
- "At least one Entry has a null-mode - please use index.remove to remove files for clarity")
+ "At least one Entry has a null-mode - please use index.remove to remove files for clarity"
+ )
# END null mode should be remove
# HANDLE ENTRY OBJECT CREATION
# create objects if required, otherwise go with the existing shas
- null_entries_indices = [i for i, e in enumerate(entries) if e.binsha == Object.NULL_BIN_SHA]
+ null_entries_indices = [
+ i for i, e in enumerate(entries) if e.binsha == Object.NULL_BIN_SHA
+ ]
if null_entries_indices:
- @ git_working_dir
- def handle_null_entries(self: 'IndexFile') -> None:
+
+ @git_working_dir
+ def handle_null_entries(self: "IndexFile") -> None:
for ei in null_entries_indices:
null_entry = entries[ei]
new_entry = self._store_path(null_entry.path, fprogress)
# update null entry
entries[ei] = BaseIndexEntry(
- (null_entry.mode, new_entry.binsha, null_entry.stage, null_entry.path))
+ (
+ null_entry.mode,
+ new_entry.binsha,
+ null_entry.stage,
+ null_entry.path,
+ )
+ )
# END for each entry index
+
# end closure
handle_null_entries(self)
# END null_entry handling
@@ -802,7 +876,9 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
# all object sha's
if path_rewriter:
for i, e in enumerate(entries):
- entries[i] = BaseIndexEntry((e.mode, e.binsha, e.stage, path_rewriter(e)))
+ entries[i] = BaseIndexEntry(
+ (e.mode, e.binsha, e.stage, path_rewriter(e))
+ )
# END for each entry
# END handle path rewriting
@@ -828,8 +904,12 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
return entries_added
- def _items_to_rela_paths(self, items: Union[PathLike, Sequence[Union[PathLike, BaseIndexEntry, Blob, Submodule]]]
- ) -> List[PathLike]:
+ def _items_to_rela_paths(
+ self,
+ items: Union[
+ PathLike, Sequence[Union[PathLike, BaseIndexEntry, Blob, Submodule]]
+ ],
+ ) -> List[PathLike]:
"""Returns a list of repo-relative paths from the given items which
may be absolute or relative paths, entries or blobs"""
paths = []
@@ -847,10 +927,14 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
# END for each item
return paths
- @ post_clear_cache
- @ default_index
- def remove(self, items: Sequence[Union[PathLike, Blob, BaseIndexEntry, 'Submodule']], working_tree: bool = False,
- **kwargs: Any) -> List[str]:
+ @post_clear_cache
+ @default_index
+ def remove(
+ self,
+ items: Sequence[Union[PathLike, Blob, BaseIndexEntry, "Submodule"]],
+ working_tree: bool = False,
+ **kwargs: Any
+ ) -> List[str]:
"""Remove the given items from the index and optionally from
the working tree as well.
@@ -885,7 +969,7 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
List(path_string, ...) list of repository relative paths that have
been removed effectively.
This is interesting to know in case you have provided a directory or
- globs. Paths are relative to the repository. """
+ globs. Paths are relative to the repository."""
args = []
if not working_tree:
args.append("--cached")
@@ -899,10 +983,14 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
# rm 'path'
return [p[4:-1] for p in removed_paths]
- @ post_clear_cache
- @ default_index
- def move(self, items: Sequence[Union[PathLike, Blob, BaseIndexEntry, 'Submodule']], skip_errors: bool = False,
- **kwargs: Any) -> List[Tuple[str, str]]:
+ @post_clear_cache
+ @default_index
+ def move(
+ self,
+ items: Sequence[Union[PathLike, Blob, BaseIndexEntry, "Submodule"]],
+ skip_errors: bool = False,
+ **kwargs: Any
+ ) -> List[Tuple[str, str]]:
"""Rename/move the items, whereas the last item is considered the destination of
the move operation. If the destination is a file, the first item ( of two )
must be a file as well. If the destination is a directory, it may be preceded
@@ -928,14 +1016,16 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
GitCommandError: If git could not handle your request"""
args = []
if skip_errors:
- args.append('-k')
+ args.append("-k")
paths = self._items_to_rela_paths(items)
if len(paths) < 2:
- raise ValueError("Please provide at least one source and one destination of the move operation")
+ raise ValueError(
+ "Please provide at least one source and one destination of the move operation"
+ )
- was_dry_run = kwargs.pop('dry_run', kwargs.pop('n', None))
- kwargs['dry_run'] = True
+ was_dry_run = kwargs.pop("dry_run", kwargs.pop("n", None))
+ kwargs["dry_run"] = True
# first execute rename in dryrun so the command tells us what it actually does
# ( for later output )
@@ -945,7 +1035,7 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
# parse result - first 0:n/2 lines are 'checking ', the remaining ones
# are the 'renaming' ones which we parse
for ln in range(int(len(mvlines) / 2), len(mvlines)):
- tokens = mvlines[ln].split(' to ')
+ tokens = mvlines[ln].split(" to ")
assert len(tokens) == 2, "Too many tokens in %s" % mvlines[ln]
# [0] = Renaming x
@@ -959,20 +1049,22 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
# END handle dryrun
# now apply the actual operation
- kwargs.pop('dry_run')
+ kwargs.pop("dry_run")
self.repo.git.mv(args, paths, **kwargs)
return out
- def commit(self,
- message: str,
- parent_commits: Union[Commit_ish, None] = None,
- head: bool = True,
- author: Union[None, 'Actor'] = None,
- committer: Union[None, 'Actor'] = None,
- author_date: Union[str, None] = None,
- commit_date: Union[str, None] = None,
- skip_hooks: bool = False) -> Commit:
+ def commit(
+ self,
+ message: str,
+ parent_commits: Union[Commit_ish, None] = None,
+ head: bool = True,
+ author: Union[None, "Actor"] = None,
+ committer: Union[None, "Actor"] = None,
+ author_date: Union[str, None] = None,
+ commit_date: Union[str, None] = None,
+ skip_hooks: bool = False,
+ ) -> Commit:
"""Commit the current default index file, creating a commit object.
For more information on the arguments, see Commit.create_from_tree().
@@ -982,18 +1074,26 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
or `--no-verify` on the command line.
:return: Commit object representing the new commit"""
if not skip_hooks:
- run_commit_hook('pre-commit', self)
+ run_commit_hook("pre-commit", self)
self._write_commit_editmsg(message)
- run_commit_hook('commit-msg', self, self._commit_editmsg_filepath())
+ run_commit_hook("commit-msg", self, self._commit_editmsg_filepath())
message = self._read_commit_editmsg()
self._remove_commit_editmsg()
tree = self.write_tree()
- rval = Commit.create_from_tree(self.repo, tree, message, parent_commits,
- head, author=author, committer=committer,
- author_date=author_date, commit_date=commit_date)
+ rval = Commit.create_from_tree(
+ self.repo,
+ tree,
+ message,
+ parent_commits,
+ head,
+ author=author,
+ committer=committer,
+ author_date=author_date,
+ commit_date=commit_date,
+ )
if not skip_hooks:
- run_commit_hook('post-commit', self)
+ run_commit_hook("post-commit", self)
return rval
def _write_commit_editmsg(self, message: str) -> None:
@@ -1010,13 +1110,15 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
def _commit_editmsg_filepath(self) -> str:
return osp.join(self.repo.common_dir, "COMMIT_EDITMSG")
- def _flush_stdin_and_wait(cls, proc: 'Popen[bytes]', ignore_stdout: bool = False) -> bytes:
+ def _flush_stdin_and_wait(
+ cls, proc: "Popen[bytes]", ignore_stdout: bool = False
+ ) -> bytes:
stdin_IO = proc.stdin
if stdin_IO:
stdin_IO.flush()
stdin_IO.close()
- stdout = b''
+ stdout = b""
if not ignore_stdout and proc.stdout:
stdout = proc.stdout.read()
@@ -1025,10 +1127,14 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
proc.wait()
return stdout
- @ default_index
- def checkout(self, paths: Union[None, Iterable[PathLike]] = None, force: bool = False,
- fprogress: Callable = lambda *args: None, **kwargs: Any
- ) -> Union[None, Iterator[PathLike], Sequence[PathLike]]:
+ @default_index
+ def checkout(
+ self,
+ paths: Union[None, Iterable[PathLike]] = None,
+ force: bool = False,
+ fprogress: Callable = lambda *args: None,
+ **kwargs: Any
+ ) -> Union[None, Iterator[PathLike], Sequence[PathLike]]:
"""Checkout the given paths or all files from the version known to the index into
the working tree.
@@ -1070,7 +1176,7 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
the working tree will not be deleted. This behaviour is fundamentally
different to *head.checkout*, i.e. if you want git-checkout like behaviour,
use head.checkout instead of index.checkout.
- """
+ """
args = ["--index"]
if force:
args.append("--force")
@@ -1079,7 +1185,9 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
failed_reasons = []
unknown_lines = []
- def handle_stderr(proc: 'Popen[bytes]', iter_checked_out_files: Iterable[PathLike]) -> None:
+ def handle_stderr(
+ proc: "Popen[bytes]", iter_checked_out_files: Iterable[PathLike]
+ ) -> None:
stderr_IO = proc.stderr
if not stderr_IO:
@@ -1089,20 +1197,27 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
# line contents:
stderr = stderr_bytes.decode(defenc)
# git-checkout-index: this already exists
- endings = (' already exists', ' is not in the cache', ' does not exist at stage', ' is unmerged')
+ endings = (
+ " already exists",
+ " is not in the cache",
+ " does not exist at stage",
+ " is unmerged",
+ )
for line in stderr.splitlines():
- if not line.startswith("git checkout-index: ") and not line.startswith("git-checkout-index: "):
+ if not line.startswith("git checkout-index: ") and not line.startswith(
+ "git-checkout-index: "
+ ):
is_a_dir = " is a directory"
unlink_issue = "unable to unlink old '"
- already_exists_issue = ' already exists, no checkout' # created by entry.c:checkout_entry(...)
+ already_exists_issue = " already exists, no checkout" # created by entry.c:checkout_entry(...)
if line.endswith(is_a_dir):
- failed_files.append(line[:-len(is_a_dir)])
+ failed_files.append(line[: -len(is_a_dir)])
failed_reasons.append(is_a_dir)
elif line.startswith(unlink_issue):
- failed_files.append(line[len(unlink_issue):line.rfind("'")])
+ failed_files.append(line[len(unlink_issue) : line.rfind("'")])
failed_reasons.append(unlink_issue)
elif line.endswith(already_exists_issue):
- failed_files.append(line[:-len(already_exists_issue)])
+ failed_files.append(line[: -len(already_exists_issue)])
failed_reasons.append(already_exists_issue)
else:
unknown_lines.append(line)
@@ -1111,7 +1226,7 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
for e in endings:
if line.endswith(e):
- failed_files.append(line[20:-len(e)])
+ failed_files.append(line[20 : -len(e)])
failed_reasons.append(e)
break
# END if ending matches
@@ -1123,12 +1238,16 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
valid_files = list(set(iter_checked_out_files) - set(failed_files))
raise CheckoutError(
"Some files could not be checked out from the index due to local modifications",
- failed_files, valid_files, failed_reasons)
+ failed_files,
+ valid_files,
+ failed_reasons,
+ )
+
# END stderr handler
if paths is None:
args.append("--all")
- kwargs['as_process'] = 1
+ kwargs["as_process"] = 1
fprogress(None, False, None)
proc = self.repo.git.checkout_index(*args, **kwargs)
proc.wait()
@@ -1146,11 +1265,13 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
self.entries
args.append("--stdin")
- kwargs['as_process'] = True
- kwargs['istream'] = subprocess.PIPE
+ kwargs["as_process"] = True
+ kwargs["istream"] = subprocess.PIPE
proc = self.repo.git.checkout_index(args, **kwargs)
# FIXME: Reading from GIL!
- make_exc = lambda: GitCommandError(("git-checkout-index",) + tuple(args), 128, proc.stderr.read())
+ make_exc = lambda: GitCommandError(
+ ("git-checkout-index",) + tuple(args), 128, proc.stderr.read()
+ )
checked_out_files: List[PathLike] = []
for path in paths:
@@ -1162,13 +1283,14 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
self.entries[(co_path, 0)]
except KeyError:
folder = str(co_path)
- if not folder.endswith('/'):
- folder += '/'
+ if not folder.endswith("/"):
+ folder += "/"
for entry in self.entries.values():
if str(entry.path).startswith(folder):
p = entry.path
- self._write_path_to_stdin(proc, p, p, make_exc,
- fprogress, read_from_stdout=False)
+ self._write_path_to_stdin(
+ proc, p, p, make_exc, fprogress, read_from_stdout=False
+ )
checked_out_files.append(p)
path_is_directory = True
# END if entry is in directory
@@ -1176,8 +1298,9 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
# END path exception handlnig
if not path_is_directory:
- self._write_path_to_stdin(proc, co_path, path, make_exc,
- fprogress, read_from_stdout=False)
+ self._write_path_to_stdin(
+ proc, co_path, path, make_exc, fprogress, read_from_stdout=False
+ )
checked_out_files.append(co_path)
# END path is a file
# END for each path
@@ -1187,16 +1310,24 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
# Without parsing stdout we don't know what failed.
raise CheckoutError(
"Some files could not be checked out from the index, probably because they didn't exist.",
- failed_files, [], failed_reasons)
+ failed_files,
+ [],
+ failed_reasons,
+ )
handle_stderr(proc, checked_out_files)
return checked_out_files
# END paths handling
- @ default_index
- def reset(self, commit: Union[Commit, 'Reference', str] = 'HEAD', working_tree: bool = False,
- paths: Union[None, Iterable[PathLike]] = None,
- head: bool = False, **kwargs: Any) -> 'IndexFile':
+ @default_index
+ def reset(
+ self,
+ commit: Union[Commit, "Reference", str] = "HEAD",
+ working_tree: bool = False,
+ paths: Union[None, Iterable[PathLike]] = None,
+ head: bool = False,
+ **kwargs: Any
+ ) -> "IndexFile":
"""Reset the index to reflect the tree at the given commit. This will not
adjust our HEAD reference as opposed to HEAD.reset by default.
@@ -1228,7 +1359,7 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
checkout the files according to their state in the index.
If you want git-reset like behaviour, use *HEAD.reset* instead.
- :return: self """
+ :return: self"""
# what we actually want to do is to merge the tree into our existing
# index, which is what git-read-tree does
new_inst = type(self).from_tree(self.repo, commit)
@@ -1244,7 +1375,7 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
except KeyError:
# if key is not in theirs, it musn't be in ours
try:
- del(self.entries[key])
+ del self.entries[key]
except KeyError:
pass
# END handle deletion keyerror
@@ -1258,17 +1389,23 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
# END handle working tree
if head:
- self.repo.head.set_commit(self.repo.commit(commit), logmsg="%s: Updating HEAD" % commit)
+ self.repo.head.set_commit(
+ self.repo.commit(commit), logmsg="%s: Updating HEAD" % commit
+ )
# END handle head change
return self
# @ default_index, breaks typing for some reason, copied into function
- def diff(self, # type: ignore[override]
- other: Union[Type['git_diff.Diffable.Index'], 'Tree', 'Commit', str, None] = git_diff.Diffable.Index,
- paths: Union[PathLike, List[PathLike], Tuple[PathLike, ...], None] = None,
- create_patch: bool = False, **kwargs: Any
- ) -> git_diff.DiffIndex:
+ def diff(
+ self, # type: ignore[override]
+ other: Union[
+ Type["git_diff.Diffable.Index"], "Tree", "Commit", str, None
+ ] = git_diff.Diffable.Index,
+ paths: Union[PathLike, List[PathLike], Tuple[PathLike, ...], None] = None,
+ create_patch: bool = False,
+ **kwargs: Any
+ ) -> git_diff.DiffIndex:
"""Diff this index against the working copy or a Tree or Commit object
For a documentation of the parameters and return values, see,
@@ -1282,7 +1419,9 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
# only run if we are the default repository index
if self._file_path != self._index_path():
raise AssertionError(
- "Cannot call %r on indices that do not represent the default git index" % self.diff())
+ "Cannot call %r on indices that do not represent the default git index"
+ % self.diff()
+ )
# index against index is always empty
if other is self.Index:
return git_diff.DiffIndex()
@@ -1296,14 +1435,16 @@ class IndexFile(LazyMixin, git_diff.Diffable, Serializable):
if isinstance(other, Object): # for Tree or Commit
# invert the existing R flag
- cur_val = kwargs.get('R', False)
- kwargs['R'] = not cur_val
+ cur_val = kwargs.get("R", False)
+ kwargs["R"] = not cur_val
return other.diff(self.Index, paths, create_patch, **kwargs)
# END diff against other item handling
# if other is not None here, something is wrong
if other is not None:
- raise ValueError("other must be None, Diffable.Index, a Tree or Commit, was %r" % other)
+ raise ValueError(
+ "other must be None, Diffable.Index, a Tree or Commit, was %r" % other
+ )
# diff against working copy - can be handled by superclass natively
return super(IndexFile, self).diff(other, paths, create_patch, **kwargs)
diff --git a/git/index/fun.py b/git/index/fun.py
index acab7423..e8dead86 100644
--- a/git/index/fun.py
+++ b/git/index/fun.py
@@ -25,14 +25,11 @@ from git.compat import (
is_win,
safe_decode,
)
-from git.exc import (
- UnmergedEntriesError,
- HookExecutionError
-)
+from git.exc import UnmergedEntriesError, HookExecutionError
from git.objects.fun import (
tree_to_stream,
traverse_tree_recursive,
- traverse_trees_recursive
+ traverse_trees_recursive,
)
from git.util import IndexFileSHA1Writer, finalize_process
from gitdb.base import IStream
@@ -40,20 +37,12 @@ from gitdb.typ import str_tree_type
import os.path as osp
-from .typ import (
- BaseIndexEntry,
- IndexEntry,
- CE_NAMEMASK,
- CE_STAGESHIFT
-)
-from .util import (
- pack,
- unpack
-)
+from .typ import BaseIndexEntry, IndexEntry, CE_NAMEMASK, CE_STAGESHIFT
+from .util import pack, unpack
# typing -----------------------------------------------------------------------------
-from typing import (Dict, IO, List, Sequence, TYPE_CHECKING, Tuple, Type, Union, cast)
+from typing import Dict, IO, List, Sequence, TYPE_CHECKING, Tuple, Type, Union, cast
from git.types import PathLike
@@ -61,40 +50,49 @@ if TYPE_CHECKING:
from .base import IndexFile
from git.db import GitCmdObjectDB
from git.objects.tree import TreeCacheTup
+
# from git.objects.fun import EntryTupOrNone
# ------------------------------------------------------------------------------------
-S_IFGITLINK = S_IFLNK | S_IFDIR # a submodule
+S_IFGITLINK = S_IFLNK | S_IFDIR # a submodule
CE_NAMEMASK_INV = ~CE_NAMEMASK
-__all__ = ('write_cache', 'read_cache', 'write_tree_from_cache', 'entry_key',
- 'stat_mode_to_index_mode', 'S_IFGITLINK', 'run_commit_hook', 'hook_path')
+__all__ = (
+ "write_cache",
+ "read_cache",
+ "write_tree_from_cache",
+ "entry_key",
+ "stat_mode_to_index_mode",
+ "S_IFGITLINK",
+ "run_commit_hook",
+ "hook_path",
+)
def hook_path(name: str, git_dir: PathLike) -> str:
""":return: path to the given named hook in the given git repository directory"""
- return osp.join(git_dir, 'hooks', name)
+ return osp.join(git_dir, "hooks", name)
def _has_file_extension(path):
return osp.splitext(path)[1]
-def run_commit_hook(name: str, index: 'IndexFile', *args: str) -> None:
+def run_commit_hook(name: str, index: "IndexFile", *args: str) -> None:
"""Run the commit hook of the given name. Silently ignores hooks that do not exist.
:param name: name of hook, like 'pre-commit'
:param index: IndexFile instance
:param args: arguments passed to hook file
- :raises HookExecutionError: """
+ :raises HookExecutionError:"""
hp = hook_path(name, index.repo.git_dir)
if not os.access(hp, os.X_OK):
return None
env = os.environ.copy()
- env['GIT_INDEX_FILE'] = safe_decode(str(index.path))
- env['GIT_EDITOR'] = ':'
+ env["GIT_INDEX_FILE"] = safe_decode(str(index.path))
+ env["GIT_EDITOR"] = ":"
cmd = [hp]
try:
if is_win and not _has_file_extension(hp):
@@ -102,22 +100,26 @@ def run_commit_hook(name: str, index: 'IndexFile', *args: str) -> None:
# (doesn't understand shebangs). Try using bash to run the hook.
relative_hp = Path(hp).relative_to(index.repo.working_dir).as_posix()
cmd = ["bash.exe", relative_hp]
-
- cmd = subprocess.Popen(cmd + list(args),
- env=env,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- cwd=index.repo.working_dir,
- close_fds=is_posix,
- creationflags=PROC_CREATIONFLAGS,)
+
+ cmd = subprocess.Popen(
+ cmd + list(args),
+ env=env,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ cwd=index.repo.working_dir,
+ close_fds=is_posix,
+ creationflags=PROC_CREATIONFLAGS,
+ )
except Exception as ex:
raise HookExecutionError(hp, ex) from ex
else:
stdout_list: List[str] = []
stderr_list: List[str] = []
- handle_process_output(cmd, stdout_list.append, stderr_list.append, finalize_process)
- stdout = ''.join(stdout_list)
- stderr = ''.join(stderr_list)
+ handle_process_output(
+ cmd, stdout_list.append, stderr_list.append, finalize_process
+ )
+ stdout = "".join(stdout_list)
+ stderr = "".join(stderr_list)
if cmd.returncode != 0:
stdout = force_text(stdout, defenc)
stderr = force_text(stderr, defenc)
@@ -128,16 +130,21 @@ def run_commit_hook(name: str, index: 'IndexFile', *args: str) -> None:
def stat_mode_to_index_mode(mode: int) -> int:
"""Convert the given mode from a stat call to the corresponding index mode
and return it"""
- if S_ISLNK(mode): # symlinks
+ if S_ISLNK(mode): # symlinks
return S_IFLNK
- if S_ISDIR(mode) or S_IFMT(mode) == S_IFGITLINK: # submodules
+ if S_ISDIR(mode) or S_IFMT(mode) == S_IFGITLINK: # submodules
return S_IFGITLINK
- return S_IFREG | (mode & S_IXUSR and 0o755 or 0o644) # blobs with or without executable bit
+ return S_IFREG | (
+ mode & S_IXUSR and 0o755 or 0o644
+ ) # blobs with or without executable bit
-def write_cache(entries: Sequence[Union[BaseIndexEntry, 'IndexEntry']], stream: IO[bytes],
- extension_data: Union[None, bytes] = None,
- ShaStreamCls: Type[IndexFileSHA1Writer] = IndexFileSHA1Writer) -> None:
+def write_cache(
+ entries: Sequence[Union[BaseIndexEntry, "IndexEntry"]],
+ stream: IO[bytes],
+ extension_data: Union[None, bytes] = None,
+ ShaStreamCls: Type[IndexFileSHA1Writer] = IndexFileSHA1Writer,
+) -> None:
"""Write the cache represented by entries to a stream
:param entries: **sorted** list of entries
@@ -163,17 +170,28 @@ def write_cache(entries: Sequence[Union[BaseIndexEntry, 'IndexEntry']], stream:
# body
for entry in entries:
beginoffset = tell()
- write(entry.ctime_bytes) # ctime
- write(entry.mtime_bytes) # mtime
+ write(entry.ctime_bytes) # ctime
+ write(entry.mtime_bytes) # mtime
path_str = str(entry.path)
path: bytes = force_bytes(path_str, encoding=defenc)
- plen = len(path) & CE_NAMEMASK # path length
+ plen = len(path) & CE_NAMEMASK # path length
assert plen == len(path), "Path %s too long to fit into index" % entry.path
- flags = plen | (entry.flags & CE_NAMEMASK_INV) # clear possible previous values
- write(pack(">LLLLLL20sH", entry.dev, entry.inode, entry.mode,
- entry.uid, entry.gid, entry.size, entry.binsha, flags))
+ flags = plen | (entry.flags & CE_NAMEMASK_INV) # clear possible previous values
+ write(
+ pack(
+ ">LLLLLL20sH",
+ entry.dev,
+ entry.inode,
+ entry.mode,
+ entry.uid,
+ entry.gid,
+ entry.size,
+ entry.binsha,
+ flags,
+ )
+ )
write(path)
- real_size = ((tell() - beginoffset + 8) & ~7)
+ real_size = (tell() - beginoffset + 8) & ~7
write(b"\0" * ((beginoffset + real_size) - tell()))
# END for each entry
@@ -216,7 +234,9 @@ def entry_key(*entry: Union[BaseIndexEntry, PathLike, int]) -> Tuple[PathLike, i
# END handle entry
-def read_cache(stream: IO[bytes]) -> Tuple[int, Dict[Tuple[PathLike, int], 'IndexEntry'], bytes, bytes]:
+def read_cache(
+ stream: IO[bytes],
+) -> Tuple[int, Dict[Tuple[PathLike, int], "IndexEntry"], bytes, bytes]:
"""Read a cache file from the given stream
:return: tuple(version, entries_dict, extension_data, content_sha)
* version is the integer version number
@@ -225,7 +245,7 @@ def read_cache(stream: IO[bytes]) -> Tuple[int, Dict[Tuple[PathLike, int], 'Inde
* content_sha is a 20 byte sha on all cache file contents"""
version, num_entries = read_header(stream)
count = 0
- entries: Dict[Tuple[PathLike, int], 'IndexEntry'] = {}
+ entries: Dict[Tuple[PathLike, int], "IndexEntry"] = {}
read = stream.read
tell = stream.tell
@@ -233,14 +253,17 @@ def read_cache(stream: IO[bytes]) -> Tuple[int, Dict[Tuple[PathLike, int], 'Inde
beginoffset = tell()
ctime = unpack(">8s", read(8))[0]
mtime = unpack(">8s", read(8))[0]
- (dev, ino, mode, uid, gid, size, sha, flags) = \
- unpack(">LLLLLL20sH", read(20 + 4 * 6 + 2))
+ (dev, ino, mode, uid, gid, size, sha, flags) = unpack(
+ ">LLLLLL20sH", read(20 + 4 * 6 + 2)
+ )
path_size = flags & CE_NAMEMASK
path = read(path_size).decode(defenc)
- real_size = ((tell() - beginoffset + 8) & ~7)
+ real_size = (tell() - beginoffset + 8) & ~7
read((beginoffset + real_size) - tell())
- entry = IndexEntry((mode, sha, flags, path, ctime, mtime, dev, ino, uid, gid, size))
+ entry = IndexEntry(
+ (mode, sha, flags, path, ctime, mtime, dev, ino, uid, gid, size)
+ )
# entry_key would be the method to use, but we safe the effort
entries[(path, entry.stage)] = entry
count += 1
@@ -253,19 +276,22 @@ def read_cache(stream: IO[bytes]) -> Tuple[int, Dict[Tuple[PathLike, int], 'Inde
# 4 bytes length of chunk
# repeated 0 - N times
extension_data = stream.read(~0)
- assert len(extension_data) > 19, "Index Footer was not at least a sha on content as it was only %i bytes in size"\
- % len(extension_data)
+ assert len(extension_data) > 19, (
+ "Index Footer was not at least a sha on content as it was only %i bytes in size"
+ % len(extension_data)
+ )
content_sha = extension_data[-20:]
# truncate the sha in the end as we will dynamically create it anyway
- extension_data = extension_data[: -20]
+ extension_data = extension_data[:-20]
return (version, entries, extension_data, content_sha)
-def write_tree_from_cache(entries: List[IndexEntry], odb: 'GitCmdObjectDB', sl: slice, si: int = 0
- ) -> Tuple[bytes, List['TreeCacheTup']]:
+def write_tree_from_cache(
+ entries: List[IndexEntry], odb: "GitCmdObjectDB", sl: slice, si: int = 0
+) -> Tuple[bytes, List["TreeCacheTup"]]:
"""Create a tree from the given sorted list of entries and put the respective
trees into the given object database
@@ -275,7 +301,7 @@ def write_tree_from_cache(entries: List[IndexEntry], odb: 'GitCmdObjectDB', sl:
:param sl: slice indicating the range we should process on the entries list
:return: tuple(binsha, list(tree_entry, ...)) a tuple of a sha and a list of
tree entries being a tuple of hexsha, mode, name"""
- tree_items: List['TreeCacheTup'] = []
+ tree_items: List["TreeCacheTup"] = []
ci = sl.start
end = sl.stop
@@ -285,7 +311,7 @@ def write_tree_from_cache(entries: List[IndexEntry], odb: 'GitCmdObjectDB', sl:
raise UnmergedEntriesError(entry)
# END abort on unmerged
ci += 1
- rbound = entry.path.find('/', si)
+ rbound = entry.path.find("/", si)
if rbound == -1:
# its not a tree
tree_items.append((entry.binsha, entry.mode, entry.path[si:]))
@@ -295,7 +321,7 @@ def write_tree_from_cache(entries: List[IndexEntry], odb: 'GitCmdObjectDB', sl:
xi = ci
while xi < end:
oentry = entries[xi]
- orbound = oentry.path.find('/', si)
+ orbound = oentry.path.find("/", si)
if orbound == -1 or oentry.path[si:orbound] != base:
break
# END abort on base mismatch
@@ -304,7 +330,9 @@ def write_tree_from_cache(entries: List[IndexEntry], odb: 'GitCmdObjectDB', sl:
# enter recursion
# ci - 1 as we want to count our current item as well
- sha, _tree_entry_list = write_tree_from_cache(entries, odb, slice(ci - 1, xi), rbound + 1)
+ sha, _tree_entry_list = write_tree_from_cache(
+ entries, odb, slice(ci - 1, xi), rbound + 1
+ )
tree_items.append((sha, S_IFDIR, base))
# skip ahead
@@ -314,18 +342,26 @@ def write_tree_from_cache(entries: List[IndexEntry], odb: 'GitCmdObjectDB', sl:
# finally create the tree
sio = BytesIO()
- tree_to_stream(tree_items, sio.write) # writes to stream as bytes, but doesn't change tree_items
+ tree_to_stream(
+ tree_items, sio.write
+ ) # writes to stream as bytes, but doesn't change tree_items
sio.seek(0)
istream = odb.store(IStream(str_tree_type, len(sio.getvalue()), sio))
return (istream.binsha, tree_items)
-def _tree_entry_to_baseindexentry(tree_entry: 'TreeCacheTup', stage: int) -> BaseIndexEntry:
- return BaseIndexEntry((tree_entry[1], tree_entry[0], stage << CE_STAGESHIFT, tree_entry[2]))
+def _tree_entry_to_baseindexentry(
+ tree_entry: "TreeCacheTup", stage: int
+) -> BaseIndexEntry:
+ return BaseIndexEntry(
+ (tree_entry[1], tree_entry[0], stage << CE_STAGESHIFT, tree_entry[2])
+ )
-def aggressive_tree_merge(odb: 'GitCmdObjectDB', tree_shas: Sequence[bytes]) -> List[BaseIndexEntry]:
+def aggressive_tree_merge(
+ odb: "GitCmdObjectDB", tree_shas: Sequence[bytes]
+) -> List[BaseIndexEntry]:
"""
:return: list of BaseIndexEntries representing the aggressive merge of the given
trees. All valid entries are on stage 0, whereas the conflicting ones are left
@@ -339,7 +375,7 @@ def aggressive_tree_merge(odb: 'GitCmdObjectDB', tree_shas: Sequence[bytes]) ->
# one and two way is the same for us, as we don't have to handle an existing
# index, instrea
if len(tree_shas) in (1, 2):
- for entry in traverse_tree_recursive(odb, tree_shas[-1], ''):
+ for entry in traverse_tree_recursive(odb, tree_shas[-1], ""):
out.append(_tree_entry_to_baseindexentry(entry, 0))
# END for each entry
return out
@@ -349,7 +385,7 @@ def aggressive_tree_merge(odb: 'GitCmdObjectDB', tree_shas: Sequence[bytes]) ->
raise ValueError("Cannot handle %i trees at once" % len(tree_shas))
# three trees
- for base, ours, theirs in traverse_trees_recursive(odb, tree_shas, ''):
+ for base, ours, theirs in traverse_trees_recursive(odb, tree_shas, ""):
if base is not None:
# base version exists
if ours is not None:
@@ -358,8 +394,15 @@ def aggressive_tree_merge(odb: 'GitCmdObjectDB', tree_shas: Sequence[bytes]) ->
# it exists in all branches, if it was changed in both
# its a conflict, otherwise we take the changed version
# This should be the most common branch, so it comes first
- if(base[0] != ours[0] and base[0] != theirs[0] and ours[0] != theirs[0]) or \
- (base[1] != ours[1] and base[1] != theirs[1] and ours[1] != theirs[1]):
+ if (
+ base[0] != ours[0]
+ and base[0] != theirs[0]
+ and ours[0] != theirs[0]
+ ) or (
+ base[1] != ours[1]
+ and base[1] != theirs[1]
+ and ours[1] != theirs[1]
+ ):
# changed by both
out.append(_tree_entry_to_baseindexentry(base, 1))
out.append(_tree_entry_to_baseindexentry(ours, 2))
diff --git a/git/index/typ.py b/git/index/typ.py
index 46f1b077..cbe26f27 100644
--- a/git/index/typ.py
+++ b/git/index/typ.py
@@ -2,16 +2,13 @@
from binascii import b2a_hex
-from .util import (
- pack,
- unpack
-)
+from .util import pack, unpack
from git.objects import Blob
# typing ----------------------------------------------------------------------
-from typing import (NamedTuple, Sequence, TYPE_CHECKING, Tuple, Union, cast)
+from typing import NamedTuple, Sequence, TYPE_CHECKING, Tuple, Union, cast
from git.types import PathLike
@@ -20,16 +17,16 @@ if TYPE_CHECKING:
# ---------------------------------------------------------------------------------
-__all__ = ('BlobFilter', 'BaseIndexEntry', 'IndexEntry')
+__all__ = ("BlobFilter", "BaseIndexEntry", "IndexEntry")
-#{ Invariants
-CE_NAMEMASK = 0x0fff
+# { Invariants
+CE_NAMEMASK = 0x0FFF
CE_STAGEMASK = 0x3000
CE_EXTENDED = 0x4000
CE_VALID = 0x8000
CE_STAGESHIFT = 12
-#} END invariants
+# } END invariants
class BlobFilter(object):
@@ -40,7 +37,8 @@ class BlobFilter(object):
The given paths are given relative to the repository.
"""
- __slots__ = 'paths'
+
+ __slots__ = "paths"
def __init__(self, paths: Sequence[PathLike]) -> None:
"""
@@ -62,6 +60,7 @@ class BlobFilter(object):
class BaseIndexEntryHelper(NamedTuple):
"""Typed namedtuple to provide named attribute access for BaseIndexEntry.
Needed to allow overriding __new__ in child class to preserve backwards compat."""
+
mode: int
binsha: bytes
flags: int
@@ -85,10 +84,14 @@ class BaseIndexEntry(BaseIndexEntryHelper):
use numeric indices for performance reasons.
"""
- def __new__(cls, inp_tuple: Union[Tuple[int, bytes, int, PathLike],
- Tuple[int, bytes, int, PathLike, bytes, bytes, int, int, int, int, int]]
- ) -> 'BaseIndexEntry':
- """Override __new__ to allow construction from a tuple for backwards compatibility """
+ def __new__(
+ cls,
+ inp_tuple: Union[
+ Tuple[int, bytes, int, PathLike],
+ Tuple[int, bytes, int, PathLike, bytes, bytes, int, int, int, int, int],
+ ],
+ ) -> "BaseIndexEntry":
+ """Override __new__ to allow construction from a tuple for backwards compatibility"""
return super().__new__(cls, *inp_tuple)
def __str__(self) -> str:
@@ -100,7 +103,7 @@ class BaseIndexEntry(BaseIndexEntryHelper):
@property
def hexsha(self) -> str:
"""hex version of our sha"""
- return b2a_hex(self.binsha).decode('ascii')
+ return b2a_hex(self.binsha).decode("ascii")
@property
def stage(self) -> int:
@@ -116,11 +119,11 @@ class BaseIndexEntry(BaseIndexEntryHelper):
return (self.flags & CE_STAGEMASK) >> CE_STAGESHIFT
@classmethod
- def from_blob(cls, blob: Blob, stage: int = 0) -> 'BaseIndexEntry':
+ def from_blob(cls, blob: Blob, stage: int = 0) -> "BaseIndexEntry":
""":return: Fully equipped BaseIndexEntry at the given stage"""
return cls((blob.mode, blob.binsha, stage << CE_STAGESHIFT, blob.path))
- def to_blob(self, repo: 'Repo') -> Blob:
+ def to_blob(self, repo: "Repo") -> Blob:
""":return: Blob using the information of this index entry"""
return Blob(repo, self.binsha, self.mode, self.path)
@@ -132,7 +135,8 @@ class IndexEntry(BaseIndexEntry):
Attributes usully accessed often are cached in the tuple whereas others are
unpacked on demand.
- See the properties for a mapping between names and tuple indices. """
+ See the properties for a mapping between names and tuple indices."""
+
@property
def ctime(self) -> Tuple[int, int]:
"""
@@ -143,11 +147,11 @@ class IndexEntry(BaseIndexEntry):
@property
def mtime(self) -> Tuple[int, int]:
- """See ctime property, but returns modification time """
+ """See ctime property, but returns modification time"""
return cast(Tuple[int, int], unpack(">LL", self.mtime_bytes))
@classmethod
- def from_base(cls, base: 'BaseIndexEntry') -> 'IndexEntry':
+ def from_base(cls, base: "BaseIndexEntry") -> "IndexEntry":
"""
:return:
Minimal entry as created from the given BaseIndexEntry instance.
@@ -155,11 +159,26 @@ class IndexEntry(BaseIndexEntry):
:param base: Instance of type BaseIndexEntry"""
time = pack(">LL", 0, 0)
- return IndexEntry((base.mode, base.binsha, base.flags, base.path, time, time, 0, 0, 0, 0, 0))
+ return IndexEntry(
+ (base.mode, base.binsha, base.flags, base.path, time, time, 0, 0, 0, 0, 0)
+ )
@classmethod
- def from_blob(cls, blob: Blob, stage: int = 0) -> 'IndexEntry':
+ def from_blob(cls, blob: Blob, stage: int = 0) -> "IndexEntry":
""":return: Minimal entry resembling the given blob object"""
time = pack(">LL", 0, 0)
- return IndexEntry((blob.mode, blob.binsha, stage << CE_STAGESHIFT, blob.path,
- time, time, 0, 0, 0, 0, blob.size))
+ return IndexEntry(
+ (
+ blob.mode,
+ blob.binsha,
+ stage << CE_STAGESHIFT,
+ blob.path,
+ time,
+ time,
+ 0,
+ 0,
+ 0,
+ 0,
+ blob.size,
+ )
+ )
diff --git a/git/index/util.py b/git/index/util.py
index 4f8af553..7339b147 100644
--- a/git/index/util.py
+++ b/git/index/util.py
@@ -11,7 +11,7 @@ import os.path as osp
# typing ----------------------------------------------------------------------
-from typing import (Any, Callable, TYPE_CHECKING)
+from typing import Any, Callable, TYPE_CHECKING
from git.types import PathLike, _T
@@ -21,24 +21,26 @@ if TYPE_CHECKING:
# ---------------------------------------------------------------------------------
-__all__ = ('TemporaryFileSwap', 'post_clear_cache', 'default_index', 'git_working_dir')
+__all__ = ("TemporaryFileSwap", "post_clear_cache", "default_index", "git_working_dir")
-#{ Aliases
+# { Aliases
pack = struct.pack
unpack = struct.unpack
-#} END aliases
+# } END aliases
+
class TemporaryFileSwap(object):
"""Utility class moving a file to a temporary location within the same directory
and moving it back on to where on object deletion."""
+
__slots__ = ("file_path", "tmp_file_path")
def __init__(self, file_path: PathLike) -> None:
self.file_path = file_path
- self.tmp_file_path = str(self.file_path) + tempfile.mktemp('', '', '')
+ self.tmp_file_path = str(self.file_path) + tempfile.mktemp("", "", "")
# it may be that the source does not exist
try:
os.rename(self.file_path, self.tmp_file_path)
@@ -53,7 +55,8 @@ class TemporaryFileSwap(object):
# END temp file exists
-#{ Decorators
+# { Decorators
+
def post_clear_cache(func: Callable[..., _T]) -> Callable[..., _T]:
"""Decorator for functions that alter the index using the git command. This would
@@ -66,10 +69,13 @@ def post_clear_cache(func: Callable[..., _T]) -> Callable[..., _T]:
"""
@wraps(func)
- def post_clear_cache_if_not_raised(self: 'IndexFile', *args: Any, **kwargs: Any) -> _T:
+ def post_clear_cache_if_not_raised(
+ self: "IndexFile", *args: Any, **kwargs: Any
+ ) -> _T:
rval = func(self, *args, **kwargs)
self._delete_entries_cache()
return rval
+
# END wrapper method
return post_clear_cache_if_not_raised
@@ -78,14 +84,17 @@ def post_clear_cache(func: Callable[..., _T]) -> Callable[..., _T]:
def default_index(func: Callable[..., _T]) -> Callable[..., _T]:
"""Decorator assuring the wrapped method may only run if we are the default
repository index. This is as we rely on git commands that operate
- on that index only. """
+ on that index only."""
@wraps(func)
- def check_default_index(self: 'IndexFile', *args: Any, **kwargs: Any) -> _T:
+ def check_default_index(self: "IndexFile", *args: Any, **kwargs: Any) -> _T:
if self._file_path != self._index_path():
raise AssertionError(
- "Cannot call %r on indices that do not represent the default git index" % func.__name__)
+ "Cannot call %r on indices that do not represent the default git index"
+ % func.__name__
+ )
return func(self, *args, **kwargs)
+
# END wrapper method
return check_default_index
@@ -96,7 +105,7 @@ def git_working_dir(func: Callable[..., _T]) -> Callable[..., _T]:
repository in order to assure relative paths are handled correctly"""
@wraps(func)
- def set_git_working_dir(self: 'IndexFile', *args: Any, **kwargs: Any) -> _T:
+ def set_git_working_dir(self: "IndexFile", *args: Any, **kwargs: Any) -> _T:
cur_wd = os.getcwd()
os.chdir(str(self.repo.working_tree_dir))
try:
@@ -104,8 +113,10 @@ def git_working_dir(func: Callable[..., _T]) -> Callable[..., _T]:
finally:
os.chdir(cur_wd)
# END handle working dir
+
# END wrapper
return set_git_working_dir
-#} END decorators
+
+# } END decorators
diff --git a/git/objects/__init__.py b/git/objects/__init__.py
index 1d0bb7a5..d2e1e53a 100644
--- a/git/objects/__init__.py
+++ b/git/objects/__init__.py
@@ -12,13 +12,17 @@ from .submodule.base import *
from .submodule.root import *
from .tag import *
from .tree import *
+
# Fix import dependency - add IndexObject to the util module, so that it can be
# imported by the submodule.base
smutil.IndexObject = IndexObject # type: ignore[attr-defined]
smutil.Object = Object # type: ignore[attr-defined]
-del(smutil)
+del smutil
# must come after submodule was made available
-__all__ = [name for name, obj in locals().items()
- if not (name.startswith('_') or inspect.ismodule(obj))]
+__all__ = [
+ name
+ for name, obj in locals().items()
+ if not (name.startswith("_") or inspect.ismodule(obj))
+]
diff --git a/git/objects/base.py b/git/objects/base.py
index 66e15a8f..9d005725 100644
--- a/git/objects/base.py
+++ b/git/objects/base.py
@@ -27,7 +27,7 @@ if TYPE_CHECKING:
from .submodule.base import Submodule
from git.refs.reference import Reference
-IndexObjUnion = Union['Tree', 'Blob', 'Submodule']
+IndexObjUnion = Union["Tree", "Blob", "Submodule"]
# --------------------------------------------------------------------------
@@ -40,14 +40,20 @@ __all__ = ("Object", "IndexObject")
class Object(LazyMixin):
"""Implements an Object which may be Blobs, Trees, Commits and Tags"""
- NULL_HEX_SHA = '0' * 40
- NULL_BIN_SHA = b'\0' * 20
- TYPES = (dbtyp.str_blob_type, dbtyp.str_tree_type, dbtyp.str_commit_type, dbtyp.str_tag_type)
+ NULL_HEX_SHA = "0" * 40
+ NULL_BIN_SHA = b"\0" * 20
+
+ TYPES = (
+ dbtyp.str_blob_type,
+ dbtyp.str_tree_type,
+ dbtyp.str_commit_type,
+ dbtyp.str_tag_type,
+ )
__slots__ = ("repo", "binsha", "size")
type: Union[Lit_commit_ish, None] = None
- def __init__(self, repo: 'Repo', binsha: bytes):
+ def __init__(self, repo: "Repo", binsha: bytes):
"""Initialize an object by identifying it by its binary sha.
All keyword arguments will be set on demand if None.
@@ -57,10 +63,13 @@ class Object(LazyMixin):
super(Object, self).__init__()
self.repo = repo
self.binsha = binsha
- assert len(binsha) == 20, "Require 20 byte binary sha, got %r, len = %i" % (binsha, len(binsha))
+ assert len(binsha) == 20, "Require 20 byte binary sha, got %r, len = %i" % (
+ binsha,
+ len(binsha),
+ )
@classmethod
- def new(cls, repo: 'Repo', id: Union[str, 'Reference']) -> Commit_ish:
+ def new(cls, repo: "Repo", id: Union[str, "Reference"]) -> Commit_ish:
"""
:return: New Object instance of a type appropriate to the object type behind
id. The id of the newly created object will be a binsha even though
@@ -73,14 +82,14 @@ class Object(LazyMixin):
return repo.rev_parse(str(id))
@classmethod
- def new_from_sha(cls, repo: 'Repo', sha1: bytes) -> Commit_ish:
+ def new_from_sha(cls, repo: "Repo", sha1: bytes) -> Commit_ish:
"""
:return: new object instance of a type appropriate to represent the given
binary sha1
:param sha1: 20 byte binary sha1"""
if sha1 == cls.NULL_BIN_SHA:
# the NULL binsha is always the root commit
- return get_object_type_by_name(b'commit')(repo, sha1)
+ return get_object_type_by_name(b"commit")(repo, sha1)
# END handle special case
oinfo = repo.odb.info(sha1)
inst = get_object_type_by_name(oinfo.type)(repo, oinfo.binsha)
@@ -98,13 +107,13 @@ class Object(LazyMixin):
def __eq__(self, other: Any) -> bool:
""":return: True if the objects have the same SHA1"""
- if not hasattr(other, 'binsha'):
+ if not hasattr(other, "binsha"):
return False
return self.binsha == other.binsha
def __ne__(self, other: Any) -> bool:
- """:return: True if the objects do not have the same SHA1 """
- if not hasattr(other, 'binsha'):
+ """:return: True if the objects do not have the same SHA1"""
+ if not hasattr(other, "binsha"):
return True
return self.binsha != other.binsha
@@ -124,15 +133,15 @@ class Object(LazyMixin):
def hexsha(self) -> str:
""":return: 40 byte hex version of our 20 byte binary sha"""
# b2a_hex produces bytes
- return bin_to_hex(self.binsha).decode('ascii')
+ return bin_to_hex(self.binsha).decode("ascii")
@property
- def data_stream(self) -> 'OStream':
- """ :return: File Object compatible stream to the uncompressed raw data of the object
+ def data_stream(self) -> "OStream":
+ """:return: File Object compatible stream to the uncompressed raw data of the object
:note: returned streams must be read in order"""
return self.repo.odb.stream(self.binsha)
- def stream_data(self, ostream: 'OStream') -> 'Object':
+ def stream_data(self, ostream: "OStream") -> "Object":
"""Writes our data directly to the given output stream
:param ostream: File object compatible stream object.
:return: self"""
@@ -145,14 +154,19 @@ class IndexObject(Object):
"""Base for all objects that can be part of the index file , namely Tree, Blob and
SubModule objects"""
+
__slots__ = ("path", "mode")
# for compatibility with iterable lists
- _id_attribute_ = 'path'
-
- def __init__(self,
- repo: 'Repo', binsha: bytes, mode: Union[None, int] = None, path: Union[None, PathLike] = None
- ) -> None:
+ _id_attribute_ = "path"
+
+ def __init__(
+ self,
+ repo: "Repo",
+ binsha: bytes,
+ mode: Union[None, int] = None,
+ path: Union[None, PathLike] = None,
+ ) -> None:
"""Initialize a newly instanced IndexObject
:param repo: is the Repo we are located in
@@ -184,7 +198,8 @@ class IndexObject(Object):
# they cannot be retrieved lateron ( not without searching for them )
raise AttributeError(
"Attribute '%s' unset: path and mode attributes must have been set during %s object creation"
- % (attr, type(self).__name__))
+ % (attr, type(self).__name__)
+ )
else:
super(IndexObject, self)._set_cache_(attr)
# END handle slot attribute
@@ -201,7 +216,7 @@ class IndexObject(Object):
Absolute path to this index object in the file system ( as opposed to the
.path field which is a path relative to the git repository ).
- The returned path will be native to the system and contains '\' on windows. """
+ The returned path will be native to the system and contains '\' on windows."""
if self.repo.working_tree_dir is not None:
return join_path_native(self.repo.working_tree_dir, self.path)
else:
diff --git a/git/objects/blob.py b/git/objects/blob.py
index 99b5c636..1881f210 100644
--- a/git/objects/blob.py
+++ b/git/objects/blob.py
@@ -8,14 +8,15 @@ from . import base
from git.types import Literal
-__all__ = ('Blob', )
+__all__ = ("Blob",)
class Blob(base.IndexObject):
"""A Blob encapsulates a git blob object"""
+
DEFAULT_MIME_TYPE = "text/plain"
- type: Literal['blob'] = "blob"
+ type: Literal["blob"] = "blob"
# valid blob modes
executable_mode = 0o100755
@@ -28,7 +29,7 @@ class Blob(base.IndexObject):
def mime_type(self) -> str:
"""
:return: String describing the mime type of this file (based on the filename)
- :note: Defaults to 'text/plain' in case the actual file type is unknown. """
+ :note: Defaults to 'text/plain' in case the actual file type is unknown."""
guesses = None
if self.path:
guesses = guess_type(str(self.path))
diff --git a/git/objects/commit.py b/git/objects/commit.py
index 96a2a8e5..137cc620 100644
--- a/git/objects/commit.py
+++ b/git/objects/commit.py
@@ -6,12 +6,7 @@
import datetime
from subprocess import Popen, PIPE
from gitdb import IStream
-from git.util import (
- hex_to_bin,
- Actor,
- Stats,
- finalize_process
-)
+from git.util import hex_to_bin, Actor, Stats, finalize_process
from git.diff import Diffable
from git.cmd import Git
@@ -26,13 +21,7 @@ from .util import (
from_timestamp,
)
-from time import (
- time,
- daylight,
- altzone,
- timezone,
- localtime
-)
+from time import time, daylight, altzone, timezone, localtime
import os
from io import BytesIO
import logging
@@ -40,7 +29,18 @@ import logging
# typing ------------------------------------------------------------------
-from typing import Any, IO, Iterator, List, Sequence, Tuple, Union, TYPE_CHECKING, cast, Dict
+from typing import (
+ Any,
+ IO,
+ Iterator,
+ List,
+ Sequence,
+ Tuple,
+ Union,
+ TYPE_CHECKING,
+ cast,
+ Dict,
+)
from git.types import PathLike, Literal
@@ -50,10 +50,10 @@ if TYPE_CHECKING:
# ------------------------------------------------------------------------
-log = logging.getLogger('git.objects.commit')
+log = logging.getLogger("git.objects.commit")
log.addHandler(logging.NullHandler())
-__all__ = ('Commit', )
+__all__ = ("Commit",)
class Commit(base.Object, TraversableIterableObj, Diffable, Serializable):
@@ -69,30 +69,44 @@ class Commit(base.Object, TraversableIterableObj, Diffable, Serializable):
env_committer_date = "GIT_COMMITTER_DATE"
# CONFIGURATION KEYS
- conf_encoding = 'i18n.commitencoding'
+ conf_encoding = "i18n.commitencoding"
# INVARIANTS
default_encoding = "UTF-8"
# object configuration
- type: Literal['commit'] = "commit"
- __slots__ = ("tree",
- "author", "authored_date", "author_tz_offset",
- "committer", "committed_date", "committer_tz_offset",
- "message", "parents", "encoding", "gpgsig")
+ type: Literal["commit"] = "commit"
+ __slots__ = (
+ "tree",
+ "author",
+ "authored_date",
+ "author_tz_offset",
+ "committer",
+ "committed_date",
+ "committer_tz_offset",
+ "message",
+ "parents",
+ "encoding",
+ "gpgsig",
+ )
_id_attribute_ = "hexsha"
- def __init__(self, repo: 'Repo', binsha: bytes, tree: Union[Tree, None] = None,
- author: Union[Actor, None] = None,
- authored_date: Union[int, None] = None,
- author_tz_offset: Union[None, float] = None,
- committer: Union[Actor, None] = None,
- committed_date: Union[int, None] = None,
- committer_tz_offset: Union[None, float] = None,
- message: Union[str, bytes, None] = None,
- parents: Union[Sequence['Commit'], None] = None,
- encoding: Union[str, None] = None,
- gpgsig: Union[str, None] = None) -> None:
+ def __init__(
+ self,
+ repo: "Repo",
+ binsha: bytes,
+ tree: Union[Tree, None] = None,
+ author: Union[Actor, None] = None,
+ authored_date: Union[int, None] = None,
+ author_tz_offset: Union[None, float] = None,
+ committer: Union[Actor, None] = None,
+ committed_date: Union[int, None] = None,
+ committer_tz_offset: Union[None, float] = None,
+ message: Union[str, bytes, None] = None,
+ parents: Union[Sequence["Commit"], None] = None,
+ encoding: Union[str, None] = None,
+ gpgsig: Union[str, None] = None,
+ ) -> None:
"""Instantiate a new Commit. All keyword arguments taking None as default will
be implicitly set on first query.
@@ -130,7 +144,9 @@ class Commit(base.Object, TraversableIterableObj, Diffable, Serializable):
super(Commit, self).__init__(repo, binsha)
self.binsha = binsha
if tree is not None:
- assert isinstance(tree, Tree), "Tree needs to be a Tree instance, was %s" % type(tree)
+ assert isinstance(
+ tree, Tree
+ ), "Tree needs to be a Tree instance, was %s" % type(tree)
if tree is not None:
self.tree = tree
if author is not None:
@@ -155,16 +171,16 @@ class Commit(base.Object, TraversableIterableObj, Diffable, Serializable):
self.gpgsig = gpgsig
@classmethod
- def _get_intermediate_items(cls, commit: 'Commit') -> Tuple['Commit', ...]:
+ def _get_intermediate_items(cls, commit: "Commit") -> Tuple["Commit", ...]:
return tuple(commit.parents)
@classmethod
- def _calculate_sha_(cls, repo: 'Repo', commit: 'Commit') -> bytes:
- '''Calculate the sha of a commit.
+ def _calculate_sha_(cls, repo: "Repo", commit: "Commit") -> bytes:
+ """Calculate the sha of a commit.
:param repo: Repo object the commit should be part of
:param commit: Commit object for which to generate the sha
- '''
+ """
stream = BytesIO()
commit._serialize(stream)
@@ -174,18 +190,18 @@ class Commit(base.Object, TraversableIterableObj, Diffable, Serializable):
istream = repo.odb.store(IStream(cls.type, streamlen, stream))
return istream.binsha
- def replace(self, **kwargs: Any) -> 'Commit':
- '''Create new commit object from existing commit object.
+ def replace(self, **kwargs: Any) -> "Commit":
+ """Create new commit object from existing commit object.
Any values provided as keyword arguments will replace the
corresponding attribute in the new object.
- '''
+ """
attrs = {k: getattr(self, k) for k in self.__slots__}
for attrname in kwargs:
if attrname not in self.__slots__:
- raise ValueError('invalid attribute name')
+ raise ValueError("invalid attribute name")
attrs.update(kwargs)
new_commit = self.__class__(self.repo, self.NULL_BIN_SHA, **attrs)
@@ -214,11 +230,13 @@ class Commit(base.Object, TraversableIterableObj, Diffable, Serializable):
def summary(self) -> Union[str, bytes]:
""":return: First line of the commit message"""
if isinstance(self.message, str):
- return self.message.split('\n', 1)[0]
+ return self.message.split("\n", 1)[0]
else:
- return self.message.split(b'\n', 1)[0]
+ return self.message.split(b"\n", 1)[0]
- def count(self, paths: Union[PathLike, Sequence[PathLike]] = '', **kwargs: Any) -> int:
+ def count(
+ self, paths: Union[PathLike, Sequence[PathLike]] = "", **kwargs: Any
+ ) -> int:
"""Count the number of commits reachable from this commit
:param paths:
@@ -232,7 +250,9 @@ class Commit(base.Object, TraversableIterableObj, Diffable, Serializable):
# yes, it makes a difference whether empty paths are given or not in our case
# as the empty paths version will ignore merge commits for some reason.
if paths:
- return len(self.repo.git.rev_list(self.hexsha, '--', paths, **kwargs).splitlines())
+ return len(
+ self.repo.git.rev_list(self.hexsha, "--", paths, **kwargs).splitlines()
+ )
return len(self.repo.git.rev_list(self.hexsha, **kwargs).splitlines())
@property
@@ -244,9 +264,13 @@ class Commit(base.Object, TraversableIterableObj, Diffable, Serializable):
return self.repo.git.name_rev(self)
@classmethod
- def iter_items(cls, repo: 'Repo', rev: Union[str, 'Commit', 'SymbolicReference'], # type: ignore
- paths: Union[PathLike, Sequence[PathLike]] = '', **kwargs: Any
- ) -> Iterator['Commit']:
+ def iter_items(
+ cls,
+ repo: "Repo",
+ rev: Union[str, "Commit", "SymbolicReference"], # type: ignore
+ paths: Union[PathLike, Sequence[PathLike]] = "",
+ **kwargs: Any,
+ ) -> Iterator["Commit"]:
"""Find all commits matching the given criteria.
:param repo: is the Repo
@@ -260,19 +284,21 @@ class Commit(base.Object, TraversableIterableObj, Diffable, Serializable):
``skip`` is the number of commits to skip
``since`` all commits since i.e. '1970-01-01'
:return: iterator yielding Commit items"""
- if 'pretty' in kwargs:
- raise ValueError("--pretty cannot be used as parsing expects single sha's only")
+ if "pretty" in kwargs:
+ raise ValueError(
+ "--pretty cannot be used as parsing expects single sha's only"
+ )
# END handle pretty
# use -- in any case, to prevent possibility of ambiguous arguments
# see https://github.com/gitpython-developers/GitPython/issues/264
- args_list: List[PathLike] = ['--']
+ args_list: List[PathLike] = ["--"]
if paths:
paths_tup: Tuple[PathLike, ...]
if isinstance(paths, (str, os.PathLike)):
- paths_tup = (paths, )
+ paths_tup = (paths,)
else:
paths_tup = tuple(paths)
@@ -282,37 +308,41 @@ class Commit(base.Object, TraversableIterableObj, Diffable, Serializable):
proc = repo.git.rev_list(rev, args_list, as_process=True, **kwargs)
return cls._iter_from_process_or_stream(repo, proc)
- def iter_parents(self, paths: Union[PathLike, Sequence[PathLike]] = '', **kwargs: Any) -> Iterator['Commit']:
+ def iter_parents(
+ self, paths: Union[PathLike, Sequence[PathLike]] = "", **kwargs: Any
+ ) -> Iterator["Commit"]:
"""Iterate _all_ parents of this commit.
:param paths:
Optional path or list of paths limiting the Commits to those that
contain at least one of the paths
:param kwargs: All arguments allowed by git-rev-list
- :return: Iterator yielding Commit objects which are parents of self """
+ :return: Iterator yielding Commit objects which are parents of self"""
# skip ourselves
skip = kwargs.get("skip", 1)
- if skip == 0: # skip ourselves
+ if skip == 0: # skip ourselves
skip = 1
- kwargs['skip'] = skip
+ kwargs["skip"] = skip
return self.iter_items(self.repo, self, paths, **kwargs)
- @ property
+ @property
def stats(self) -> Stats:
"""Create a git stat from changes between this commit and its first parent
or from all changes done if this is the very first commit.
:return: git.Stats"""
if not self.parents:
- text = self.repo.git.diff_tree(self.hexsha, '--', numstat=True, root=True)
+ text = self.repo.git.diff_tree(self.hexsha, "--", numstat=True, root=True)
text2 = ""
for line in text.splitlines()[1:]:
(insertions, deletions, filename) = line.split("\t")
text2 += "%s\t%s\t%s\n" % (insertions, deletions, filename)
text = text2
else:
- text = self.repo.git.diff(self.parents[0].hexsha, self.hexsha, '--', numstat=True)
+ text = self.repo.git.diff(
+ self.parents[0].hexsha, self.hexsha, "--", numstat=True
+ )
return Stats._list_from_string(self.repo, text)
@property
@@ -352,19 +382,21 @@ class Commit(base.Object, TraversableIterableObj, Diffable, Serializable):
"""
d = {}
- cmd = ['git', 'interpret-trailers', '--parse']
+ cmd = ["git", "interpret-trailers", "--parse"]
proc: Git.AutoInterrupt = self.repo.git.execute(cmd, as_process=True, istream=PIPE) # type: ignore
trailer: str = proc.communicate(str(self.message).encode())[0].decode()
- if trailer.endswith('\n'):
+ if trailer.endswith("\n"):
trailer = trailer[0:-1]
- if trailer != '':
- for line in trailer.split('\n'):
- key, value = line.split(':', 1)
+ if trailer != "":
+ for line in trailer.split("\n"):
+ key, value = line.split(":", 1)
d[key.strip()] = value.strip()
return d
- @ classmethod
- def _iter_from_process_or_stream(cls, repo: 'Repo', proc_or_stream: Union[Popen, IO]) -> Iterator['Commit']:
+ @classmethod
+ def _iter_from_process_or_stream(
+ cls, repo: "Repo", proc_or_stream: Union[Popen, IO]
+ ) -> Iterator["Commit"]:
"""Parse out commit information into a list of Commit objects
We expect one-line per commit, and parse the actual commit information directly
from our lighting fast object database
@@ -378,11 +410,11 @@ class Commit(base.Object, TraversableIterableObj, Diffable, Serializable):
# def is_stream(inp) -> TypeGuard[IO]:
# return hasattr(proc_or_stream, 'readline')
- if hasattr(proc_or_stream, 'wait'):
+ if hasattr(proc_or_stream, "wait"):
proc_or_stream = cast(Popen, proc_or_stream)
if proc_or_stream.stdout is not None:
stream = proc_or_stream.stdout
- elif hasattr(proc_or_stream, 'readline'):
+ elif hasattr(proc_or_stream, "readline"):
proc_or_stream = cast(IO, proc_or_stream)
stream = proc_or_stream
@@ -402,15 +434,23 @@ class Commit(base.Object, TraversableIterableObj, Diffable, Serializable):
# END for each line in stream
# TODO: Review this - it seems process handling got a bit out of control
# due to many developers trying to fix the open file handles issue
- if hasattr(proc_or_stream, 'wait'):
+ if hasattr(proc_or_stream, "wait"):
proc_or_stream = cast(Popen, proc_or_stream)
finalize_process(proc_or_stream)
- @ classmethod
- def create_from_tree(cls, repo: 'Repo', tree: Union[Tree, str], message: str,
- parent_commits: Union[None, List['Commit']] = None, head: bool = False,
- author: Union[None, Actor] = None, committer: Union[None, Actor] = None,
- author_date: Union[None, str] = None, commit_date: Union[None, str] = None) -> 'Commit':
+ @classmethod
+ def create_from_tree(
+ cls,
+ repo: "Repo",
+ tree: Union[Tree, str],
+ message: str,
+ parent_commits: Union[None, List["Commit"]] = None,
+ head: bool = False,
+ author: Union[None, Actor] = None,
+ committer: Union[None, Actor] = None,
+ author_date: Union[None, str] = None,
+ commit_date: Union[None, str] = None,
+ ) -> "Commit":
"""Commit the given tree, creating a commit object.
:param repo: Repo object the commit should be part of
@@ -473,7 +513,7 @@ class Commit(base.Object, TraversableIterableObj, Diffable, Serializable):
is_dst = daylight and localtime().tm_isdst > 0
offset = altzone if is_dst else timezone
- author_date_str = env.get(cls.env_author_date, '')
+ author_date_str = env.get(cls.env_author_date, "")
if author_date:
author_time, author_offset = parse_date(author_date)
elif author_date_str:
@@ -482,7 +522,7 @@ class Commit(base.Object, TraversableIterableObj, Diffable, Serializable):
author_time, author_offset = unix_time, offset
# END set author time
- committer_date_str = env.get(cls.env_committer_date, '')
+ committer_date_str = env.get(cls.env_committer_date, "")
if commit_date:
committer_time, committer_offset = parse_date(commit_date)
elif committer_date_str:
@@ -492,7 +532,7 @@ class Commit(base.Object, TraversableIterableObj, Diffable, Serializable):
# END set committer time
# assume utf8 encoding
- enc_section, enc_option = cls.conf_encoding.split('.')
+ enc_section, enc_option = cls.conf_encoding.split(".")
conf_encoding = cr.get_value(enc_section, enc_option, cls.default_encoding)
if not isinstance(conf_encoding, str):
raise TypeError("conf_encoding could not be coerced to str")
@@ -504,10 +544,20 @@ class Commit(base.Object, TraversableIterableObj, Diffable, Serializable):
# END tree conversion
# CREATE NEW COMMIT
- new_commit = cls(repo, cls.NULL_BIN_SHA, tree,
- author, author_time, author_offset,
- committer, committer_time, committer_offset,
- message, parent_commits, conf_encoding)
+ new_commit = cls(
+ repo,
+ cls.NULL_BIN_SHA,
+ tree,
+ author,
+ author_time,
+ author_offset,
+ committer,
+ committer_time,
+ committer_offset,
+ message,
+ parent_commits,
+ conf_encoding,
+ )
new_commit.binsha = cls._calculate_sha_(repo, new_commit)
@@ -515,48 +565,74 @@ class Commit(base.Object, TraversableIterableObj, Diffable, Serializable):
# need late import here, importing git at the very beginning throws
# as well ...
import git.refs
+
try:
repo.head.set_commit(new_commit, logmsg=message)
except ValueError:
# head is not yet set to the ref our HEAD points to
# Happens on first commit
- master = git.refs.Head.create(repo, repo.head.ref, new_commit, logmsg="commit (initial): %s" % message)
- repo.head.set_reference(master, logmsg='commit: Switching to %s' % master)
+ master = git.refs.Head.create(
+ repo,
+ repo.head.ref,
+ new_commit,
+ logmsg="commit (initial): %s" % message,
+ )
+ repo.head.set_reference(
+ master, logmsg="commit: Switching to %s" % master
+ )
# END handle empty repositories
# END advance head handling
return new_commit
- #{ Serializable Implementation
+ # { Serializable Implementation
- def _serialize(self, stream: BytesIO) -> 'Commit':
+ def _serialize(self, stream: BytesIO) -> "Commit":
write = stream.write
- write(("tree %s\n" % self.tree).encode('ascii'))
+ write(("tree %s\n" % self.tree).encode("ascii"))
for p in self.parents:
- write(("parent %s\n" % p).encode('ascii'))
+ write(("parent %s\n" % p).encode("ascii"))
a = self.author
aname = a.name
c = self.committer
fmt = "%s %s <%s> %s %s\n"
- write((fmt % ("author", aname, a.email,
- self.authored_date,
- altz_to_utctz_str(self.author_tz_offset))).encode(self.encoding))
+ write(
+ (
+ fmt
+ % (
+ "author",
+ aname,
+ a.email,
+ self.authored_date,
+ altz_to_utctz_str(self.author_tz_offset),
+ )
+ ).encode(self.encoding)
+ )
# encode committer
aname = c.name
- write((fmt % ("committer", aname, c.email,
- self.committed_date,
- altz_to_utctz_str(self.committer_tz_offset))).encode(self.encoding))
+ write(
+ (
+ fmt
+ % (
+ "committer",
+ aname,
+ c.email,
+ self.committed_date,
+ altz_to_utctz_str(self.committer_tz_offset),
+ )
+ ).encode(self.encoding)
+ )
if self.encoding != self.default_encoding:
- write(("encoding %s\n" % self.encoding).encode('ascii'))
+ write(("encoding %s\n" % self.encoding).encode("ascii"))
try:
- if self.__getattribute__('gpgsig'):
+ if self.__getattribute__("gpgsig"):
write(b"gpgsig")
for sigline in self.gpgsig.rstrip("\n").split("\n"):
- write((" " + sigline + "\n").encode('ascii'))
+ write((" " + sigline + "\n").encode("ascii"))
except AttributeError:
pass
@@ -570,23 +646,29 @@ class Commit(base.Object, TraversableIterableObj, Diffable, Serializable):
# END handle encoding
return self
- def _deserialize(self, stream: BytesIO) -> 'Commit':
+ def _deserialize(self, stream: BytesIO) -> "Commit":
"""
:param from_rev_list: if true, the stream format is coming from the rev-list command
Otherwise it is assumed to be a plain data stream from our object
"""
readline = stream.readline
- self.tree = Tree(self.repo, hex_to_bin(readline().split()[1]), Tree.tree_id << 12, '')
+ self.tree = Tree(
+ self.repo, hex_to_bin(readline().split()[1]), Tree.tree_id << 12, ""
+ )
self.parents = []
next_line = None
while True:
parent_line = readline()
- if not parent_line.startswith(b'parent'):
+ if not parent_line.startswith(b"parent"):
next_line = parent_line
break
# END abort reading parents
- self.parents.append(type(self)(self.repo, hex_to_bin(parent_line.split()[-1].decode('ascii'))))
+ self.parents.append(
+ type(self)(
+ self.repo, hex_to_bin(parent_line.split()[-1].decode("ascii"))
+ )
+ )
# END for each parent line
self.parents = tuple(self.parents)
@@ -596,9 +678,9 @@ class Commit(base.Object, TraversableIterableObj, Diffable, Serializable):
# we might run into one or more mergetag blocks, skip those for now
next_line = readline()
- while next_line.startswith(b'mergetag '):
+ while next_line.startswith(b"mergetag "):
next_line = readline()
- while next_line.startswith(b' '):
+ while next_line.startswith(b" "):
next_line = readline()
# end skip mergetags
@@ -612,10 +694,11 @@ class Commit(base.Object, TraversableIterableObj, Diffable, Serializable):
buf = enc.strip()
while buf:
if buf[0:10] == b"encoding ":
- self.encoding = buf[buf.find(b' ') + 1:].decode(
- self.encoding, 'ignore')
+ self.encoding = buf[buf.find(b" ") + 1 :].decode(
+ self.encoding, "ignore"
+ )
elif buf[0:7] == b"gpgsig ":
- sig = buf[buf.find(b' ') + 1:] + b"\n"
+ sig = buf[buf.find(b" ") + 1 :] + b"\n"
is_next_header = False
while True:
sigbuf = readline()
@@ -627,37 +710,55 @@ class Commit(base.Object, TraversableIterableObj, Diffable, Serializable):
break
sig += sigbuf[1:]
# end read all signature
- self.gpgsig = sig.rstrip(b"\n").decode(self.encoding, 'ignore')
+ self.gpgsig = sig.rstrip(b"\n").decode(self.encoding, "ignore")
if is_next_header:
continue
buf = readline().strip()
# decode the authors name
try:
- (self.author, self.authored_date, self.author_tz_offset) = \
- parse_actor_and_date(author_line.decode(self.encoding, 'replace'))
+ (
+ self.author,
+ self.authored_date,
+ self.author_tz_offset,
+ ) = parse_actor_and_date(author_line.decode(self.encoding, "replace"))
except UnicodeDecodeError:
- log.error("Failed to decode author line '%s' using encoding %s", author_line, self.encoding,
- exc_info=True)
+ log.error(
+ "Failed to decode author line '%s' using encoding %s",
+ author_line,
+ self.encoding,
+ exc_info=True,
+ )
try:
- self.committer, self.committed_date, self.committer_tz_offset = \
- parse_actor_and_date(committer_line.decode(self.encoding, 'replace'))
+ (
+ self.committer,
+ self.committed_date,
+ self.committer_tz_offset,
+ ) = parse_actor_and_date(committer_line.decode(self.encoding, "replace"))
except UnicodeDecodeError:
- log.error("Failed to decode committer line '%s' using encoding %s", committer_line, self.encoding,
- exc_info=True)
+ log.error(
+ "Failed to decode committer line '%s' using encoding %s",
+ committer_line,
+ self.encoding,
+ exc_info=True,
+ )
# END handle author's encoding
# a stream from our data simply gives us the plain message
# The end of our message stream is marked with a newline that we strip
self.message = stream.read()
try:
- self.message = self.message.decode(self.encoding, 'replace')
+ self.message = self.message.decode(self.encoding, "replace")
except UnicodeDecodeError:
- log.error("Failed to decode message '%s' using encoding %s",
- self.message, self.encoding, exc_info=True)
+ log.error(
+ "Failed to decode message '%s' using encoding %s",
+ self.message,
+ self.encoding,
+ exc_info=True,
+ )
# END exception handling
return self
- #} END serializable implementation
+ # } END serializable implementation
diff --git a/git/objects/fun.py b/git/objects/fun.py
index 19b4e525..de065599 100644
--- a/git/objects/fun.py
+++ b/git/objects/fun.py
@@ -2,14 +2,20 @@
from stat import S_ISDIR
-from git.compat import (
- safe_decode,
- defenc
-)
+from git.compat import safe_decode, defenc
# typing ----------------------------------------------
-from typing import Callable, List, MutableSequence, Sequence, Tuple, TYPE_CHECKING, Union, overload
+from typing import (
+ Callable,
+ List,
+ MutableSequence,
+ Sequence,
+ Tuple,
+ TYPE_CHECKING,
+ Union,
+ overload,
+)
if TYPE_CHECKING:
from _typeshed import ReadableBuffer
@@ -21,19 +27,25 @@ EntryTupOrNone = Union[EntryTup, None]
# ---------------------------------------------------
-__all__ = ('tree_to_stream', 'tree_entries_from_data', 'traverse_trees_recursive',
- 'traverse_tree_recursive')
+__all__ = (
+ "tree_to_stream",
+ "tree_entries_from_data",
+ "traverse_trees_recursive",
+ "traverse_tree_recursive",
+)
-def tree_to_stream(entries: Sequence[EntryTup], write: Callable[['ReadableBuffer'], Union[int, None]]) -> None:
+def tree_to_stream(
+ entries: Sequence[EntryTup], write: Callable[["ReadableBuffer"], Union[int, None]]
+) -> None:
"""Write the give list of entries into a stream using its write method
:param entries: **sorted** list of tuples with (binsha, mode, name)
:param write: write method which takes a data string"""
- ord_zero = ord('0')
- bit_mask = 7 # 3 bits set
+ ord_zero = ord("0")
+ bit_mask = 7 # 3 bits set
for binsha, mode, name in entries:
- mode_str = b''
+ mode_str = b""
for i in range(6):
mode_str = bytes([((mode >> (i * 3)) & bit_mask) + ord_zero]) + mode_str
# END for each 8 octal value
@@ -52,7 +64,7 @@ def tree_to_stream(entries: Sequence[EntryTup], write: Callable[['ReadableBuffer
name_bytes = name.encode(defenc)
else:
name_bytes = name # type: ignore[unreachable] # check runtime types - is always str?
- write(b''.join((mode_str, b' ', name_bytes, b'\0', binsha)))
+ write(b"".join((mode_str, b" ", name_bytes, b"\0", binsha)))
# END for each item
@@ -60,8 +72,8 @@ def tree_entries_from_data(data: bytes) -> List[EntryTup]:
"""Reads the binary representation of a tree and returns tuples of Tree items
:param data: data block with tree data (as bytes)
:return: list(tuple(binsha, mode, tree_relative_path), ...)"""
- ord_zero = ord('0')
- space_ord = ord(' ')
+ ord_zero = ord("0")
+ space_ord = ord(" ")
len_data = len(data)
i = 0
out = []
@@ -95,15 +107,16 @@ def tree_entries_from_data(data: bytes) -> List[EntryTup]:
# byte is NULL, get next 20
i += 1
- sha = data[i:i + 20]
+ sha = data[i : i + 20]
i = i + 20
out.append((sha, mode, name))
# END for each byte in data stream
return out
-def _find_by_name(tree_data: MutableSequence[EntryTupOrNone], name: str, is_dir: bool, start_at: int
- ) -> EntryTupOrNone:
+def _find_by_name(
+ tree_data: MutableSequence[EntryTupOrNone], name: str, is_dir: bool, start_at: int
+) -> EntryTupOrNone:
"""return data entry matching the given name and tree mode
or None.
Before the item is returned, the respective data item is set
@@ -126,12 +139,12 @@ def _find_by_name(tree_data: MutableSequence[EntryTupOrNone], name: str, is_dir:
return None
-@ overload
+@overload
def _to_full_path(item: None, path_prefix: str) -> None:
...
-@ overload
+@overload
def _to_full_path(item: EntryTup, path_prefix: str) -> EntryTup:
...
@@ -143,8 +156,9 @@ def _to_full_path(item: EntryTupOrNone, path_prefix: str) -> EntryTupOrNone:
return (item[0], item[1], path_prefix + item[2])
-def traverse_trees_recursive(odb: 'GitCmdObjectDB', tree_shas: Sequence[Union[bytes, None]],
- path_prefix: str) -> List[Tuple[EntryTupOrNone, ...]]:
+def traverse_trees_recursive(
+ odb: "GitCmdObjectDB", tree_shas: Sequence[Union[bytes, None]], path_prefix: str
+) -> List[Tuple[EntryTupOrNone, ...]]:
"""
:return: list of list with entries according to the given binary tree-shas.
The result is encoded in a list
@@ -187,7 +201,7 @@ def traverse_trees_recursive(odb: 'GitCmdObjectDB', tree_shas: Sequence[Union[by
entries = [None for _ in range(nt)]
entries[ti] = item
_sha, mode, name = item
- is_dir = S_ISDIR(mode) # type mode bits
+ is_dir = S_ISDIR(mode) # type mode bits
# find this item in all other tree data items
# wrap around, but stop one before our current index, hence
@@ -199,8 +213,13 @@ def traverse_trees_recursive(odb: 'GitCmdObjectDB', tree_shas: Sequence[Union[by
# END for each other item data
# if we are a directory, enter recursion
if is_dir:
- out.extend(traverse_trees_recursive(
- odb, [((ei and ei[0]) or None) for ei in entries], path_prefix + name + '/'))
+ out.extend(
+ traverse_trees_recursive(
+ odb,
+ [((ei and ei[0]) or None) for ei in entries],
+ path_prefix + name + "/",
+ )
+ )
else:
out.append(tuple(_to_full_path(e, path_prefix) for e in entries))
@@ -210,12 +229,14 @@ def traverse_trees_recursive(odb: 'GitCmdObjectDB', tree_shas: Sequence[Union[by
# END for each item
# we are done with one tree, set all its data empty
- del(tree_data[:])
+ del tree_data[:]
# END for each tree_data chunk
return out
-def traverse_tree_recursive(odb: 'GitCmdObjectDB', tree_sha: bytes, path_prefix: str) -> List[EntryTup]:
+def traverse_tree_recursive(
+ odb: "GitCmdObjectDB", tree_sha: bytes, path_prefix: str
+) -> List[EntryTup]:
"""
:return: list of entries of the tree pointed to by the binary tree_sha. An entry
has the following format:
@@ -229,7 +250,7 @@ def traverse_tree_recursive(odb: 'GitCmdObjectDB', tree_sha: bytes, path_prefix:
# unpacking/packing is faster than accessing individual items
for sha, mode, name in data:
if S_ISDIR(mode):
- entries.extend(traverse_tree_recursive(odb, sha, path_prefix + name + '/'))
+ entries.extend(traverse_tree_recursive(odb, sha, path_prefix + name + "/"))
else:
entries.append((sha, mode, path_prefix + name))
# END for each item
diff --git a/git/objects/submodule/base.py b/git/objects/submodule/base.py
index f7820455..84a34206 100644
--- a/git/objects/submodule/base.py
+++ b/git/objects/submodule/base.py
@@ -11,16 +11,12 @@ from git.compat import (
defenc,
is_win,
)
-from git.config import (
- SectionConstraint,
- GitConfigParser,
- cp
-)
+from git.config import SectionConstraint, GitConfigParser, cp
from git.exc import (
InvalidGitRepositoryError,
NoSuchPathError,
RepositoryDirtyError,
- BadName
+ BadName,
)
from git.objects.base import IndexObject, Object
from git.objects.util import TraversableIterableObj
@@ -31,7 +27,7 @@ from git.util import (
RemoteProgress,
rmtree,
unbare_repo,
- IterableList
+ IterableList,
)
from git.util import HIDE_WINDOWS_KNOWN_ERRORS
@@ -42,7 +38,7 @@ from .util import (
sm_name,
sm_section,
SubmoduleConfigParser,
- find_first_remote_branch
+ find_first_remote_branch,
)
@@ -63,7 +59,7 @@ if TYPE_CHECKING:
__all__ = ["Submodule", "UpdateProgress"]
-log = logging.getLogger('git.objects.submodule.base')
+log = logging.getLogger("git.objects.submodule.base")
log.addHandler(logging.NullHandler())
@@ -71,7 +67,11 @@ class UpdateProgress(RemoteProgress):
"""Class providing detailed progress information to the caller who should
derive from it and implement the ``update(...)`` message"""
- CLONE, FETCH, UPDWKTREE = [1 << x for x in range(RemoteProgress._num_op_codes, RemoteProgress._num_op_codes + 3)]
+
+ CLONE, FETCH, UPDWKTREE = [
+ 1 << x
+ for x in range(RemoteProgress._num_op_codes, RemoteProgress._num_op_codes + 3)
+ ]
_num_op_codes: int = RemoteProgress._num_op_codes + 3
__slots__ = ()
@@ -98,25 +98,30 @@ class Submodule(IndexObject, TraversableIterableObj):
All methods work in bare and non-bare repositories."""
_id_attribute_ = "name"
- k_modules_file = '.gitmodules'
- k_head_option = 'branch'
- k_head_default = 'master'
- k_default_mode = stat.S_IFDIR | stat.S_IFLNK # submodules are directories with link-status
+ k_modules_file = ".gitmodules"
+ k_head_option = "branch"
+ k_head_default = "master"
+ k_default_mode = (
+ stat.S_IFDIR | stat.S_IFLNK
+ ) # submodules are directories with link-status
# this is a bogus type for base class compatibility
- type: Literal['submodule'] = 'submodule' # type: ignore
-
- __slots__ = ('_parent_commit', '_url', '_branch_path', '_name', '__weakref__')
- _cache_attrs = ('path', '_url', '_branch_path')
-
- def __init__(self, repo: 'Repo', binsha: bytes,
- mode: Union[int, None] = None,
- path: Union[PathLike, None] = None,
- name: Union[str, None] = None,
- parent_commit: Union[Commit_ish, None] = None,
- url: Union[str, None] = None,
- branch_path: Union[PathLike, None] = None
- ) -> None:
+ type: Literal["submodule"] = "submodule" # type: ignore
+
+ __slots__ = ("_parent_commit", "_url", "_branch_path", "_name", "__weakref__")
+ _cache_attrs = ("path", "_url", "_branch_path")
+
+ def __init__(
+ self,
+ repo: "Repo",
+ binsha: bytes,
+ mode: Union[int, None] = None,
+ path: Union[PathLike, None] = None,
+ name: Union[str, None] = None,
+ parent_commit: Union[Commit_ish, None] = None,
+ url: Union[str, None] = None,
+ branch_path: Union[PathLike, None] = None,
+ ) -> None:
"""Initialize this instance with its attributes. We only document the ones
that differ from ``IndexObject``
@@ -137,32 +142,38 @@ class Submodule(IndexObject, TraversableIterableObj):
self._name = name
def _set_cache_(self, attr: str) -> None:
- if attr in ('path', '_url', '_branch_path'):
+ if attr in ("path", "_url", "_branch_path"):
reader: SectionConstraint = self.config_reader()
# default submodule values
try:
- self.path = reader.get('path')
+ self.path = reader.get("path")
except cp.NoSectionError as e:
if self.repo.working_tree_dir is not None:
- raise ValueError("This submodule instance does not exist anymore in '%s' file"
- % osp.join(self.repo.working_tree_dir, '.gitmodules')) from e
+ raise ValueError(
+ "This submodule instance does not exist anymore in '%s' file"
+ % osp.join(self.repo.working_tree_dir, ".gitmodules")
+ ) from e
# end
- self._url = reader.get('url')
+ self._url = reader.get("url")
# git-python extension values - optional
- self._branch_path = reader.get_value(self.k_head_option, git.Head.to_full_path(self.k_head_default))
- elif attr == '_name':
- raise AttributeError("Cannot retrieve the name of a submodule if it was not set initially")
+ self._branch_path = reader.get_value(
+ self.k_head_option, git.Head.to_full_path(self.k_head_default)
+ )
+ elif attr == "_name":
+ raise AttributeError(
+ "Cannot retrieve the name of a submodule if it was not set initially"
+ )
else:
super(Submodule, self)._set_cache_(attr)
# END handle attribute name
@classmethod
- def _get_intermediate_items(cls, item: 'Submodule') -> IterableList['Submodule']:
+ def _get_intermediate_items(cls, item: "Submodule") -> IterableList["Submodule"]:
""":return: all the submodules of our module repository"""
try:
return cls.list_items(item.module())
except InvalidGitRepositoryError:
- return IterableList('')
+ return IterableList("")
# END handle intermediate items
@classmethod
@@ -188,13 +199,18 @@ class Submodule(IndexObject, TraversableIterableObj):
return self._name
def __repr__(self) -> str:
- return "git.%s(name=%s, path=%s, url=%s, branch_path=%s)"\
- % (type(self).__name__, self._name, self.path, self.url, self.branch_path)
+ return "git.%s(name=%s, path=%s, url=%s, branch_path=%s)" % (
+ type(self).__name__,
+ self._name,
+ self.path,
+ self.url,
+ self.branch_path,
+ )
@classmethod
- def _config_parser(cls, repo: 'Repo',
- parent_commit: Union[Commit_ish, None],
- read_only: bool) -> SubmoduleConfigParser:
+ def _config_parser(
+ cls, repo: "Repo", parent_commit: Union[Commit_ish, None], read_only: bool
+ ) -> SubmoduleConfigParser:
""":return: Config Parser constrained to our submodule in read or write mode
:raise IOError: If the .gitmodules file cannot be found, either locally or in the repository
at the given parent commit. Otherwise the exception would be delayed until the first
@@ -211,17 +227,23 @@ class Submodule(IndexObject, TraversableIterableObj):
if not repo.bare and parent_matches_head and repo.working_tree_dir:
fp_module = osp.join(repo.working_tree_dir, cls.k_modules_file)
else:
- assert parent_commit is not None, "need valid parent_commit in bare repositories"
+ assert (
+ parent_commit is not None
+ ), "need valid parent_commit in bare repositories"
try:
fp_module = cls._sio_modules(parent_commit)
except KeyError as e:
- raise IOError("Could not find %s file in the tree of parent commit %s" %
- (cls.k_modules_file, parent_commit)) from e
+ raise IOError(
+ "Could not find %s file in the tree of parent commit %s"
+ % (cls.k_modules_file, parent_commit)
+ ) from e
# END handle exceptions
# END handle non-bare working tree
if not read_only and (repo.bare or not parent_matches_head):
- raise ValueError("Cannot write blobs of 'historical' submodule configurations")
+ raise ValueError(
+ "Cannot write blobs of 'historical' submodule configurations"
+ )
# END handle writes of historical submodules
return SubmoduleConfigParser(fp_module, read_only=read_only)
@@ -246,7 +268,7 @@ class Submodule(IndexObject, TraversableIterableObj):
def _config_parser_constrained(self, read_only: bool) -> SectionConstraint:
""":return: Config Parser constrained to our submodule in read or write mode"""
try:
- pc: Union['Commit_ish', None] = self.parent_commit
+ pc: Union["Commit_ish", None] = self.parent_commit
except ValueError:
pc = None
# end handle empty parent repository
@@ -255,16 +277,20 @@ class Submodule(IndexObject, TraversableIterableObj):
return SectionConstraint(parser, sm_section(self.name))
@classmethod
- def _module_abspath(cls, parent_repo: 'Repo', path: PathLike, name: str) -> PathLike:
+ def _module_abspath(
+ cls, parent_repo: "Repo", path: PathLike, name: str
+ ) -> PathLike:
if cls._need_gitfile_submodules(parent_repo.git):
- return osp.join(parent_repo.git_dir, 'modules', name)
+ return osp.join(parent_repo.git_dir, "modules", name)
if parent_repo.working_tree_dir:
return osp.join(parent_repo.working_tree_dir, path)
raise NotADirectoryError()
# end
@classmethod
- def _clone_repo(cls, repo: 'Repo', url: str, path: PathLike, name: str, **kwargs: Any) -> 'Repo':
+ def _clone_repo(
+ cls, repo: "Repo", url: str, path: PathLike, name: str, **kwargs: Any
+ ) -> "Repo":
""":return: Repo instance of newly cloned repository
:param repo: our parent repository
:param url: url to clone from
@@ -274,7 +300,7 @@ class Submodule(IndexObject, TraversableIterableObj):
module_abspath = cls._module_abspath(repo, path, name)
module_checkout_path = module_abspath
if cls._need_gitfile_submodules(repo.git):
- kwargs['separate_git_dir'] = module_abspath
+ kwargs["separate_git_dir"] = module_abspath
module_abspath_dir = osp.dirname(module_abspath)
if not osp.isdir(module_abspath_dir):
os.makedirs(module_abspath_dir)
@@ -288,29 +314,36 @@ class Submodule(IndexObject, TraversableIterableObj):
return clone
@classmethod
- def _to_relative_path(cls, parent_repo: 'Repo', path: PathLike) -> PathLike:
+ def _to_relative_path(cls, parent_repo: "Repo", path: PathLike) -> PathLike:
""":return: a path guaranteed to be relative to the given parent - repository
:raise ValueError: if path is not contained in the parent repository's working tree"""
path = to_native_path_linux(path)
- if path.endswith('/'):
+ if path.endswith("/"):
path = path[:-1]
# END handle trailing slash
if osp.isabs(path) and parent_repo.working_tree_dir:
working_tree_linux = to_native_path_linux(parent_repo.working_tree_dir)
if not path.startswith(working_tree_linux):
- raise ValueError("Submodule checkout path '%s' needs to be within the parents repository at '%s'"
- % (working_tree_linux, path))
- path = path[len(working_tree_linux.rstrip('/')) + 1:]
+ raise ValueError(
+ "Submodule checkout path '%s' needs to be within the parents repository at '%s'"
+ % (working_tree_linux, path)
+ )
+ path = path[len(working_tree_linux.rstrip("/")) + 1 :]
if not path:
- raise ValueError("Absolute submodule path '%s' didn't yield a valid relative path" % path)
+ raise ValueError(
+ "Absolute submodule path '%s' didn't yield a valid relative path"
+ % path
+ )
# end verify converted relative path makes sense
# end convert to a relative path
return path
@classmethod
- def _write_git_file_and_module_config(cls, working_tree_dir: PathLike, module_abspath: PathLike) -> None:
+ def _write_git_file_and_module_config(
+ cls, working_tree_dir: PathLike, module_abspath: PathLike
+ ) -> None:
"""Writes a .git file containing a(preferably) relative path to the actual git module repository.
It is an error if the module_abspath cannot be made into a relative path, relative to the working_tree_dir
:note: will overwrite existing files !
@@ -320,26 +353,40 @@ class Submodule(IndexObject, TraversableIterableObj):
:param working_tree_dir: directory to write the .git file into
:param module_abspath: absolute path to the bare repository
"""
- git_file = osp.join(working_tree_dir, '.git')
+ git_file = osp.join(working_tree_dir, ".git")
rela_path = osp.relpath(module_abspath, start=working_tree_dir)
if is_win:
if osp.isfile(git_file):
os.remove(git_file)
- with open(git_file, 'wb') as fp:
+ with open(git_file, "wb") as fp:
fp.write(("gitdir: %s" % rela_path).encode(defenc))
- with GitConfigParser(osp.join(module_abspath, 'config'),
- read_only=False, merge_includes=False) as writer:
- writer.set_value('core', 'worktree',
- to_native_path_linux(osp.relpath(working_tree_dir, start=module_abspath)))
+ with GitConfigParser(
+ osp.join(module_abspath, "config"), read_only=False, merge_includes=False
+ ) as writer:
+ writer.set_value(
+ "core",
+ "worktree",
+ to_native_path_linux(
+ osp.relpath(working_tree_dir, start=module_abspath)
+ ),
+ )
- #{ Edit Interface
+ # { Edit Interface
@classmethod
- def add(cls, repo: 'Repo', name: str, path: PathLike, url: Union[str, None] = None,
- branch: Union[str, None] = None, no_checkout: bool = False, depth: Union[int, None] = None,
- env: Union[Mapping[str, str], None] = None, clone_multi_options: Union[Sequence[TBD], None] = None
- ) -> 'Submodule':
+ def add(
+ cls,
+ repo: "Repo",
+ name: str,
+ path: PathLike,
+ url: Union[str, None] = None,
+ branch: Union[str, None] = None,
+ no_checkout: bool = False,
+ depth: Union[int, None] = None,
+ env: Union[Mapping[str, str], None] = None,
+ clone_multi_options: Union[Sequence[TBD], None] = None,
+ ) -> "Submodule":
"""Add a new submodule to the given repository. This will alter the index
as well as the .gitmodules file, but will not create a new commit.
If the submodule already exists, no matter if the configuration differs
@@ -379,7 +426,9 @@ class Submodule(IndexObject, TraversableIterableObj):
update fails for instance"""
if repo.bare:
- raise InvalidGitRepositoryError("Cannot add submodules to bare repositories")
+ raise InvalidGitRepositoryError(
+ "Cannot add submodules to bare repositories"
+ )
# END handle bare repos
path = cls._to_relative_path(repo, path)
@@ -391,7 +440,14 @@ class Submodule(IndexObject, TraversableIterableObj):
# END assure url correctness
# INSTANTIATE INTERMEDIATE SM
- sm = cls(repo, cls.NULL_BIN_SHA, cls.k_default_mode, path, name, url='invalid-temporary')
+ sm = cls(
+ repo,
+ cls.NULL_BIN_SHA,
+ cls.k_default_mode,
+ path,
+ name,
+ url="invalid-temporary",
+ )
if sm.exists():
# reretrieve submodule from tree
try:
@@ -414,7 +470,9 @@ class Submodule(IndexObject, TraversableIterableObj):
if has_module and url is not None:
if url not in [r.url for r in sm.module().remotes]:
raise ValueError(
- "Specified URL '%s' does not match any remote url of the repository at '%s'" % (url, sm.abspath))
+ "Specified URL '%s' does not match any remote url of the repository at '%s'"
+ % (url, sm.abspath)
+ )
# END check url
# END verify urls match
@@ -422,29 +480,33 @@ class Submodule(IndexObject, TraversableIterableObj):
if url is None:
if not has_module:
- raise ValueError("A URL was not given and a repository did not exist at %s" % path)
+ raise ValueError(
+ "A URL was not given and a repository did not exist at %s" % path
+ )
# END check url
mrepo = sm.module()
# assert isinstance(mrepo, git.Repo)
urls = [r.url for r in mrepo.remotes]
if not urls:
- raise ValueError("Didn't find any remote url in repository at %s" % sm.abspath)
+ raise ValueError(
+ "Didn't find any remote url in repository at %s" % sm.abspath
+ )
# END verify we have url
url = urls[0]
else:
# clone new repo
- kwargs: Dict[str, Union[bool, int, str, Sequence[TBD]]] = {'n': no_checkout}
+ kwargs: Dict[str, Union[bool, int, str, Sequence[TBD]]] = {"n": no_checkout}
if not branch_is_default:
- kwargs['b'] = br.name
+ kwargs["b"] = br.name
# END setup checkout-branch
if depth:
if isinstance(depth, int):
- kwargs['depth'] = depth
+ kwargs["depth"] = depth
else:
raise ValueError("depth should be an integer")
if clone_multi_options:
- kwargs['multi_options'] = clone_multi_options
+ kwargs["multi_options"] = clone_multi_options
# _clone_repo(cls, repo, url, path, name, **kwargs):
mrepo = cls._clone_repo(repo, url, path, name, env=env, **kwargs)
@@ -460,13 +522,13 @@ class Submodule(IndexObject, TraversableIterableObj):
writer: Union[GitConfigParser, SectionConstraint]
with sm.repo.config_writer() as writer:
- writer.set_value(sm_section(name), 'url', url)
+ writer.set_value(sm_section(name), "url", url)
# update configuration and index
index = sm.repo.index
with sm.config_writer(index=index, write=False) as writer:
- writer.set_value('url', url)
- writer.set_value('path', path)
+ writer.set_value("url", url)
+ writer.set_value("path", path)
sm._url = url
if not branch_is_default:
@@ -481,10 +543,18 @@ class Submodule(IndexObject, TraversableIterableObj):
return sm
- def update(self, recursive: bool = False, init: bool = True, to_latest_revision: bool = False,
- progress: Union['UpdateProgress', None] = None, dry_run: bool = False,
- force: bool = False, keep_going: bool = False, env: Union[Mapping[str, str], None] = None,
- clone_multi_options: Union[Sequence[TBD], None] = None) -> 'Submodule':
+ def update(
+ self,
+ recursive: bool = False,
+ init: bool = True,
+ to_latest_revision: bool = False,
+ progress: Union["UpdateProgress", None] = None,
+ dry_run: bool = False,
+ force: bool = False,
+ keep_going: bool = False,
+ env: Union[Mapping[str, str], None] = None,
+ clone_multi_options: Union[Sequence[TBD], None] = None,
+ ) -> "Submodule":
"""Update the repository of this submodule to point to the checkout
we point at with the binsha of this instance.
@@ -527,7 +597,7 @@ class Submodule(IndexObject, TraversableIterableObj):
if progress is None:
progress = UpdateProgress()
# END handle progress
- prefix = ''
+ prefix = ""
if dry_run:
prefix = "DRY-RUN: "
# END handle prefix
@@ -550,17 +620,27 @@ class Submodule(IndexObject, TraversableIterableObj):
op |= BEGIN
# END handle start
- progress.update(op, i, len_rmts, prefix + "Fetching remote %s of submodule %r"
- % (remote, self.name))
- #===============================
+ progress.update(
+ op,
+ i,
+ len_rmts,
+ prefix
+ + "Fetching remote %s of submodule %r" % (remote, self.name),
+ )
+ # ===============================
if not dry_run:
remote.fetch(progress=progress)
# END handle dry-run
- #===============================
+ # ===============================
if i == len_rmts - 1:
op |= END
# END handle end
- progress.update(op, i, len_rmts, prefix + "Done fetching remote of submodule %r" % self.name)
+ progress.update(
+ op,
+ i,
+ len_rmts,
+ prefix + "Done fetching remote of submodule %r" % self.name,
+ )
# END fetch new data
except InvalidGitRepositoryError:
mrepo = None
@@ -574,27 +654,49 @@ class Submodule(IndexObject, TraversableIterableObj):
try:
os.rmdir(checkout_module_abspath)
except OSError as e:
- raise OSError("Module directory at %r does already exist and is non-empty"
- % checkout_module_abspath) from e
+ raise OSError(
+ "Module directory at %r does already exist and is non-empty"
+ % checkout_module_abspath
+ ) from e
# END handle OSError
# END handle directory removal
# don't check it out at first - nonetheless it will create a local
# branch according to the remote-HEAD if possible
- progress.update(BEGIN | CLONE, 0, 1, prefix + "Cloning url '%s' to '%s' in submodule %r" %
- (self.url, checkout_module_abspath, self.name))
+ progress.update(
+ BEGIN | CLONE,
+ 0,
+ 1,
+ prefix
+ + "Cloning url '%s' to '%s' in submodule %r"
+ % (self.url, checkout_module_abspath, self.name),
+ )
if not dry_run:
- mrepo = self._clone_repo(self.repo, self.url, self.path, self.name, n=True, env=env,
- multi_options=clone_multi_options)
+ mrepo = self._clone_repo(
+ self.repo,
+ self.url,
+ self.path,
+ self.name,
+ n=True,
+ env=env,
+ multi_options=clone_multi_options,
+ )
# END handle dry-run
- progress.update(END | CLONE, 0, 1, prefix + "Done cloning to %s" % checkout_module_abspath)
+ progress.update(
+ END | CLONE,
+ 0,
+ 1,
+ prefix + "Done cloning to %s" % checkout_module_abspath,
+ )
if not dry_run:
# see whether we have a valid branch to checkout
try:
- mrepo = cast('Repo', mrepo)
+ mrepo = cast("Repo", mrepo)
# find a remote which has our branch - we try to be flexible
- remote_branch = find_first_remote_branch(mrepo.remotes, self.branch_name)
+ remote_branch = find_first_remote_branch(
+ mrepo.remotes, self.branch_name
+ )
local_branch = mkhead(mrepo, self.branch_path)
# have a valid branch, but no checkout - make sure we can figure
@@ -603,10 +705,15 @@ class Submodule(IndexObject, TraversableIterableObj):
# END initial checkout + branch creation
# make sure HEAD is not detached
- mrepo.head.set_reference(local_branch, logmsg="submodule: attaching head to %s" % local_branch)
+ mrepo.head.set_reference(
+ local_branch,
+ logmsg="submodule: attaching head to %s" % local_branch,
+ )
mrepo.head.reference.set_tracking_branch(remote_branch)
except (IndexError, InvalidGitRepositoryError):
- log.warning("Failed to checkout tracking branch %s", self.branch_path)
+ log.warning(
+ "Failed to checkout tracking branch %s", self.branch_path
+ )
# END handle tracking branch
# NOTE: Have to write the repo config file as well, otherwise
@@ -614,7 +721,7 @@ class Submodule(IndexObject, TraversableIterableObj):
# Maybe this is a good way to assure it doesn't get into our way, but
# we want to stay backwards compatible too ... . Its so redundant !
with self.repo.config_writer() as writer:
- writer.set_value(sm_section(self.name), 'url', self.url)
+ writer.set_value(sm_section(self.name), "url", self.url)
# END handle dry_run
# END handle initialization
@@ -628,7 +735,10 @@ class Submodule(IndexObject, TraversableIterableObj):
# END handle dry_run
if mrepo is not None and to_latest_revision:
- msg_base = "Cannot update to latest revision in repository at %r as " % mrepo.working_dir
+ msg_base = (
+ "Cannot update to latest revision in repository at %r as "
+ % mrepo.working_dir
+ )
if not is_detached:
rref = mrepo.head.reference.tracking_branch()
if rref is not None:
@@ -636,8 +746,11 @@ class Submodule(IndexObject, TraversableIterableObj):
binsha = rcommit.binsha
hexsha = rcommit.hexsha
else:
- log.error("%s a tracking branch was not set for local branch '%s'",
- msg_base, mrepo.head.reference)
+ log.error(
+ "%s a tracking branch was not set for local branch '%s'",
+ msg_base,
+ mrepo.head.reference,
+ )
# END handle remote ref
else:
log.error("%s there was no local tracking branch", msg_base)
@@ -654,28 +767,47 @@ class Submodule(IndexObject, TraversableIterableObj):
may_reset = True
if mrepo.head.commit.binsha != self.NULL_BIN_SHA:
base_commit = mrepo.merge_base(mrepo.head.commit, hexsha)
- if len(base_commit) == 0 or (base_commit[0] is not None and base_commit[0].hexsha == hexsha):
+ if len(base_commit) == 0 or (
+ base_commit[0] is not None and base_commit[0].hexsha == hexsha
+ ):
if force:
msg = "Will force checkout or reset on local branch that is possibly in the future of"
msg += "the commit it will be checked out to, effectively 'forgetting' new commits"
log.debug(msg)
else:
msg = "Skipping %s on branch '%s' of submodule repo '%s' as it contains un-pushed commits"
- msg %= (is_detached and "checkout" or "reset", mrepo.head, mrepo)
+ msg %= (
+ is_detached and "checkout" or "reset",
+ mrepo.head,
+ mrepo,
+ )
log.info(msg)
may_reset = False
# end handle force
# end handle if we are in the future
- if may_reset and not force and mrepo.is_dirty(index=True, working_tree=True, untracked_files=True):
- raise RepositoryDirtyError(mrepo, "Cannot reset a dirty repository")
+ if (
+ may_reset
+ and not force
+ and mrepo.is_dirty(
+ index=True, working_tree=True, untracked_files=True
+ )
+ ):
+ raise RepositoryDirtyError(
+ mrepo, "Cannot reset a dirty repository"
+ )
# end handle force and dirty state
# end handle empty repo
# end verify future/past
- progress.update(BEGIN | UPDWKTREE, 0, 1, prefix +
- "Updating working tree at %s for submodule %r to revision %s"
- % (self.path, self.name, hexsha))
+ progress.update(
+ BEGIN | UPDWKTREE,
+ 0,
+ 1,
+ prefix
+ + "Updating working tree at %s for submodule %r to revision %s"
+ % (self.path, self.name, hexsha),
+ )
if not dry_run and may_reset:
if is_detached:
@@ -688,8 +820,12 @@ class Submodule(IndexObject, TraversableIterableObj):
mrepo.head.reset(hexsha, index=True, working_tree=True)
# END handle checkout
# if we may reset/checkout
- progress.update(END | UPDWKTREE, 0, 1, prefix + "Done updating working tree for submodule %r"
- % self.name)
+ progress.update(
+ END | UPDWKTREE,
+ 0,
+ 1,
+ prefix + "Done updating working tree for submodule %r" % self.name,
+ )
# END update to new commit only if needed
except Exception as err:
if not keep_going:
@@ -703,8 +839,15 @@ class Submodule(IndexObject, TraversableIterableObj):
# in dry_run mode, the module might not exist
if mrepo is not None:
for submodule in self.iter_items(self.module()):
- submodule.update(recursive, init, to_latest_revision, progress=progress, dry_run=dry_run,
- force=force, keep_going=keep_going)
+ submodule.update(
+ recursive,
+ init,
+ to_latest_revision,
+ progress=progress,
+ dry_run=dry_run,
+ force=force,
+ keep_going=keep_going,
+ )
# END handle recursive update
# END handle dry run
# END for each submodule
@@ -712,7 +855,9 @@ class Submodule(IndexObject, TraversableIterableObj):
return self
@unbare_repo
- def move(self, module_path: PathLike, configuration: bool = True, module: bool = True) -> 'Submodule':
+ def move(
+ self, module_path: PathLike, configuration: bool = True, module: bool = True
+ ) -> "Submodule":
"""Move the submodule to a another module path. This involves physically moving
the repository at our current path, changing the configuration, as well as
adjusting our index entry accordingly.
@@ -732,7 +877,9 @@ class Submodule(IndexObject, TraversableIterableObj):
in an inconsistent state if a sub - step fails for some reason
"""
if module + configuration < 1:
- raise ValueError("You must specify to move at least the module or the configuration of the submodule")
+ raise ValueError(
+ "You must specify to move at least the module or the configuration of the submodule"
+ )
# END handle input
module_checkout_path = self._to_relative_path(self.repo, module_path)
@@ -742,9 +889,13 @@ class Submodule(IndexObject, TraversableIterableObj):
return self
# END handle no change
- module_checkout_abspath = join_path_native(str(self.repo.working_tree_dir), module_checkout_path)
+ module_checkout_abspath = join_path_native(
+ str(self.repo.working_tree_dir), module_checkout_path
+ )
if osp.isfile(module_checkout_abspath):
- raise ValueError("Cannot move repository onto a file: %s" % module_checkout_abspath)
+ raise ValueError(
+ "Cannot move repository onto a file: %s" % module_checkout_abspath
+ )
# END handle target files
index = self.repo.index
@@ -780,9 +931,11 @@ class Submodule(IndexObject, TraversableIterableObj):
os.renames(cur_path, module_checkout_abspath)
renamed_module = True
- if osp.isfile(osp.join(module_checkout_abspath, '.git')):
+ if osp.isfile(osp.join(module_checkout_abspath, ".git")):
module_abspath = self._module_abspath(self.repo, self.path, self.name)
- self._write_git_file_and_module_config(module_checkout_abspath, module_abspath)
+ self._write_git_file_and_module_config(
+ module_checkout_abspath, module_abspath
+ )
# end handle git file rewrite
# END move physical module
@@ -794,16 +947,20 @@ class Submodule(IndexObject, TraversableIterableObj):
try:
ekey = index.entry_key(self.path, 0)
entry = index.entries[ekey]
- del(index.entries[ekey])
- nentry = git.IndexEntry(entry[:3] + (module_checkout_path,) + entry[4:])
+ del index.entries[ekey]
+ nentry = git.IndexEntry(
+ entry[:3] + (module_checkout_path,) + entry[4:]
+ )
index.entries[tekey] = nentry
except KeyError as e:
- raise InvalidGitRepositoryError("Submodule's entry at %r did not exist" % (self.path)) from e
+ raise InvalidGitRepositoryError(
+ "Submodule's entry at %r did not exist" % (self.path)
+ ) from e
# END handle submodule doesn't exist
# update configuration
- with self.config_writer(index=index) as writer: # auto-write
- writer.set_value('path', module_checkout_path)
+ with self.config_writer(index=index) as writer: # auto-write
+ writer.set_value("path", module_checkout_path)
self.path = module_checkout_path
# END handle configuration flag
except Exception:
@@ -821,8 +978,13 @@ class Submodule(IndexObject, TraversableIterableObj):
return self
@unbare_repo
- def remove(self, module: bool = True, force: bool = False,
- configuration: bool = True, dry_run: bool = False) -> 'Submodule':
+ def remove(
+ self,
+ module: bool = True,
+ force: bool = False,
+ configuration: bool = True,
+ dry_run: bool = False,
+ ) -> "Submodule":
"""Remove this submodule from the repository. This will remove our entry
from the .gitmodules file and the entry in the .git / config file.
@@ -850,7 +1012,9 @@ class Submodule(IndexObject, TraversableIterableObj):
:raise InvalidGitRepositoryError: thrown if the repository cannot be deleted
:raise OSError: if directories or files could not be removed"""
if not (module or configuration):
- raise ValueError("Need to specify to delete at least the module, or the configuration")
+ raise ValueError(
+ "Need to specify to delete at least the module, or the configuration"
+ )
# END handle parameters
# Recursively remove children of this submodule
@@ -858,12 +1022,14 @@ class Submodule(IndexObject, TraversableIterableObj):
for csm in self.children():
nc += 1
csm.remove(module, force, configuration, dry_run)
- del(csm)
+ del csm
# end
if configuration and not dry_run and nc > 0:
# Assure we don't leave the parent repository in a dirty state, and commit our changes
# It's important for recursive, unforced, deletions to work as expected
- self.module().index.commit("Removed at least one of child-modules of '%s'" % self.name)
+ self.module().index.commit(
+ "Removed at least one of child-modules of '%s'" % self.name
+ )
# end handle recursion
# DELETE REPOSITORY WORKING TREE
@@ -882,7 +1048,9 @@ class Submodule(IndexObject, TraversableIterableObj):
elif osp.isdir(mp):
method = rmtree
elif osp.exists(mp):
- raise AssertionError("Cannot forcibly delete repository as it was neither a link, nor a directory")
+ raise AssertionError(
+ "Cannot forcibly delete repository as it was neither a link, nor a directory"
+ )
# END handle brutal deletion
if not dry_run:
assert method
@@ -893,7 +1061,8 @@ class Submodule(IndexObject, TraversableIterableObj):
if mod.is_dirty(index=True, working_tree=True, untracked_files=True):
raise InvalidGitRepositoryError(
"Cannot delete module at %s with any modifications, unless force is specified"
- % mod.working_tree_dir)
+ % mod.working_tree_dir
+ )
# END check for dirt
# figure out whether we have new commits compared to the remotes
@@ -910,30 +1079,36 @@ class Submodule(IndexObject, TraversableIterableObj):
# not a single remote branch contained all our commits
if len(rrefs) and num_branches_with_new_commits == len(rrefs):
raise InvalidGitRepositoryError(
- "Cannot delete module at %s as there are new commits" % mod.working_tree_dir)
+ "Cannot delete module at %s as there are new commits"
+ % mod.working_tree_dir
+ )
# END handle new commits
# have to manually delete references as python's scoping is
# not existing, they could keep handles open ( on windows this is a problem )
if len(rrefs):
- del(rref) # skipcq: PYL-W0631
+ del rref # skipcq: PYL-W0631
# END handle remotes
- del(rrefs)
- del(remote)
+ del rrefs
+ del remote
# END for each remote
# finally delete our own submodule
if not dry_run:
self._clear_cache()
wtd = mod.working_tree_dir
- del(mod) # release file-handles (windows)
+ del mod # release file-handles (windows)
import gc
+
gc.collect()
try:
rmtree(str(wtd))
except Exception as ex:
if HIDE_WINDOWS_KNOWN_ERRORS:
from unittest import SkipTest
- raise SkipTest("FIXME: fails with: PermissionError\n {}".format(ex)) from ex
+
+ raise SkipTest(
+ "FIXME: fails with: PermissionError\n {}".format(ex)
+ ) from ex
raise
# END delete tree if possible
# END handle force
@@ -945,7 +1120,10 @@ class Submodule(IndexObject, TraversableIterableObj):
except Exception as ex:
if HIDE_WINDOWS_KNOWN_ERRORS:
from unittest import SkipTest
- raise SkipTest(f"FIXME: fails with: PermissionError\n {ex}") from ex
+
+ raise SkipTest(
+ f"FIXME: fails with: PermissionError\n {ex}"
+ ) from ex
else:
raise
# end handle separate bare repository
@@ -961,7 +1139,7 @@ class Submodule(IndexObject, TraversableIterableObj):
# first the index-entry
parent_index = self.repo.index
try:
- del(parent_index.entries[parent_index.entry_key(self.path, 0)])
+ del parent_index.entries[parent_index.entry_key(self.path, 0)]
except KeyError:
pass
# END delete entry
@@ -979,7 +1157,9 @@ class Submodule(IndexObject, TraversableIterableObj):
return self
- def set_parent_commit(self, commit: Union[Commit_ish, None], check: bool = True) -> 'Submodule':
+ def set_parent_commit(
+ self, commit: Union[Commit_ish, None], check: bool = True
+ ) -> "Submodule":
"""Set this instance to use the given commit whose tree is supposed to
contain the .gitmodules blob.
@@ -1000,7 +1180,10 @@ class Submodule(IndexObject, TraversableIterableObj):
pcommit = self.repo.commit(commit)
pctree = pcommit.tree
if self.k_modules_file not in pctree:
- raise ValueError("Tree of commit %s did not contain the %s file" % (commit, self.k_modules_file))
+ raise ValueError(
+ "Tree of commit %s did not contain the %s file"
+ % (commit, self.k_modules_file)
+ )
# END handle exceptions
prev_pc = self._parent_commit
@@ -1010,7 +1193,10 @@ class Submodule(IndexObject, TraversableIterableObj):
parser = self._config_parser(self.repo, self._parent_commit, read_only=True)
if not parser.has_section(sm_section(self.name)):
self._parent_commit = prev_pc
- raise ValueError("Submodule at path %r did not exist in parent commit %s" % (self.path, commit))
+ raise ValueError(
+ "Submodule at path %r did not exist in parent commit %s"
+ % (self.path, commit)
+ )
# END handle submodule did not exist
# END handle checking mode
@@ -1027,8 +1213,9 @@ class Submodule(IndexObject, TraversableIterableObj):
return self
@unbare_repo
- def config_writer(self, index: Union['IndexFile', None] = None, write: bool = True
- ) -> SectionConstraint['SubmoduleConfigParser']:
+ def config_writer(
+ self, index: Union["IndexFile", None] = None, write: bool = True
+ ) -> SectionConstraint["SubmoduleConfigParser"]:
""":return: a config writer instance allowing you to read and write the data
belonging to this submodule into the .gitmodules file.
@@ -1049,7 +1236,7 @@ class Submodule(IndexObject, TraversableIterableObj):
return writer
@unbare_repo
- def rename(self, new_name: str) -> 'Submodule':
+ def rename(self, new_name: str) -> "Submodule":
"""Rename this submodule
:note: This method takes care of renaming the submodule in various places, such as
@@ -1081,7 +1268,9 @@ class Submodule(IndexObject, TraversableIterableObj):
# .git/modules
mod = self.module()
if mod.has_separate_working_tree():
- destination_module_abspath = self._module_abspath(self.repo, self.path, new_name)
+ destination_module_abspath = self._module_abspath(
+ self.repo, self.path, new_name
+ )
source_dir = mod.git_dir
# Let's be sure the submodule name is not so obviously tied to a directory
if str(destination_module_abspath).startswith(str(mod.git_dir)):
@@ -1091,17 +1280,19 @@ class Submodule(IndexObject, TraversableIterableObj):
# end handle self-containment
os.renames(source_dir, destination_module_abspath)
if mod.working_tree_dir:
- self._write_git_file_and_module_config(mod.working_tree_dir, destination_module_abspath)
+ self._write_git_file_and_module_config(
+ mod.working_tree_dir, destination_module_abspath
+ )
# end move separate git repository
return self
- #} END edit interface
+ # } END edit interface
- #{ Query Interface
+ # { Query Interface
@unbare_repo
- def module(self) -> 'Repo':
+ def module(self) -> "Repo":
""":return: Repo instance initialized from the repository at our submodule path
:raise InvalidGitRepositoryError: if a repository was not available. This could
also mean that it was not yet initialized"""
@@ -1113,9 +1304,13 @@ class Submodule(IndexObject, TraversableIterableObj):
return repo
# END handle repo uninitialized
except (InvalidGitRepositoryError, NoSuchPathError) as e:
- raise InvalidGitRepositoryError("No valid repository at %s" % module_checkout_abspath) from e
+ raise InvalidGitRepositoryError(
+ "No valid repository at %s" % module_checkout_abspath
+ ) from e
else:
- raise InvalidGitRepositoryError("Repository at %r was not yet checked out" % module_checkout_abspath)
+ raise InvalidGitRepositoryError(
+ "Repository at %r was not yet checked out" % module_checkout_abspath
+ )
# END handle exceptions
def module_exists(self) -> bool:
@@ -1162,7 +1357,7 @@ class Submodule(IndexObject, TraversableIterableObj):
# END handle object state consistency
@property
- def branch(self) -> 'Head':
+ def branch(self) -> "Head":
""":return: The branch instance that we are to checkout
:raise InvalidGitRepositoryError: if our module is not yet checked out"""
return mkhead(self.module(), self._branch_path)
@@ -1187,7 +1382,7 @@ class Submodule(IndexObject, TraversableIterableObj):
return self._url
@property
- def parent_commit(self) -> 'Commit_ish':
+ def parent_commit(self) -> "Commit_ish":
""":return: Commit instance with the tree containing the .gitmodules file
:note: will always point to the current head's commit if it was not set explicitly"""
if self._parent_commit is None:
@@ -1215,22 +1410,27 @@ class Submodule(IndexObject, TraversableIterableObj):
:raise IOError: If the .gitmodules file/blob could not be read"""
return self._config_parser_constrained(read_only=True)
- def children(self) -> IterableList['Submodule']:
+ def children(self) -> IterableList["Submodule"]:
"""
:return: IterableList(Submodule, ...) an iterable list of submodules instances
which are children of this submodule or 0 if the submodule is not checked out"""
return self._get_intermediate_items(self)
- #} END query interface
+ # } END query interface
- #{ Iterable Interface
+ # { Iterable Interface
@classmethod
- def iter_items(cls, repo: 'Repo', parent_commit: Union[Commit_ish, str] = 'HEAD', *Args: Any, **kwargs: Any
- ) -> Iterator['Submodule']:
+ def iter_items(
+ cls,
+ repo: "Repo",
+ parent_commit: Union[Commit_ish, str] = "HEAD",
+ *Args: Any,
+ **kwargs: Any,
+ ) -> Iterator["Submodule"]:
""":return: iterator yielding Submodule instances available in the given repository"""
try:
- pc = repo.commit(parent_commit) # parent commit instance
+ pc = repo.commit(parent_commit) # parent commit instance
parser = cls._config_parser(repo, pc, read_only=True)
except (IOError, BadName):
return iter([])
@@ -1238,8 +1438,8 @@ class Submodule(IndexObject, TraversableIterableObj):
for sms in parser.sections():
n = sm_name(sms)
- p = parser.get(sms, 'path')
- u = parser.get(sms, 'url')
+ p = parser.get(sms, "path")
+ u = parser.get(sms, "url")
b = cls.k_head_default
if parser.has_option(sms, cls.k_head_option):
b = str(parser.get(sms, cls.k_head_option))
@@ -1248,7 +1448,7 @@ class Submodule(IndexObject, TraversableIterableObj):
# get the binsha
index = repo.index
try:
- rt = pc.tree # root tree
+ rt = pc.tree # root tree
sm = rt[p]
except KeyError:
# try the index, maybe it was just added
@@ -1273,4 +1473,4 @@ class Submodule(IndexObject, TraversableIterableObj):
yield sm
# END for each section
- #} END iterable interface
+ # } END iterable interface
diff --git a/git/objects/submodule/root.py b/git/objects/submodule/root.py
index 08e1f954..16f0f91f 100644
--- a/git/objects/submodule/root.py
+++ b/git/objects/submodule/root.py
@@ -1,7 +1,4 @@
-from .base import (
- Submodule,
- UpdateProgress
-)
+from .base import Submodule, UpdateProgress
from .util import find_first_remote_branch
from git.exc import InvalidGitRepositoryError
import git
@@ -22,14 +19,17 @@ if TYPE_CHECKING:
__all__ = ["RootModule", "RootUpdateProgress"]
-log = logging.getLogger('git.objects.submodule.root')
+log = logging.getLogger("git.objects.submodule.root")
log.addHandler(logging.NullHandler())
class RootUpdateProgress(UpdateProgress):
"""Utility class which adds more opcodes to the UpdateProgress"""
+
REMOVE, PATHCHANGE, BRANCHCHANGE, URLCHANGE = [
- 1 << x for x in range(UpdateProgress._num_op_codes, UpdateProgress._num_op_codes + 4)]
+ 1 << x
+ for x in range(UpdateProgress._num_op_codes, UpdateProgress._num_op_codes + 4)
+ ]
_num_op_codes = UpdateProgress._num_op_codes + 4
__slots__ = ()
@@ -50,32 +50,39 @@ class RootModule(Submodule):
__slots__ = ()
- k_root_name = '__ROOT__'
+ k_root_name = "__ROOT__"
- def __init__(self, repo: 'Repo'):
+ def __init__(self, repo: "Repo"):
# repo, binsha, mode=None, path=None, name = None, parent_commit=None, url=None, ref=None)
super(RootModule, self).__init__(
repo,
binsha=self.NULL_BIN_SHA,
mode=self.k_default_mode,
- path='',
+ path="",
name=self.k_root_name,
parent_commit=repo.head.commit,
- url='',
- branch_path=git.Head.to_full_path(self.k_head_default)
+ url="",
+ branch_path=git.Head.to_full_path(self.k_head_default),
)
def _clear_cache(self) -> None:
"""May not do anything"""
pass
- #{ Interface
-
- def update(self, previous_commit: Union[Commit_ish, None] = None, # type: ignore[override]
- recursive: bool = True, force_remove: bool = False, init: bool = True,
- to_latest_revision: bool = False, progress: Union[None, 'RootUpdateProgress'] = None,
- dry_run: bool = False, force_reset: bool = False, keep_going: bool = False
- ) -> 'RootModule':
+ # { Interface
+
+ def update(
+ self,
+ previous_commit: Union[Commit_ish, None] = None, # type: ignore[override]
+ recursive: bool = True,
+ force_remove: bool = False,
+ init: bool = True,
+ to_latest_revision: bool = False,
+ progress: Union[None, "RootUpdateProgress"] = None,
+ dry_run: bool = False,
+ force_reset: bool = False,
+ keep_going: bool = False,
+ ) -> "RootModule":
"""Update the submodules of this repository to the current HEAD commit.
This method behaves smartly by determining changes of the path of a submodules
repository, next to changes to the to-be-checked-out commit or the branch to be
@@ -109,16 +116,18 @@ class RootModule(Submodule):
In conjunction with dry_run, it can be useful to anticipate all errors when updating submodules
:return: self"""
if self.repo.bare:
- raise InvalidGitRepositoryError("Cannot update submodules in bare repositories")
+ raise InvalidGitRepositoryError(
+ "Cannot update submodules in bare repositories"
+ )
# END handle bare
if progress is None:
progress = RootUpdateProgress()
# END assure progress is set
- prefix = ''
+ prefix = ""
if dry_run:
- prefix = 'DRY-RUN: '
+ prefix = "DRY-RUN: "
repo = self.repo
@@ -137,17 +146,19 @@ class RootModule(Submodule):
previous_commit = cur_commit
# END exception handling
else:
- previous_commit = repo.commit(previous_commit) # obtain commit object
+ previous_commit = repo.commit(previous_commit) # obtain commit object
# END handle previous commit
- psms: 'IterableList[Submodule]' = self.list_items(repo, parent_commit=previous_commit)
- sms: 'IterableList[Submodule]' = self.list_items(repo)
+ psms: "IterableList[Submodule]" = self.list_items(
+ repo, parent_commit=previous_commit
+ )
+ sms: "IterableList[Submodule]" = self.list_items(repo)
spsms = set(psms)
ssms = set(sms)
# HANDLE REMOVALS
###################
- rrsm = (spsms - ssms)
+ rrsm = spsms - ssms
len_rrsm = len(rrsm)
for i, rsm in enumerate(rrsm):
@@ -158,37 +169,58 @@ class RootModule(Submodule):
# fake it into thinking its at the current commit to allow deletion
# of previous module. Trigger the cache to be updated before that
- progress.update(op, i, len_rrsm, prefix + "Removing submodule %r at %s" % (rsm.name, rsm.abspath))
+ progress.update(
+ op,
+ i,
+ len_rrsm,
+ prefix + "Removing submodule %r at %s" % (rsm.name, rsm.abspath),
+ )
rsm._parent_commit = repo.head.commit
- rsm.remove(configuration=False, module=True, force=force_remove, dry_run=dry_run)
+ rsm.remove(
+ configuration=False,
+ module=True,
+ force=force_remove,
+ dry_run=dry_run,
+ )
if i == len_rrsm - 1:
op |= END
# END handle end
- progress.update(op, i, len_rrsm, prefix + "Done removing submodule %r" % rsm.name)
+ progress.update(
+ op, i, len_rrsm, prefix + "Done removing submodule %r" % rsm.name
+ )
# END for each removed submodule
# HANDLE PATH RENAMES
#####################
# url changes + branch changes
- csms = (spsms & ssms)
+ csms = spsms & ssms
len_csms = len(csms)
for i, csm in enumerate(csms):
- psm: 'Submodule' = psms[csm.name]
- sm: 'Submodule' = sms[csm.name]
+ psm: "Submodule" = psms[csm.name]
+ sm: "Submodule" = sms[csm.name]
# PATH CHANGES
##############
if sm.path != psm.path and psm.module_exists():
- progress.update(BEGIN | PATHCHANGE, i, len_csms, prefix +
- "Moving repository of submodule %r from %s to %s"
- % (sm.name, psm.abspath, sm.abspath))
+ progress.update(
+ BEGIN | PATHCHANGE,
+ i,
+ len_csms,
+ prefix
+ + "Moving repository of submodule %r from %s to %s"
+ % (sm.name, psm.abspath, sm.abspath),
+ )
# move the module to the new path
if not dry_run:
psm.move(sm.path, module=True, configuration=False)
# END handle dry_run
progress.update(
- END | PATHCHANGE, i, len_csms, prefix + "Done moving repository of submodule %r" % sm.name)
+ END | PATHCHANGE,
+ i,
+ len_csms,
+ prefix + "Done moving repository of submodule %r" % sm.name,
+ )
# END handle path changes
if sm.module_exists():
@@ -198,14 +230,20 @@ class RootModule(Submodule):
# Add the new remote, remove the old one
# This way, if the url just changes, the commits will not
# have to be re-retrieved
- nn = '__new_origin__'
+ nn = "__new_origin__"
smm = sm.module()
rmts = smm.remotes
# don't do anything if we already have the url we search in place
if len([r for r in rmts if r.url == sm.url]) == 0:
- progress.update(BEGIN | URLCHANGE, i, len_csms, prefix +
- "Changing url of submodule %r from %s to %s" % (sm.name, psm.url, sm.url))
+ progress.update(
+ BEGIN | URLCHANGE,
+ i,
+ len_csms,
+ prefix
+ + "Changing url of submodule %r from %s to %s"
+ % (sm.name, psm.url, sm.url),
+ )
if not dry_run:
assert nn not in [r.name for r in rmts]
@@ -214,7 +252,16 @@ class RootModule(Submodule):
# If we have a tracking branch, it should be available
# in the new remote as well.
- if len([r for r in smr.refs if r.remote_head == sm.branch_name]) == 0:
+ if (
+ len(
+ [
+ r
+ for r in smr.refs
+ if r.remote_head == sm.branch_name
+ ]
+ )
+ == 0
+ ):
raise ValueError(
"Submodule branch named %r was not available in new submodule remote at %r"
% (sm.branch_name, sm.url)
@@ -242,7 +289,9 @@ class RootModule(Submodule):
# Alternatively we could just generate a unique name and leave all
# existing ones in place
raise InvalidGitRepositoryError(
- "Couldn't find original remote-repo at url %r" % psm.url)
+ "Couldn't find original remote-repo at url %r"
+ % psm.url
+ )
# END handle one single remote
# END handle check we found a remote
@@ -277,15 +326,23 @@ class RootModule(Submodule):
# this way, it will be checked out in the next step
# This will change the submodule relative to us, so
# the user will be able to commit the change easily
- log.warning("Current sha %s was not contained in the tracking\
- branch at the new remote, setting it the the remote's tracking branch", sm.hexsha)
+ log.warning(
+ "Current sha %s was not contained in the tracking\
+ branch at the new remote, setting it the the remote's tracking branch",
+ sm.hexsha,
+ )
sm.binsha = rref.commit.binsha
# END reset binsha
# NOTE: All checkout is performed by the base implementation of update
# END handle dry_run
progress.update(
- END | URLCHANGE, i, len_csms, prefix + "Done adjusting url of submodule %r" % (sm.name))
+ END | URLCHANGE,
+ i,
+ len_csms,
+ prefix
+ + "Done adjusting url of submodule %r" % (sm.name),
+ )
# END skip remote handling if new url already exists in module
# END handle url
@@ -294,9 +351,14 @@ class RootModule(Submodule):
if sm.branch_path != psm.branch_path:
# finally, create a new tracking branch which tracks the
# new remote branch
- progress.update(BEGIN | BRANCHCHANGE, i, len_csms, prefix +
- "Changing branch of submodule %r from %s to %s"
- % (sm.name, psm.branch_path, sm.branch_path))
+ progress.update(
+ BEGIN | BRANCHCHANGE,
+ i,
+ len_csms,
+ prefix
+ + "Changing branch of submodule %r from %s to %s"
+ % (sm.name, psm.branch_path, sm.branch_path),
+ )
if not dry_run:
smm = sm.module()
smmr = smm.remotes
@@ -306,13 +368,19 @@ class RootModule(Submodule):
# end for each remote
try:
- tbr = git.Head.create(smm, sm.branch_name, logmsg='branch: Created from HEAD')
+ tbr = git.Head.create(
+ smm,
+ sm.branch_name,
+ logmsg="branch: Created from HEAD",
+ )
except OSError:
# ... or reuse the existing one
tbr = git.Head(smm, sm.branch_path)
# END assure tracking branch exists
- tbr.set_tracking_branch(find_first_remote_branch(smmr, sm.branch_name))
+ tbr.set_tracking_branch(
+ find_first_remote_branch(smmr, sm.branch_name)
+ )
# NOTE: All head-resetting is done in the base implementation of update
# but we will have to checkout the new branch here. As it still points to the currently
# checkout out commit, we don't do any harm.
@@ -321,7 +389,11 @@ class RootModule(Submodule):
# END handle dry_run
progress.update(
- END | BRANCHCHANGE, i, len_csms, prefix + "Done changing branch of submodule %r" % sm.name)
+ END | BRANCHCHANGE,
+ i,
+ len_csms,
+ prefix + "Done changing branch of submodule %r" % sm.name,
+ )
# END handle branch
# END handle
# END for each common submodule
@@ -335,8 +407,15 @@ class RootModule(Submodule):
######################################
for sm in sms:
# update the submodule using the default method
- sm.update(recursive=False, init=init, to_latest_revision=to_latest_revision,
- progress=progress, dry_run=dry_run, force=force_reset, keep_going=keep_going)
+ sm.update(
+ recursive=False,
+ init=init,
+ to_latest_revision=to_latest_revision,
+ progress=progress,
+ dry_run=dry_run,
+ force=force_reset,
+ keep_going=keep_going,
+ )
# update recursively depth first - question is which inconsistent
# state will be better in case it fails somewhere. Defective branch
@@ -345,18 +424,27 @@ class RootModule(Submodule):
if recursive:
# the module would exist by now if we are not in dry_run mode
if sm.module_exists():
- type(self)(sm.module()).update(recursive=True, force_remove=force_remove,
- init=init, to_latest_revision=to_latest_revision,
- progress=progress, dry_run=dry_run, force_reset=force_reset,
- keep_going=keep_going)
+ type(self)(sm.module()).update(
+ recursive=True,
+ force_remove=force_remove,
+ init=init,
+ to_latest_revision=to_latest_revision,
+ progress=progress,
+ dry_run=dry_run,
+ force_reset=force_reset,
+ keep_going=keep_going,
+ )
# END handle dry_run
# END handle recursive
# END for each submodule to update
return self
- def module(self) -> 'Repo':
+ def module(self) -> "Repo":
""":return: the actual repository containing the submodules"""
return self.repo
- #} END interface
-#} END classes
+
+ # } END interface
+
+
+# } END classes
diff --git a/git/objects/submodule/util.py b/git/objects/submodule/util.py
index cc1cd60a..456ae34b 100644
--- a/git/objects/submodule/util.py
+++ b/git/objects/submodule/util.py
@@ -20,10 +20,15 @@ if TYPE_CHECKING:
from git.refs import RemoteReference
-__all__ = ('sm_section', 'sm_name', 'mkhead', 'find_first_remote_branch',
- 'SubmoduleConfigParser')
+__all__ = (
+ "sm_section",
+ "sm_name",
+ "mkhead",
+ "find_first_remote_branch",
+ "SubmoduleConfigParser",
+)
-#{ Utilities
+# { Utilities
def sm_section(name: str) -> str:
@@ -37,12 +42,14 @@ def sm_name(section: str) -> str:
return section[11:-1]
-def mkhead(repo: 'Repo', path: PathLike) -> 'Head':
+def mkhead(repo: "Repo", path: PathLike) -> "Head":
""":return: New branch/head instance"""
return git.Head(repo, git.Head.to_full_path(path))
-def find_first_remote_branch(remotes: Sequence['Remote'], branch_name: str) -> 'RemoteReference':
+def find_first_remote_branch(
+ remotes: Sequence["Remote"], branch_name: str
+) -> "RemoteReference":
"""Find the remote branch matching the name of the given branch or raise InvalidGitRepositoryError"""
for remote in remotes:
try:
@@ -51,12 +58,16 @@ def find_first_remote_branch(remotes: Sequence['Remote'], branch_name: str) -> '
continue
# END exception handling
# END for remote
- raise InvalidGitRepositoryError("Didn't find remote branch '%r' in any of the given remotes" % branch_name)
+ raise InvalidGitRepositoryError(
+ "Didn't find remote branch '%r' in any of the given remotes" % branch_name
+ )
-#} END utilities
+# } END utilities
+
+
+# { Classes
-#{ Classes
class SubmoduleConfigParser(GitConfigParser):
@@ -70,13 +81,13 @@ class SubmoduleConfigParser(GitConfigParser):
"""
def __init__(self, *args: Any, **kwargs: Any) -> None:
- self._smref: Union['ReferenceType[Submodule]', None] = None
+ self._smref: Union["ReferenceType[Submodule]", None] = None
self._index = None
self._auto_write = True
super(SubmoduleConfigParser, self).__init__(*args, **kwargs)
- #{ Interface
- def set_submodule(self, submodule: 'Submodule') -> None:
+ # { Interface
+ def set_submodule(self, submodule: "Submodule") -> None:
"""Set this instance's submodule. It must be called before
the first write operation begins"""
self._smref = weakref.ref(submodule)
@@ -97,14 +108,15 @@ class SubmoduleConfigParser(GitConfigParser):
sm._clear_cache()
# END handle weakref
- #} END interface
+ # } END interface
- #{ Overridden Methods
+ # { Overridden Methods
def write(self) -> None: # type: ignore[override]
rval: None = super(SubmoduleConfigParser, self).write()
self.flush_to_index()
return rval
+
# END overridden methods
-#} END classes
+# } END classes
diff --git a/git/objects/tag.py b/git/objects/tag.py
index 7048eb40..3956a89e 100644
--- a/git/objects/tag.py
+++ b/git/objects/tag.py
@@ -20,23 +20,34 @@ if TYPE_CHECKING:
from .blob import Blob
from .tree import Tree
-__all__ = ("TagObject", )
+__all__ = ("TagObject",)
class TagObject(base.Object):
"""Non-Lightweight tag carrying additional information about an object we are pointing to."""
- type: Literal['tag'] = "tag"
- __slots__ = ("object", "tag", "tagger", "tagged_date", "tagger_tz_offset", "message")
- def __init__(self, repo: 'Repo', binsha: bytes,
- object: Union[None, base.Object] = None,
- tag: Union[None, str] = None,
- tagger: Union[None, 'Actor'] = None,
- tagged_date: Union[int, None] = None,
- tagger_tz_offset: Union[int, None] = None,
- message: Union[str, None] = None
- ) -> None: # @ReservedAssignment
+ type: Literal["tag"] = "tag"
+ __slots__ = (
+ "object",
+ "tag",
+ "tagger",
+ "tagged_date",
+ "tagger_tz_offset",
+ "message",
+ )
+
+ def __init__(
+ self,
+ repo: "Repo",
+ binsha: bytes,
+ object: Union[None, base.Object] = None,
+ tag: Union[None, str] = None,
+ tagger: Union[None, "Actor"] = None,
+ tagged_date: Union[int, None] = None,
+ tagger_tz_offset: Union[int, None] = None,
+ message: Union[str, None] = None,
+ ) -> None: # @ReservedAssignment
"""Initialize a tag object with additional data
:param repo: repository this object is located in
@@ -51,7 +62,7 @@ class TagObject(base.Object):
authored_date is in, in a format similar to time.altzone"""
super(TagObject, self).__init__(repo, binsha)
if object is not None:
- self.object: Union['Commit', 'Blob', 'Tree', 'TagObject'] = object
+ self.object: Union["Commit", "Blob", "Tree", "TagObject"] = object
if tag is not None:
self.tag = tag
if tagger is not None:
@@ -67,19 +78,22 @@ class TagObject(base.Object):
"""Cache all our attributes at once"""
if attr in TagObject.__slots__:
ostream = self.repo.odb.stream(self.binsha)
- lines: List[str] = ostream.read().decode(defenc, 'replace').splitlines()
+ lines: List[str] = ostream.read().decode(defenc, "replace").splitlines()
_obj, hexsha = lines[0].split(" ")
_type_token, type_name = lines[1].split(" ")
- object_type = get_object_type_by_name(type_name.encode('ascii'))
- self.object = \
- object_type(self.repo, hex_to_bin(hexsha))
+ object_type = get_object_type_by_name(type_name.encode("ascii"))
+ self.object = object_type(self.repo, hex_to_bin(hexsha))
self.tag = lines[2][4:] # tag <tag name>
if len(lines) > 3:
tagger_info = lines[3] # tagger <actor> <date>
- self.tagger, self.tagged_date, self.tagger_tz_offset = parse_actor_and_date(tagger_info)
+ (
+ self.tagger,
+ self.tagged_date,
+ self.tagger_tz_offset,
+ ) = parse_actor_and_date(tagger_info)
# line 4 empty - it could mark the beginning of the next header
# in case there really is no message, it would not exist. Otherwise
@@ -87,7 +101,7 @@ class TagObject(base.Object):
if len(lines) > 5:
self.message = "\n".join(lines[5:])
else:
- self.message = ''
+ self.message = ""
# END check our attributes
else:
super(TagObject, self)._set_cache_(attr)
diff --git a/git/objects/tree.py b/git/objects/tree.py
index 22531895..e1fcced7 100644
--- a/git/objects/tree.py
+++ b/git/objects/tree.py
@@ -13,16 +13,24 @@ from .base import IndexObject, IndexObjUnion
from .blob import Blob
from .submodule.base import Submodule
-from .fun import (
- tree_entries_from_data,
- tree_to_stream
-)
+from .fun import tree_entries_from_data, tree_to_stream
# typing -------------------------------------------------
-from typing import (Any, Callable, Dict, Iterable, Iterator, List,
- Tuple, Type, Union, cast, TYPE_CHECKING)
+from typing import (
+ Any,
+ Callable,
+ Dict,
+ Iterable,
+ Iterator,
+ List,
+ Tuple,
+ Type,
+ Union,
+ cast,
+ TYPE_CHECKING,
+)
from git.types import PathLike, Literal
@@ -32,14 +40,15 @@ if TYPE_CHECKING:
TreeCacheTup = Tuple[bytes, int, str]
-TraversedTreeTup = Union[Tuple[Union['Tree', None], IndexObjUnion,
- Tuple['Submodule', 'Submodule']]]
+TraversedTreeTup = Union[
+ Tuple[Union["Tree", None], IndexObjUnion, Tuple["Submodule", "Submodule"]]
+]
# def is_tree_cache(inp: Tuple[bytes, int, str]) -> TypeGuard[TreeCacheTup]:
# return isinstance(inp[0], bytes) and isinstance(inp[1], int) and isinstance([inp], str)
-#--------------------------------------------------------
+# --------------------------------------------------------
cmp: Callable[[str, str], int] = lambda a, b: (a > b) - (a < b)
@@ -60,8 +69,9 @@ def git_cmp(t1: TreeCacheTup, t2: TreeCacheTup) -> int:
return len_a - len_b
-def merge_sort(a: List[TreeCacheTup],
- cmp: Callable[[TreeCacheTup, TreeCacheTup], int]) -> None:
+def merge_sort(
+ a: List[TreeCacheTup], cmp: Callable[[TreeCacheTup, TreeCacheTup], int]
+) -> None:
if len(a) < 2:
return None
@@ -102,7 +112,8 @@ class TreeModifier(object):
Once all adjustments are complete, the _cache, which really is a reference to
the cache of a tree, will be sorted. Assuring it will be in a serializable state"""
- __slots__ = '_cache'
+
+ __slots__ = "_cache"
def __init__(self, cache: List[TreeCacheTup]) -> None:
self._cache = cache
@@ -116,18 +127,21 @@ class TreeModifier(object):
# END for each item in cache
return -1
- #{ Interface
- def set_done(self) -> 'TreeModifier':
+ # { Interface
+ def set_done(self) -> "TreeModifier":
"""Call this method once you are done modifying the tree information.
It may be called several times, but be aware that each call will cause
a sort operation
:return self:"""
merge_sort(self._cache, git_cmp)
return self
- #} END interface
- #{ Mutators
- def add(self, sha: bytes, mode: int, name: str, force: bool = False) -> 'TreeModifier':
+ # } END interface
+
+ # { Mutators
+ def add(
+ self, sha: bytes, mode: int, name: str, force: bool = False
+ ) -> "TreeModifier":
"""Add the given item to the tree. If an item with the given name already
exists, nothing will be done, but a ValueError will be raised if the
sha and mode of the existing item do not match the one you add, unless
@@ -138,7 +152,7 @@ class TreeModifier(object):
:param force: If True, an item with your name and information will overwrite
any existing item with the same name, no matter which information it has
:return: self"""
- if '/' in name:
+ if "/" in name:
raise ValueError("Name must not contain '/' characters")
if (mode >> 12) not in Tree._map_id_to_type:
raise ValueError("Invalid object type according to mode %o" % mode)
@@ -168,7 +182,11 @@ class TreeModifier(object):
puts the caller into responsibility to assure the input is correct.
For more information on the parameters, see ``add``
:param binsha: 20 byte binary sha"""
- assert isinstance(binsha, bytes) and isinstance(mode, int) and isinstance(name, str)
+ assert (
+ isinstance(binsha, bytes)
+ and isinstance(mode, int)
+ and isinstance(name, str)
+ )
tree_cache = (binsha, mode, name)
self._cache.append(tree_cache)
@@ -177,9 +195,9 @@ class TreeModifier(object):
"""Deletes an item with the given name if it exists"""
index = self._index_by_name(name)
if index > -1:
- del(self._cache[index])
+ del self._cache[index]
- #} END mutators
+ # } END mutators
class Tree(IndexObject, git_diff.Diffable, util.Traversable, util.Serializable):
@@ -195,11 +213,11 @@ class Tree(IndexObject, git_diff.Diffable, util.Traversable, util.Serializable):
blob = tree[0]
"""
- type: Literal['tree'] = "tree"
+ type: Literal["tree"] = "tree"
__slots__ = "_cache"
# actual integer ids for comparison
- commit_id = 0o16 # equals stat.S_IFDIR | stat.S_IFLNK - a directory link
+ commit_id = 0o16 # equals stat.S_IFDIR | stat.S_IFLNK - a directory link
blob_id = 0o10
symlink_id = 0o12
tree_id = 0o04
@@ -211,12 +229,20 @@ class Tree(IndexObject, git_diff.Diffable, util.Traversable, util.Serializable):
# tree id added once Tree is defined
}
- def __init__(self, repo: 'Repo', binsha: bytes, mode: int = tree_id << 12, path: Union[PathLike, None] = None):
+ def __init__(
+ self,
+ repo: "Repo",
+ binsha: bytes,
+ mode: int = tree_id << 12,
+ path: Union[PathLike, None] = None,
+ ):
super(Tree, self).__init__(repo, binsha, mode, path)
- @ classmethod
- def _get_intermediate_items(cls, index_object: IndexObjUnion,
- ) -> Union[Tuple['Tree', ...], Tuple[()]]:
+ @classmethod
+ def _get_intermediate_items(
+ cls,
+ index_object: IndexObjUnion,
+ ) -> Union[Tuple["Tree", ...], Tuple[()]]:
if index_object.type == "tree":
return tuple(index_object._iter_convert_to_object(index_object._cache))
return ()
@@ -230,8 +256,9 @@ class Tree(IndexObject, git_diff.Diffable, util.Traversable, util.Serializable):
super(Tree, self)._set_cache_(attr)
# END handle attribute
- def _iter_convert_to_object(self, iterable: Iterable[TreeCacheTup]
- ) -> Iterator[IndexObjUnion]:
+ def _iter_convert_to_object(
+ self, iterable: Iterable[TreeCacheTup]
+ ) -> Iterator[IndexObjUnion]:
"""Iterable yields tuples of (binsha, mode, name), which will be converted
to the respective object representation"""
for binsha, mode, name in iterable:
@@ -239,7 +266,9 @@ class Tree(IndexObject, git_diff.Diffable, util.Traversable, util.Serializable):
try:
yield self._map_id_to_type[mode >> 12](self.repo, binsha, mode, path)
except KeyError as e:
- raise TypeError("Unknown mode %o found in tree data for path '%s'" % (mode, path)) from e
+ raise TypeError(
+ "Unknown mode %o found in tree data for path '%s'" % (mode, path)
+ ) from e
# END for each item
def join(self, file: str) -> IndexObjUnion:
@@ -248,13 +277,13 @@ class Tree(IndexObject, git_diff.Diffable, util.Traversable, util.Serializable):
:raise KeyError: if given file or tree does not exist in tree"""
msg = "Blob or Tree named %r not found"
- if '/' in file:
+ if "/" in file:
tree = self
item = self
- tokens = file.split('/')
+ tokens = file.split("/")
for i, token in enumerate(tokens):
item = tree[token]
- if item.type == 'tree':
+ if item.type == "tree":
tree = item
else:
# safety assertion - blobs are at the end of the path
@@ -268,9 +297,10 @@ class Tree(IndexObject, git_diff.Diffable, util.Traversable, util.Serializable):
return item
else:
for info in self._cache:
- if info[2] == file: # [2] == name
- return self._map_id_to_type[info[1] >> 12](self.repo, info[0], info[1],
- join_path(self.path, info[2]))
+ if info[2] == file: # [2] == name
+ return self._map_id_to_type[info[1] >> 12](
+ self.repo, info[0], info[1], join_path(self.path, info[2])
+ )
# END for each obj
raise KeyError(msg % file)
# END handle long paths
@@ -279,17 +309,17 @@ class Tree(IndexObject, git_diff.Diffable, util.Traversable, util.Serializable):
"""For PY3 only"""
return self.join(file)
- @ property
- def trees(self) -> List['Tree']:
+ @property
+ def trees(self) -> List["Tree"]:
""":return: list(Tree, ...) list of trees directly below this tree"""
return [i for i in self if i.type == "tree"]
- @ property
+ @property
def blobs(self) -> List[Blob]:
""":return: list(Blob, ...) list of blobs directly below this tree"""
return [i for i in self if i.type == "blob"]
- @ property
+ @property
def cache(self) -> TreeModifier:
"""
:return: An object allowing to modify the internal cache. This can be used
@@ -298,16 +328,20 @@ class Tree(IndexObject, git_diff.Diffable, util.Traversable, util.Serializable):
See the ``TreeModifier`` for more information on how to alter the cache"""
return TreeModifier(self._cache)
- def traverse(self, # type: ignore[override]
- predicate: Callable[[Union[IndexObjUnion, TraversedTreeTup], int], bool] = lambda i, d: True,
- prune: Callable[[Union[IndexObjUnion, TraversedTreeTup], int], bool] = lambda i, d: False,
- depth: int = -1,
- branch_first: bool = True,
- visit_once: bool = False,
- ignore_self: int = 1,
- as_edge: bool = False
- ) -> Union[Iterator[IndexObjUnion],
- Iterator[TraversedTreeTup]]:
+ def traverse(
+ self, # type: ignore[override]
+ predicate: Callable[
+ [Union[IndexObjUnion, TraversedTreeTup], int], bool
+ ] = lambda i, d: True,
+ prune: Callable[
+ [Union[IndexObjUnion, TraversedTreeTup], int], bool
+ ] = lambda i, d: False,
+ depth: int = -1,
+ branch_first: bool = True,
+ visit_once: bool = False,
+ ignore_self: int = 1,
+ as_edge: bool = False,
+ ) -> Union[Iterator[IndexObjUnion], Iterator[TraversedTreeTup]]:
"""For documentation, see util.Traversable._traverse()
Trees are set to visit_once = False to gain more performance in the traversal"""
@@ -321,9 +355,17 @@ class Tree(IndexObject, git_diff.Diffable, util.Traversable, util.Serializable):
# ret_tup = itertools.tee(ret, 2)
# assert is_tree_traversed(ret_tup), f"Type is {[type(x) for x in list(ret_tup[0])]}"
# return ret_tup[0]"""
- return cast(Union[Iterator[IndexObjUnion], Iterator[TraversedTreeTup]],
- super(Tree, self)._traverse(predicate, prune, depth, # type: ignore
- branch_first, visit_once, ignore_self))
+ return cast(
+ Union[Iterator[IndexObjUnion], Iterator[TraversedTreeTup]],
+ super(Tree, self)._traverse(
+ predicate,
+ prune,
+ depth, # type: ignore
+ branch_first,
+ visit_once,
+ ignore_self,
+ ),
+ )
def list_traverse(self, *args: Any, **kwargs: Any) -> IterableList[IndexObjUnion]:
"""
@@ -331,7 +373,7 @@ class Tree(IndexObject, git_diff.Diffable, util.Traversable, util.Serializable):
traverse()
Tree -> IterableList[Union['Submodule', 'Tree', 'Blob']]
"""
- return super(Tree, self)._list_traverse(* args, **kwargs)
+ return super(Tree, self)._list_traverse(*args, **kwargs)
# List protocol
@@ -347,7 +389,9 @@ class Tree(IndexObject, git_diff.Diffable, util.Traversable, util.Serializable):
def __getitem__(self, item: Union[str, int, slice]) -> IndexObjUnion:
if isinstance(item, int):
info = self._cache[item]
- return self._map_id_to_type[info[1] >> 12](self.repo, info[0], info[1], join_path(self.path, info[2]))
+ return self._map_id_to_type[info[1] >> 12](
+ self.repo, info[0], info[1], join_path(self.path, info[2])
+ )
if isinstance(item, str):
# compatibility
@@ -378,7 +422,7 @@ class Tree(IndexObject, git_diff.Diffable, util.Traversable, util.Serializable):
def __reversed__(self) -> Iterator[IndexObjUnion]:
return reversed(self._iter_convert_to_object(self._cache)) # type: ignore
- def _serialize(self, stream: 'BytesIO') -> 'Tree':
+ def _serialize(self, stream: "BytesIO") -> "Tree":
"""Serialize this tree into the stream. Please note that we will assume
our tree data to be in a sorted state. If this is not the case, serialization
will not generate a correct tree representation as these are assumed to be sorted
@@ -386,7 +430,7 @@ class Tree(IndexObject, git_diff.Diffable, util.Traversable, util.Serializable):
tree_to_stream(self._cache, stream.write)
return self
- def _deserialize(self, stream: 'BytesIO') -> 'Tree':
+ def _deserialize(self, stream: "BytesIO") -> "Tree":
self._cache = tree_entries_from_data(stream.read())
return self
diff --git a/git/objects/util.py b/git/objects/util.py
index 800eccdf..4ba59c8a 100644
--- a/git/objects/util.py
+++ b/git/objects/util.py
@@ -7,11 +7,7 @@
from abc import ABC, abstractmethod
import warnings
-from git.util import (
- IterableList,
- IterableObj,
- Actor
-)
+from git.util import IterableList, IterableObj, Actor
import re
from collections import deque
@@ -22,10 +18,24 @@ import calendar
from datetime import datetime, timedelta, tzinfo
# typing ------------------------------------------------------------
-from typing import (Any, Callable, Deque, Iterator, Generic, NamedTuple, overload, Sequence, # NOQA: F401
- TYPE_CHECKING, Tuple, Type, TypeVar, Union, cast)
+from typing import (
+ Any,
+ Callable,
+ Deque,
+ Iterator,
+ Generic,
+ NamedTuple,
+ overload,
+ Sequence, # NOQA: F401
+ TYPE_CHECKING,
+ Tuple,
+ Type,
+ TypeVar,
+ Union,
+ cast,
+)
-from git.types import Has_id_attribute, Literal, _T # NOQA: F401
+from git.types import Has_id_attribute, Literal, _T # NOQA: F401
if TYPE_CHECKING:
from io import BytesIO, StringIO
@@ -46,24 +56,38 @@ else:
class TraverseNT(NamedTuple):
depth: int
- item: Union['Traversable', 'Blob']
- src: Union['Traversable', None]
+ item: Union["Traversable", "Blob"]
+ src: Union["Traversable", None]
-T_TIobj = TypeVar('T_TIobj', bound='TraversableIterableObj') # for TraversableIterableObj.traverse()
+T_TIobj = TypeVar(
+ "T_TIobj", bound="TraversableIterableObj"
+) # for TraversableIterableObj.traverse()
-TraversedTup = Union[Tuple[Union['Traversable', None], 'Traversable'], # for commit, submodule
- 'TraversedTreeTup'] # for tree.traverse()
+TraversedTup = Union[
+ Tuple[Union["Traversable", None], "Traversable"], # for commit, submodule
+ "TraversedTreeTup",
+] # for tree.traverse()
# --------------------------------------------------------------------
-__all__ = ('get_object_type_by_name', 'parse_date', 'parse_actor_and_date',
- 'ProcessStreamAdapter', 'Traversable', 'altz_to_utctz_str', 'utctz_to_altz',
- 'verify_utctz', 'Actor', 'tzoffset', 'utc')
+__all__ = (
+ "get_object_type_by_name",
+ "parse_date",
+ "parse_actor_and_date",
+ "ProcessStreamAdapter",
+ "Traversable",
+ "altz_to_utctz_str",
+ "utctz_to_altz",
+ "verify_utctz",
+ "Actor",
+ "tzoffset",
+ "utc",
+)
ZERO = timedelta(0)
-#{ Functions
+# { Functions
def mode_str_to_int(modestr: Union[bytes, str]) -> int:
@@ -82,8 +106,9 @@ def mode_str_to_int(modestr: Union[bytes, str]) -> int:
return mode
-def get_object_type_by_name(object_type_name: bytes
- ) -> Union[Type['Commit'], Type['TagObject'], Type['Tree'], Type['Blob']]:
+def get_object_type_by_name(
+ object_type_name: bytes,
+) -> Union[Type["Commit"], Type["TagObject"], Type["Tree"], Type["Blob"]]:
"""
:return: type suitable to handle the given object type name.
Use the type to create new instances.
@@ -93,18 +118,24 @@ def get_object_type_by_name(object_type_name: bytes
:raise ValueError: In case object_type_name is unknown"""
if object_type_name == b"commit":
from . import commit
+
return commit.Commit
elif object_type_name == b"tag":
from . import tag
+
return tag.TagObject
elif object_type_name == b"blob":
from . import blob
+
return blob.Blob
elif object_type_name == b"tree":
from . import tree
+
return tree.Tree
else:
- raise ValueError("Cannot handle unknown object type: %s" % object_type_name.decode())
+ raise ValueError(
+ "Cannot handle unknown object type: %s" % object_type_name.decode()
+ )
def utctz_to_altz(utctz: str) -> int:
@@ -121,7 +152,7 @@ def altz_to_utctz_str(altz: float) -> str:
utci = -1 * int((float(altz) / 3600) * 100)
utcs = str(abs(utci))
utcs = "0" * (4 - len(utcs)) + utcs
- prefix = (utci < 0 and '-') or '+'
+ prefix = (utci < 0 and "-") or "+"
return prefix + utcs
@@ -133,22 +164,23 @@ def verify_utctz(offset: str) -> str:
raise fmt_exc
if offset[0] not in "+-":
raise fmt_exc
- if offset[1] not in digits or\
- offset[2] not in digits or\
- offset[3] not in digits or\
- offset[4] not in digits:
+ if (
+ offset[1] not in digits
+ or offset[2] not in digits
+ or offset[3] not in digits
+ or offset[4] not in digits
+ ):
raise fmt_exc
# END for each char
return offset
class tzoffset(tzinfo):
-
def __init__(self, secs_west_of_utc: float, name: Union[None, str] = None) -> None:
self._offset = timedelta(seconds=-secs_west_of_utc)
- self._name = name or 'fixed'
+ self._name = name or "fixed"
- def __reduce__(self) -> Tuple[Type['tzoffset'], Tuple[float, str]]:
+ def __reduce__(self) -> Tuple[Type["tzoffset"], Tuple[float, str]]:
return tzoffset, (-self._offset.total_seconds(), self._name)
def utcoffset(self, dt: Union[datetime, None]) -> timedelta:
@@ -161,7 +193,7 @@ class tzoffset(tzinfo):
return ZERO
-utc = tzoffset(0, 'UTC')
+utc = tzoffset(0, "UTC")
def from_timestamp(timestamp: float, tz_offset: float) -> datetime:
@@ -190,23 +222,27 @@ def parse_date(string_date: Union[str, datetime]) -> Tuple[int, int]:
"""
if isinstance(string_date, datetime):
if string_date.tzinfo:
- utcoffset = cast(timedelta, string_date.utcoffset()) # typeguard, if tzinfoand is not None
+ utcoffset = cast(
+ timedelta, string_date.utcoffset()
+ ) # typeguard, if tzinfoand is not None
offset = -int(utcoffset.total_seconds())
return int(string_date.astimezone(utc).timestamp()), offset
else:
- raise ValueError(f"string_date datetime object without tzinfo, {string_date}")
+ raise ValueError(
+ f"string_date datetime object without tzinfo, {string_date}"
+ )
# git time
try:
- if string_date.count(' ') == 1 and string_date.rfind(':') == -1:
+ if string_date.count(" ") == 1 and string_date.rfind(":") == -1:
timestamp, offset_str = string_date.split()
- if timestamp.startswith('@'):
+ if timestamp.startswith("@"):
timestamp = timestamp[1:]
timestamp_int = int(timestamp)
return timestamp_int, utctz_to_altz(verify_utctz(offset_str))
else:
- offset_str = "+0000" # local time by default
- if string_date[-5] in '-+':
+ offset_str = "+0000" # local time by default
+ if string_date[-5] in "-+":
offset_str = verify_utctz(string_date[-5:])
string_date = string_date[:-6] # skip space as well
# END split timezone info
@@ -215,9 +251,9 @@ def parse_date(string_date: Union[str, datetime]) -> Tuple[int, int]:
# now figure out the date and time portion - split time
date_formats = []
splitter = -1
- if ',' in string_date:
+ if "," in string_date:
date_formats.append("%a, %d %b %Y")
- splitter = string_date.rfind(' ')
+ splitter = string_date.rfind(" ")
else:
# iso plus additional
date_formats.append("%Y-%m-%d")
@@ -225,16 +261,16 @@ def parse_date(string_date: Union[str, datetime]) -> Tuple[int, int]:
date_formats.append("%m/%d/%Y")
date_formats.append("%d.%m.%Y")
- splitter = string_date.rfind('T')
+ splitter = string_date.rfind("T")
if splitter == -1:
- splitter = string_date.rfind(' ')
+ splitter = string_date.rfind(" ")
# END handle 'T' and ' '
# END handle rfc or iso
assert splitter > -1
# split date and time
- time_part = string_date[splitter + 1:] # skip space
+ time_part = string_date[splitter + 1 :] # skip space
date_part = string_date[:splitter]
# parse time
@@ -243,9 +279,19 @@ def parse_date(string_date: Union[str, datetime]) -> Tuple[int, int]:
for fmt in date_formats:
try:
dtstruct = time.strptime(date_part, fmt)
- utctime = calendar.timegm((dtstruct.tm_year, dtstruct.tm_mon, dtstruct.tm_mday,
- tstruct.tm_hour, tstruct.tm_min, tstruct.tm_sec,
- dtstruct.tm_wday, dtstruct.tm_yday, tstruct.tm_isdst))
+ utctime = calendar.timegm(
+ (
+ dtstruct.tm_year,
+ dtstruct.tm_mon,
+ dtstruct.tm_mday,
+ tstruct.tm_hour,
+ tstruct.tm_min,
+ tstruct.tm_sec,
+ dtstruct.tm_wday,
+ dtstruct.tm_yday,
+ tstruct.tm_isdst,
+ )
+ )
return int(utctime), offset
except ValueError:
continue
@@ -256,13 +302,15 @@ def parse_date(string_date: Union[str, datetime]) -> Tuple[int, int]:
raise ValueError("no format matched")
# END handle format
except Exception as e:
- raise ValueError(f"Unsupported date format or type: {string_date}, type={type(string_date)}") from e
+ raise ValueError(
+ f"Unsupported date format or type: {string_date}, type={type(string_date)}"
+ ) from e
# END handle exceptions
# precompiled regex
-_re_actor_epoch = re.compile(r'^.+? (.*) (\d+) ([+-]\d+).*$')
-_re_only_actor = re.compile(r'^.+? (.*)$')
+_re_actor_epoch = re.compile(r"^.+? (.*) (\d+) ([+-]\d+).*$")
+_re_only_actor = re.compile(r"^.+? (.*)$")
def parse_actor_and_date(line: str) -> Tuple[Actor, int, int]:
@@ -271,19 +319,21 @@ def parse_actor_and_date(line: str) -> Tuple[Actor, int, int]:
author Tom Preston-Werner <tom@mojombo.com> 1191999972 -0700
:return: [Actor, int_seconds_since_epoch, int_timezone_offset]"""
- actor, epoch, offset = '', '0', '0'
+ actor, epoch, offset = "", "0", "0"
m = _re_actor_epoch.search(line)
if m:
actor, epoch, offset = m.groups()
else:
m = _re_only_actor.search(line)
- actor = m.group(1) if m else line or ''
+ actor = m.group(1) if m else line or ""
return (Actor._from_string(actor), int(epoch), utctz_to_altz(offset))
-#} END functions
+
+# } END functions
-#{ Classes
+# { Classes
+
class ProcessStreamAdapter(object):
@@ -292,9 +342,10 @@ class ProcessStreamAdapter(object):
Use this type to hide the underlying process to provide access only to a specified
stream. The process is usually wrapped into an AutoInterrupt class to kill
it if the instance goes out of scope."""
+
__slots__ = ("_proc", "_stream")
- def __init__(self, process: 'Popen', stream_name: str) -> None:
+ def __init__(self, process: "Popen", stream_name: str) -> None:
self._proc = process
self._stream: StringIO = getattr(process, stream_name) # guessed type
@@ -312,11 +363,12 @@ class Traversable(Protocol):
Defined subclasses = [Commit, Tree, SubModule]
"""
+
__slots__ = ()
@classmethod
@abstractmethod
- def _get_intermediate_items(cls, item: Any) -> Sequence['Traversable']:
+ def _get_intermediate_items(cls, item: Any) -> Sequence["Traversable"]:
"""
Returns:
Tuple of items connected to the given item.
@@ -331,15 +383,18 @@ class Traversable(Protocol):
@abstractmethod
def list_traverse(self, *args: Any, **kwargs: Any) -> Any:
""" """
- warnings.warn("list_traverse() method should only be called from subclasses."
- "Calling from Traversable abstract class will raise NotImplementedError in 3.1.20"
- "Builtin sublclasses are 'Submodule', 'Tree' and 'Commit",
- DeprecationWarning,
- stacklevel=2)
+ warnings.warn(
+ "list_traverse() method should only be called from subclasses."
+ "Calling from Traversable abstract class will raise NotImplementedError in 3.1.20"
+ "Builtin sublclasses are 'Submodule', 'Tree' and 'Commit",
+ DeprecationWarning,
+ stacklevel=2,
+ )
return self._list_traverse(*args, **kwargs)
- def _list_traverse(self, as_edge: bool = False, *args: Any, **kwargs: Any
- ) -> IterableList[Union['Commit', 'Submodule', 'Tree', 'Blob']]:
+ def _list_traverse(
+ self, as_edge: bool = False, *args: Any, **kwargs: Any
+ ) -> IterableList[Union["Commit", "Submodule", "Tree", "Blob"]]:
"""
:return: IterableList with the results of the traversal as produced by
traverse()
@@ -352,11 +407,13 @@ class Traversable(Protocol):
if isinstance(self, Has_id_attribute):
id = self._id_attribute_
else:
- id = "" # shouldn't reach here, unless Traversable subclass created with no _id_attribute_
+ id = "" # shouldn't reach here, unless Traversable subclass created with no _id_attribute_
# could add _id_attribute_ to Traversable, or make all Traversable also Iterable?
if not as_edge:
- out: IterableList[Union['Commit', 'Submodule', 'Tree', 'Blob']] = IterableList(id)
+ out: IterableList[
+ Union["Commit", "Submodule", "Tree", "Blob"]
+ ] = IterableList(id)
out.extend(self.traverse(as_edge=as_edge, *args, **kwargs))
return out
# overloads in subclasses (mypy doesn't allow typing self: subclass)
@@ -366,23 +423,32 @@ class Traversable(Protocol):
out_list: IterableList = IterableList(self.traverse(*args, **kwargs))
return out_list
- @ abstractmethod
+ @abstractmethod
def traverse(self, *args: Any, **kwargs: Any) -> Any:
""" """
- warnings.warn("traverse() method should only be called from subclasses."
- "Calling from Traversable abstract class will raise NotImplementedError in 3.1.20"
- "Builtin sublclasses are 'Submodule', 'Tree' and 'Commit",
- DeprecationWarning,
- stacklevel=2)
+ warnings.warn(
+ "traverse() method should only be called from subclasses."
+ "Calling from Traversable abstract class will raise NotImplementedError in 3.1.20"
+ "Builtin sublclasses are 'Submodule', 'Tree' and 'Commit",
+ DeprecationWarning,
+ stacklevel=2,
+ )
return self._traverse(*args, **kwargs)
- def _traverse(self,
- predicate: Callable[[Union['Traversable', 'Blob', TraversedTup], int], bool] = lambda i, d: True,
- prune: Callable[[Union['Traversable', 'Blob', TraversedTup], int], bool] = lambda i, d: False,
- depth: int = -1, branch_first: bool = True, visit_once: bool = True,
- ignore_self: int = 1, as_edge: bool = False
- ) -> Union[Iterator[Union['Traversable', 'Blob']],
- Iterator[TraversedTup]]:
+ def _traverse(
+ self,
+ predicate: Callable[
+ [Union["Traversable", "Blob", TraversedTup], int], bool
+ ] = lambda i, d: True,
+ prune: Callable[
+ [Union["Traversable", "Blob", TraversedTup], int], bool
+ ] = lambda i, d: False,
+ depth: int = -1,
+ branch_first: bool = True,
+ visit_once: bool = True,
+ ignore_self: int = 1,
+ as_edge: bool = False,
+ ) -> Union[Iterator[Union["Traversable", "Blob"]], Iterator[TraversedTup]]:
""":return: iterator yielding of items found when traversing self
:param predicate: f(i,d) returns False if item i at depth d should not be included in the result
@@ -426,24 +492,30 @@ class Traversable(Protocol):
visited = set()
stack: Deque[TraverseNT] = deque()
- stack.append(TraverseNT(0, self, None)) # self is always depth level 0
-
- def addToStack(stack: Deque[TraverseNT],
- src_item: 'Traversable',
- branch_first: bool,
- depth: int) -> None:
+ stack.append(TraverseNT(0, self, None)) # self is always depth level 0
+
+ def addToStack(
+ stack: Deque[TraverseNT],
+ src_item: "Traversable",
+ branch_first: bool,
+ depth: int,
+ ) -> None:
lst = self._get_intermediate_items(item)
- if not lst: # empty list
+ if not lst: # empty list
return None
if branch_first:
stack.extendleft(TraverseNT(depth, i, src_item) for i in lst)
else:
- reviter = (TraverseNT(depth, lst[i], src_item) for i in range(len(lst) - 1, -1, -1))
+ reviter = (
+ TraverseNT(depth, lst[i], src_item)
+ for i in range(len(lst) - 1, -1, -1)
+ )
stack.extend(reviter)
+
# END addToStack local method
while stack:
- d, item, src = stack.pop() # depth of item, item, item_source
+ d, item, src = stack.pop() # depth of item, item, item_source
if visit_once and item in visited:
continue
@@ -451,8 +523,10 @@ class Traversable(Protocol):
if visit_once:
visited.add(item)
- rval: Union[TraversedTup, 'Traversable', 'Blob']
- if as_edge: # if as_edge return (src, item) unless rrc is None (e.g. for first item)
+ rval: Union[TraversedTup, "Traversable", "Blob"]
+ if (
+ as_edge
+ ): # if as_edge return (src, item) unless rrc is None (e.g. for first item)
rval = (src, item)
else:
rval = item
@@ -473,14 +547,15 @@ class Traversable(Protocol):
# END for each item on work stack
-@ runtime_checkable
+@runtime_checkable
class Serializable(Protocol):
"""Defines methods to serialize and deserialize objects from and into a data stream"""
+
__slots__ = ()
# @abstractmethod
- def _serialize(self, stream: 'BytesIO') -> 'Serializable':
+ def _serialize(self, stream: "BytesIO") -> "Serializable":
"""Serialize the data of this object into the given data stream
:note: a serialized object would ``_deserialize`` into the same object
:param stream: a file-like object
@@ -488,7 +563,7 @@ class Serializable(Protocol):
raise NotImplementedError("To be implemented in subclass")
# @abstractmethod
- def _deserialize(self, stream: 'BytesIO') -> 'Serializable':
+ def _deserialize(self, stream: "BytesIO") -> "Serializable":
"""Deserialize all information regarding this object from the stream
:param stream: a file-like object
:return: self"""
@@ -500,54 +575,76 @@ class TraversableIterableObj(IterableObj, Traversable):
TIobj_tuple = Tuple[Union[T_TIobj, None], T_TIobj]
- def list_traverse(self: T_TIobj, *args: Any, **kwargs: Any) -> IterableList[T_TIobj]:
- return super(TraversableIterableObj, self)._list_traverse(* args, **kwargs)
+ def list_traverse(
+ self: T_TIobj, *args: Any, **kwargs: Any
+ ) -> IterableList[T_TIobj]:
+ return super(TraversableIterableObj, self)._list_traverse(*args, **kwargs)
- @ overload # type: ignore
- def traverse(self: T_TIobj
- ) -> Iterator[T_TIobj]:
+ @overload # type: ignore
+ def traverse(self: T_TIobj) -> Iterator[T_TIobj]:
...
- @ overload
- def traverse(self: T_TIobj,
- predicate: Callable[[Union[T_TIobj, Tuple[Union[T_TIobj, None], T_TIobj]], int], bool],
- prune: Callable[[Union[T_TIobj, Tuple[Union[T_TIobj, None], T_TIobj]], int], bool],
- depth: int, branch_first: bool, visit_once: bool,
- ignore_self: Literal[True],
- as_edge: Literal[False],
- ) -> Iterator[T_TIobj]:
+ @overload
+ def traverse(
+ self: T_TIobj,
+ predicate: Callable[
+ [Union[T_TIobj, Tuple[Union[T_TIobj, None], T_TIobj]], int], bool
+ ],
+ prune: Callable[
+ [Union[T_TIobj, Tuple[Union[T_TIobj, None], T_TIobj]], int], bool
+ ],
+ depth: int,
+ branch_first: bool,
+ visit_once: bool,
+ ignore_self: Literal[True],
+ as_edge: Literal[False],
+ ) -> Iterator[T_TIobj]:
...
- @ overload
- def traverse(self: T_TIobj,
- predicate: Callable[[Union[T_TIobj, Tuple[Union[T_TIobj, None], T_TIobj]], int], bool],
- prune: Callable[[Union[T_TIobj, Tuple[Union[T_TIobj, None], T_TIobj]], int], bool],
- depth: int, branch_first: bool, visit_once: bool,
- ignore_self: Literal[False],
- as_edge: Literal[True],
- ) -> Iterator[Tuple[Union[T_TIobj, None], T_TIobj]]:
+ @overload
+ def traverse(
+ self: T_TIobj,
+ predicate: Callable[
+ [Union[T_TIobj, Tuple[Union[T_TIobj, None], T_TIobj]], int], bool
+ ],
+ prune: Callable[
+ [Union[T_TIobj, Tuple[Union[T_TIobj, None], T_TIobj]], int], bool
+ ],
+ depth: int,
+ branch_first: bool,
+ visit_once: bool,
+ ignore_self: Literal[False],
+ as_edge: Literal[True],
+ ) -> Iterator[Tuple[Union[T_TIobj, None], T_TIobj]]:
...
- @ overload
- def traverse(self: T_TIobj,
- predicate: Callable[[Union[T_TIobj, TIobj_tuple], int], bool],
- prune: Callable[[Union[T_TIobj, TIobj_tuple], int], bool],
- depth: int, branch_first: bool, visit_once: bool,
- ignore_self: Literal[True],
- as_edge: Literal[True],
- ) -> Iterator[Tuple[T_TIobj, T_TIobj]]:
+ @overload
+ def traverse(
+ self: T_TIobj,
+ predicate: Callable[[Union[T_TIobj, TIobj_tuple], int], bool],
+ prune: Callable[[Union[T_TIobj, TIobj_tuple], int], bool],
+ depth: int,
+ branch_first: bool,
+ visit_once: bool,
+ ignore_self: Literal[True],
+ as_edge: Literal[True],
+ ) -> Iterator[Tuple[T_TIobj, T_TIobj]]:
...
- def traverse(self: T_TIobj,
- predicate: Callable[[Union[T_TIobj, TIobj_tuple], int],
- bool] = lambda i, d: True,
- prune: Callable[[Union[T_TIobj, TIobj_tuple], int],
- bool] = lambda i, d: False,
- depth: int = -1, branch_first: bool = True, visit_once: bool = True,
- ignore_self: int = 1, as_edge: bool = False
- ) -> Union[Iterator[T_TIobj],
- Iterator[Tuple[T_TIobj, T_TIobj]],
- Iterator[TIobj_tuple]]:
+ def traverse(
+ self: T_TIobj,
+ predicate: Callable[
+ [Union[T_TIobj, TIobj_tuple], int], bool
+ ] = lambda i, d: True,
+ prune: Callable[[Union[T_TIobj, TIobj_tuple], int], bool] = lambda i, d: False,
+ depth: int = -1,
+ branch_first: bool = True,
+ visit_once: bool = True,
+ ignore_self: int = 1,
+ as_edge: bool = False,
+ ) -> Union[
+ Iterator[T_TIobj], Iterator[Tuple[T_TIobj, T_TIobj]], Iterator[TIobj_tuple]
+ ]:
"""For documentation, see util.Traversable._traverse()"""
"""
@@ -566,8 +663,9 @@ class TraversableIterableObj(IterableObj, Traversable):
assert is_commit_traversed(ret_tup), f"{[type(x) for x in list(ret_tup[0])]}"
return ret_tup[0]
"""
- return cast(Union[Iterator[T_TIobj],
- Iterator[Tuple[Union[None, T_TIobj], T_TIobj]]],
- super(TraversableIterableObj, self)._traverse(
- predicate, prune, depth, branch_first, visit_once, ignore_self, as_edge # type: ignore
- ))
+ return cast(
+ Union[Iterator[T_TIobj], Iterator[Tuple[Union[None, T_TIobj], T_TIobj]]],
+ super(TraversableIterableObj, self)._traverse(
+ predicate, prune, depth, branch_first, visit_once, ignore_self, as_edge # type: ignore
+ ),
+ )
diff --git a/git/refs/head.py b/git/refs/head.py
index d1d72c7b..befdc135 100644
--- a/git/refs/head.py
+++ b/git/refs/head.py
@@ -31,15 +31,18 @@ class HEAD(SymbolicReference):
"""Special case of a Symbolic Reference as it represents the repository's
HEAD reference."""
- _HEAD_NAME = 'HEAD'
- _ORIG_HEAD_NAME = 'ORIG_HEAD'
+
+ _HEAD_NAME = "HEAD"
+ _ORIG_HEAD_NAME = "ORIG_HEAD"
__slots__ = ()
- def __init__(self, repo: 'Repo', path: PathLike = _HEAD_NAME):
+ def __init__(self, repo: "Repo", path: PathLike = _HEAD_NAME):
if path != self._HEAD_NAME:
- raise ValueError("HEAD instance must point to %r, got %r" % (self._HEAD_NAME, path))
+ raise ValueError(
+ "HEAD instance must point to %r, got %r" % (self._HEAD_NAME, path)
+ )
super(HEAD, self).__init__(repo, path)
- self.commit: 'Commit'
+ self.commit: "Commit"
def orig_head(self) -> SymbolicReference:
"""
@@ -47,9 +50,14 @@ class HEAD(SymbolicReference):
to contain the previous value of HEAD"""
return SymbolicReference(self.repo, self._ORIG_HEAD_NAME)
- def reset(self, commit: Union[Commit_ish, SymbolicReference, str] = 'HEAD',
- index: bool = True, working_tree: bool = False,
- paths: Union[PathLike, Sequence[PathLike], None] = None, **kwargs: Any) -> 'HEAD':
+ def reset(
+ self,
+ commit: Union[Commit_ish, SymbolicReference, str] = "HEAD",
+ index: bool = True,
+ working_tree: bool = False,
+ paths: Union[PathLike, Sequence[PathLike], None] = None,
+ **kwargs: Any
+ ) -> "HEAD":
"""Reset our HEAD to the given commit optionally synchronizing
the index and working tree. The reference we refer to will be set to
commit as well.
@@ -90,12 +98,14 @@ class HEAD(SymbolicReference):
if working_tree:
mode = "--hard"
if not index:
- raise ValueError("Cannot reset the working tree if the index is not reset as well")
+ raise ValueError(
+ "Cannot reset the working tree if the index is not reset as well"
+ )
# END working tree handling
try:
- self.repo.git.reset(mode, commit, '--', paths, **kwargs)
+ self.repo.git.reset(mode, commit, "--", paths, **kwargs)
except GitCommandError as e:
# git nowadays may use 1 as status to indicate there are still unstaged
# modifications after the reset
@@ -124,12 +134,19 @@ class Head(Reference):
>>> head.commit.hexsha
'1c09f116cbc2cb4100fb6935bb162daa4723f455'"""
+
_common_path_default = "refs/heads"
k_config_remote = "remote"
- k_config_remote_ref = "merge" # branch to merge from remote
+ k_config_remote_ref = "merge" # branch to merge from remote
@classmethod
- def delete(cls, repo: 'Repo', *heads: 'Union[Head, str]', force: bool = False, **kwargs: Any) -> None:
+ def delete(
+ cls,
+ repo: "Repo",
+ *heads: "Union[Head, str]",
+ force: bool = False,
+ **kwargs: Any
+ ) -> None:
"""Delete the given heads
:param force:
@@ -141,7 +158,9 @@ class Head(Reference):
flag = "-D"
repo.git.branch(flag, *heads)
- def set_tracking_branch(self, remote_reference: Union['RemoteReference', None]) -> 'Head':
+ def set_tracking_branch(
+ self, remote_reference: Union["RemoteReference", None]
+ ) -> "Head":
"""
Configure this branch to track the given remote reference. This will alter
this branch's configuration accordingly.
@@ -150,7 +169,10 @@ class Head(Reference):
any references
:return: self"""
from .remote import RemoteReference
- if remote_reference is not None and not isinstance(remote_reference, RemoteReference):
+
+ if remote_reference is not None and not isinstance(
+ remote_reference, RemoteReference
+ ):
raise ValueError("Incorrect parameter type: %r" % remote_reference)
# END handle type
@@ -162,26 +184,39 @@ class Head(Reference):
writer.remove_section()
else:
writer.set_value(self.k_config_remote, remote_reference.remote_name)
- writer.set_value(self.k_config_remote_ref, Head.to_full_path(remote_reference.remote_head))
+ writer.set_value(
+ self.k_config_remote_ref,
+ Head.to_full_path(remote_reference.remote_head),
+ )
return self
- def tracking_branch(self) -> Union['RemoteReference', None]:
+ def tracking_branch(self) -> Union["RemoteReference", None]:
"""
:return: The remote_reference we are tracking, or None if we are
not a tracking branch"""
from .remote import RemoteReference
+
reader = self.config_reader()
- if reader.has_option(self.k_config_remote) and reader.has_option(self.k_config_remote_ref):
- ref = Head(self.repo, Head.to_full_path(strip_quotes(reader.get_value(self.k_config_remote_ref))))
- remote_refpath = RemoteReference.to_full_path(join_path(reader.get_value(self.k_config_remote), ref.name))
+ if reader.has_option(self.k_config_remote) and reader.has_option(
+ self.k_config_remote_ref
+ ):
+ ref = Head(
+ self.repo,
+ Head.to_full_path(
+ strip_quotes(reader.get_value(self.k_config_remote_ref))
+ ),
+ )
+ remote_refpath = RemoteReference.to_full_path(
+ join_path(reader.get_value(self.k_config_remote), ref.name)
+ )
return RemoteReference(self.repo, remote_refpath)
# END handle have tracking branch
# we are not a tracking branch
return None
- def rename(self, new_path: PathLike, force: bool = False) -> 'Head':
+ def rename(self, new_path: PathLike, force: bool = False) -> "Head":
"""Rename self to a new path
:param new_path:
@@ -202,7 +237,7 @@ class Head(Reference):
self.path = "%s/%s" % (self._common_path_default, new_path)
return self
- def checkout(self, force: bool = False, **kwargs: Any) -> Union['HEAD', 'Head']:
+ def checkout(self, force: bool = False, **kwargs: Any) -> Union["HEAD", "Head"]:
"""Checkout this head by setting the HEAD to this reference, by updating the index
to reflect the tree we point to and by updating the working tree to reflect
the latest index.
@@ -227,9 +262,9 @@ class Head(Reference):
By default it is only allowed to checkout heads - everything else
will leave the HEAD detached which is allowed and possible, but remains
a special state that some tools might not be able to handle."""
- kwargs['f'] = force
- if kwargs['f'] is False:
- kwargs.pop('f')
+ kwargs["f"] = force
+ if kwargs["f"] is False:
+ kwargs.pop("f")
self.repo.git.checkout(self, **kwargs)
if self.repo.head.is_detached:
@@ -237,7 +272,7 @@ class Head(Reference):
else:
return self.repo.active_branch
- #{ Configuration
+ # { Configuration
def _config_parser(self, read_only: bool) -> SectionConstraint[GitConfigParser]:
if read_only:
parser = self.repo.config_reader()
@@ -259,4 +294,4 @@ class Head(Reference):
to options of this head"""
return self._config_parser(read_only=False)
- #} END configuration
+ # } END configuration
diff --git a/git/refs/log.py b/git/refs/log.py
index ddd78bc7..908f93d1 100644
--- a/git/refs/log.py
+++ b/git/refs/log.py
@@ -1,4 +1,3 @@
-
from mmap import mmap
import re
import time as _time
@@ -16,7 +15,7 @@ from git.util import (
assure_directory_exists,
to_native_path,
bin_to_hex,
- file_contents_ro_filepath
+ file_contents_ro_filepath,
)
import os.path as osp
@@ -41,7 +40,8 @@ __all__ = ["RefLog", "RefLogEntry"]
class RefLogEntry(Tuple[str, str, Actor, Tuple[int, int], str]):
"""Named tuple allowing easy access to the revlog data fields"""
- _re_hexsha_only = re.compile('^[0-9A-Fa-f]{40}$')
+
+ _re_hexsha_only = re.compile("^[0-9A-Fa-f]{40}$")
__slots__ = ()
def __repr__(self) -> str:
@@ -52,13 +52,15 @@ class RefLogEntry(Tuple[str, str, Actor, Tuple[int, int], str]):
""":return: a string suitable to be placed in a reflog file"""
act = self.actor
time = self.time
- return "{} {} {} <{}> {!s} {}\t{}\n".format(self.oldhexsha,
- self.newhexsha,
- act.name,
- act.email,
- time[0],
- altz_to_utctz_str(time[1]),
- self.message)
+ return "{} {} {} <{}> {!s} {}\t{}\n".format(
+ self.oldhexsha,
+ self.newhexsha,
+ act.name,
+ act.email,
+ time[0],
+ altz_to_utctz_str(time[1]),
+ self.message,
+ )
@property
def oldhexsha(self) -> str:
@@ -80,7 +82,7 @@ class RefLogEntry(Tuple[str, str, Actor, Tuple[int, int], str]):
"""time as tuple:
* [0] = int(time)
- * [1] = int(timezone_offset) in time.altzone format """
+ * [1] = int(timezone_offset) in time.altzone format"""
return self[3]
@property
@@ -89,8 +91,15 @@ class RefLogEntry(Tuple[str, str, Actor, Tuple[int, int], str]):
return self[4]
@classmethod
- def new(cls, oldhexsha: str, newhexsha: str, actor: Actor, time: int, tz_offset: int, message: str
- ) -> 'RefLogEntry': # skipcq: PYL-W0621
+ def new(
+ cls,
+ oldhexsha: str,
+ newhexsha: str,
+ actor: Actor,
+ time: int,
+ tz_offset: int,
+ message: str,
+ ) -> "RefLogEntry": # skipcq: PYL-W0621
""":return: New instance of a RefLogEntry"""
if not isinstance(actor, Actor):
raise ValueError("Need actor instance, got %s" % actor)
@@ -98,19 +107,21 @@ class RefLogEntry(Tuple[str, str, Actor, Tuple[int, int], str]):
return RefLogEntry((oldhexsha, newhexsha, actor, (time, tz_offset), message))
@classmethod
- def from_line(cls, line: bytes) -> 'RefLogEntry':
+ def from_line(cls, line: bytes) -> "RefLogEntry":
""":return: New RefLogEntry instance from the given revlog line.
:param line: line bytes without trailing newline
:raise ValueError: If line could not be parsed"""
line_str = line.decode(defenc)
- fields = line_str.split('\t', 1)
+ fields = line_str.split("\t", 1)
if len(fields) == 1:
info, msg = fields[0], None
elif len(fields) == 2:
info, msg = fields
else:
- raise ValueError("Line must have up to two TAB-separated fields."
- " Got %s" % repr(line_str))
+ raise ValueError(
+ "Line must have up to two TAB-separated fields."
+ " Got %s" % repr(line_str)
+ )
# END handle first split
oldhexsha = info[:40]
@@ -121,14 +132,13 @@ class RefLogEntry(Tuple[str, str, Actor, Tuple[int, int], str]):
# END if hexsha re doesn't match
# END for each hexsha
- email_end = info.find('>', 82)
+ email_end = info.find(">", 82)
if email_end == -1:
raise ValueError("Missing token: >")
# END handle missing end brace
- actor = Actor._from_string(info[82:email_end + 1])
- time, tz_offset = parse_date(
- info[email_end + 2:]) # skipcq: PYL-W0621
+ actor = Actor._from_string(info[82 : email_end + 1])
+ time, tz_offset = parse_date(info[email_end + 2 :]) # skipcq: PYL-W0621
return RefLogEntry((oldhexsha, newhexsha, actor, (time, tz_offset), msg))
@@ -142,9 +152,9 @@ class RefLog(List[RefLogEntry], Serializable):
Reflog entries are ordered, the first added entry is first in the list, the last
entry, i.e. the last change of the head or reference, is last in the list."""
- __slots__ = ('_path', )
+ __slots__ = ("_path",)
- def __new__(cls, filepath: Union[PathLike, None] = None) -> 'RefLog':
+ def __new__(cls, filepath: Union[PathLike, None] = None) -> "RefLog":
inst = super(RefLog, cls).__new__(cls)
return inst
@@ -159,8 +169,7 @@ class RefLog(List[RefLogEntry], Serializable):
def _read_from_file(self) -> None:
try:
- fmap = file_contents_ro_filepath(
- self._path, stream=True, allow_mmap=True)
+ fmap = file_contents_ro_filepath(self._path, stream=True, allow_mmap=True)
except OSError:
# it is possible and allowed that the file doesn't exist !
return
@@ -175,7 +184,7 @@ class RefLog(List[RefLogEntry], Serializable):
# { Interface
@classmethod
- def from_file(cls, filepath: PathLike) -> 'RefLog':
+ def from_file(cls, filepath: PathLike) -> "RefLog":
"""
:return: a new RefLog instance containing all entries from the reflog
at the given filepath
@@ -184,7 +193,7 @@ class RefLog(List[RefLogEntry], Serializable):
return cls(filepath)
@classmethod
- def path(cls, ref: 'SymbolicReference') -> str:
+ def path(cls, ref: "SymbolicReference") -> str:
"""
:return: string to absolute path at which the reflog of the given ref
instance would be found. The path is not guaranteed to point to a valid
@@ -193,7 +202,7 @@ class RefLog(List[RefLogEntry], Serializable):
return osp.join(ref.repo.git_dir, "logs", to_native_path(ref.path))
@classmethod
- def iter_entries(cls, stream: Union[str, 'BytesIO', mmap]) -> Iterator[RefLogEntry]:
+ def iter_entries(cls, stream: Union[str, "BytesIO", mmap]) -> Iterator[RefLogEntry]:
"""
:return: Iterator yielding RefLogEntry instances, one for each line read
            from the given stream.
@@ -215,7 +224,7 @@ class RefLog(List[RefLogEntry], Serializable):
# END endless loop
@classmethod
- def entry_at(cls, filepath: PathLike, index: int) -> 'RefLogEntry':
+ def entry_at(cls, filepath: PathLike, index: int) -> "RefLogEntry":
"""
:return: RefLogEntry at the given index
@@ -230,7 +239,7 @@ class RefLog(List[RefLogEntry], Serializable):
all other lines. Nonetheless, the whole file has to be read if
the index is negative
"""
- with open(filepath, 'rb') as fp:
+ with open(filepath, "rb") as fp:
if index < 0:
return RefLogEntry.from_line(fp.readlines()[index].strip())
# read until index is reached
@@ -239,7 +248,8 @@ class RefLog(List[RefLogEntry], Serializable):
line = fp.readline()
if not line:
raise IndexError(
- f"Index file ended at line {i+1}, before given index was reached")
+ f"Index file ended at line {i+1}, before given index was reached"
+ )
# END abort on eof
# END handle runup
@@ -263,9 +273,15 @@ class RefLog(List[RefLogEntry], Serializable):
# END handle change
@classmethod
- def append_entry(cls, config_reader: Union[Actor, 'GitConfigParser', 'SectionConstraint', None],
- filepath: PathLike, oldbinsha: bytes, newbinsha: bytes, message: str,
- write: bool = True) -> 'RefLogEntry':
+ def append_entry(
+ cls,
+ config_reader: Union[Actor, "GitConfigParser", "SectionConstraint", None],
+ filepath: PathLike,
+ oldbinsha: bytes,
+ newbinsha: bytes,
+ message: str,
+ write: bool = True,
+ ) -> "RefLogEntry":
"""Append a new log entry to the revlog at filepath.
:param config_reader: configuration reader of the repository - used to obtain
@@ -286,21 +302,27 @@ class RefLog(List[RefLogEntry], Serializable):
raise ValueError("Shas need to be given in binary format")
# END handle sha type
assure_directory_exists(filepath, is_file=True)
- first_line = message.split('\n')[0]
+ first_line = message.split("\n")[0]
if isinstance(config_reader, Actor):
- committer = config_reader # mypy thinks this is Actor | Gitconfigparser, but why?
+ committer = (
+ config_reader # mypy thinks this is Actor | Gitconfigparser, but why?
+ )
else:
committer = Actor.committer(config_reader)
- entry = RefLogEntry((
- bin_to_hex(oldbinsha).decode('ascii'),
- bin_to_hex(newbinsha).decode('ascii'),
- committer, (int(_time.time()), _time.altzone), first_line
- ))
+ entry = RefLogEntry(
+ (
+ bin_to_hex(oldbinsha).decode("ascii"),
+ bin_to_hex(newbinsha).decode("ascii"),
+ committer,
+ (int(_time.time()), _time.altzone),
+ first_line,
+ )
+ )
if write:
lf = LockFile(filepath)
lf._obtain_lock_or_raise()
- fd = open(filepath, 'ab')
+ fd = open(filepath, "ab")
try:
fd.write(entry.format().encode(defenc))
finally:
@@ -309,12 +331,13 @@ class RefLog(List[RefLogEntry], Serializable):
# END handle write operation
return entry
- def write(self) -> 'RefLog':
+ def write(self) -> "RefLog":
"""Write this instance's data to the file we are originating from
:return: self"""
if self._path is None:
raise ValueError(
- "Instance was not initialized with a path, use to_file(...) instead")
+ "Instance was not initialized with a path, use to_file(...) instead"
+ )
# END assert path
self.to_file(self._path)
return self
@@ -322,7 +345,7 @@ class RefLog(List[RefLogEntry], Serializable):
# } END interface
# { Serializable Interface
- def _serialize(self, stream: 'BytesIO') -> 'RefLog':
+ def _serialize(self, stream: "BytesIO") -> "RefLog":
write = stream.write
# write all entries
@@ -331,7 +354,7 @@ class RefLog(List[RefLogEntry], Serializable):
# END for each entry
return self
- def _deserialize(self, stream: 'BytesIO') -> 'RefLog':
+ def _deserialize(self, stream: "BytesIO") -> "RefLog":
self.extend(self.iter_entries(stream))
- # } END serializable interface
+ # } END serializable interface
return self
diff --git a/git/refs/reference.py b/git/refs/reference.py
index 2a33fbff..9b946ec4 100644
--- a/git/refs/reference.py
+++ b/git/refs/reference.py
@@ -8,7 +8,7 @@ from .symbolic import SymbolicReference, T_References
# typing ------------------------------------------------------------------
from typing import Any, Callable, Iterator, Type, Union, TYPE_CHECKING # NOQA
-from git.types import Commit_ish, PathLike, _T # NOQA
+from git.types import Commit_ish, PathLike, _T # NOQA
if TYPE_CHECKING:
from git.repo import Repo
@@ -18,7 +18,7 @@ if TYPE_CHECKING:
__all__ = ["Reference"]
-#{ Utilities
+# { Utilities
def require_remote_ref_path(func: Callable[..., _T]) -> Callable[..., _T]:
@@ -26,24 +26,30 @@ def require_remote_ref_path(func: Callable[..., _T]) -> Callable[..., _T]:
def wrapper(self: T_References, *args: Any) -> _T:
if not self.is_remote():
- raise ValueError("ref path does not point to a remote reference: %s" % self.path)
+ raise ValueError(
+ "ref path does not point to a remote reference: %s" % self.path
+ )
return func(self, *args)
+
# END wrapper
wrapper.__name__ = func.__name__
return wrapper
-#}END utilities
+
+
+# }END utilities
class Reference(SymbolicReference, LazyMixin, IterableObj):
"""Represents a named reference to any object. Subclasses may apply restrictions though,
i.e. Heads can only point to commits."""
+
__slots__ = ()
_points_to_commits_only = False
_resolve_ref_on_create = True
_common_path_default = "refs"
- def __init__(self, repo: 'Repo', path: PathLike, check_path: bool = True) -> None:
+ def __init__(self, repo: "Repo", path: PathLike, check_path: bool = True) -> None:
"""Initialize this instance
:param repo: Our parent repository
@@ -52,19 +58,24 @@ class Reference(SymbolicReference, LazyMixin, IterableObj):
refs/heads/master
:param check_path: if False, you can provide any path. Otherwise the path must start with the
default path prefix of this type."""
- if check_path and not str(path).startswith(self._common_path_default + '/'):
- raise ValueError(f"Cannot instantiate {self.__class__.__name__!r} from path {path}")
+ if check_path and not str(path).startswith(self._common_path_default + "/"):
+ raise ValueError(
+ f"Cannot instantiate {self.__class__.__name__!r} from path {path}"
+ )
self.path: str # SymbolicReference converts to string atm
super(Reference, self).__init__(repo, path)
def __str__(self) -> str:
return self.name
- #{ Interface
+ # { Interface
# @ReservedAssignment
- def set_object(self, object: Union[Commit_ish, 'SymbolicReference', str], logmsg: Union[str, None] = None
- ) -> 'Reference':
+ def set_object(
+ self,
+ object: Union[Commit_ish, "SymbolicReference", str],
+ logmsg: Union[str, None] = None,
+ ) -> "Reference":
"""Special version which checks if the head-log needs an update as well
:return: self"""
oldbinsha = None
@@ -102,21 +113,26 @@ class Reference(SymbolicReference, LazyMixin, IterableObj):
""":return: (shortest) Name of this reference - it may contain path components"""
# first two path tokens are can be removed as they are
# refs/heads or refs/tags or refs/remotes
- tokens = self.path.split('/')
+ tokens = self.path.split("/")
if len(tokens) < 3:
- return self.path # could be refs/HEAD
- return '/'.join(tokens[2:])
+ return self.path # could be refs/HEAD
+ return "/".join(tokens[2:])
@classmethod
- def iter_items(cls: Type[T_References], repo: 'Repo', common_path: Union[PathLike, None] = None,
- *args: Any, **kwargs: Any) -> Iterator[T_References]:
+ def iter_items(
+ cls: Type[T_References],
+ repo: "Repo",
+ common_path: Union[PathLike, None] = None,
+ *args: Any,
+ **kwargs: Any,
+ ) -> Iterator[T_References]:
"""Equivalent to SymbolicReference.iter_items, but will return non-detached
references as well."""
return cls._iter_items(repo, common_path)
- #}END interface
+ # }END interface
- #{ Remote Interface
+ # { Remote Interface
@property # type: ignore ## mypy cannot deal with properties with an extra decorator (2021-04-21)
@require_remote_ref_path
@@ -125,7 +141,7 @@ class Reference(SymbolicReference, LazyMixin, IterableObj):
:return:
Name of the remote we are a reference of, such as 'origin' for a reference
named 'origin/master'"""
- tokens = self.path.split('/')
+ tokens = self.path.split("/")
# /refs/remotes/<remote name>/<branch_name>
return tokens[2]
@@ -135,7 +151,7 @@ class Reference(SymbolicReference, LazyMixin, IterableObj):
""":return: Name of the remote head itself, i.e. master.
:note: The returned name is usually not qualified enough to uniquely identify
a branch"""
- tokens = self.path.split('/')
- return '/'.join(tokens[3:])
+ tokens = self.path.split("/")
+ return "/".join(tokens[3:])
- #} END remote interface
+ # } END remote interface
diff --git a/git/refs/remote.py b/git/refs/remote.py
index 1b416bd0..8ac6bcd2 100644
--- a/git/refs/remote.py
+++ b/git/refs/remote.py
@@ -23,12 +23,18 @@ if TYPE_CHECKING:
class RemoteReference(Head):
"""Represents a reference pointing to a remote head."""
+
_common_path_default = Head._remote_common_path_default
@classmethod
- def iter_items(cls, repo: 'Repo', common_path: Union[PathLike, None] = None,
- remote: Union['Remote', None] = None, *args: Any, **kwargs: Any
- ) -> Iterator['RemoteReference']:
+ def iter_items(
+ cls,
+ repo: "Repo",
+ common_path: Union[PathLike, None] = None,
+ remote: Union["Remote", None] = None,
+ *args: Any,
+ **kwargs: Any
+ ) -> Iterator["RemoteReference"]:
"""Iterate remote references, and if given, constrain them to the given remote"""
common_path = common_path or cls._common_path_default
if remote is not None:
@@ -41,9 +47,10 @@ class RemoteReference(Head):
# implementation does not. mypy doesn't have a way of representing
# tightening the types of arguments in subclasses and recommends Any or
# "type: ignore". (See https://github.com/python/typing/issues/241)
- @ classmethod
- def delete(cls, repo: 'Repo', *refs: 'RemoteReference', # type: ignore
- **kwargs: Any) -> None:
+ @classmethod
+ def delete(
+ cls, repo: "Repo", *refs: "RemoteReference", **kwargs: Any # type: ignore
+ ) -> None:
"""Delete the given remote references
:note:
@@ -64,7 +71,7 @@ class RemoteReference(Head):
pass
# END for each ref
- @ classmethod
+ @classmethod
def create(cls, *args: Any, **kwargs: Any) -> NoReturn:
"""Used to disable this method"""
raise TypeError("Cannot explicitly create remote references")
diff --git a/git/refs/symbolic.py b/git/refs/symbolic.py
index 8d869173..6d9ebb96 100644
--- a/git/refs/symbolic.py
+++ b/git/refs/symbolic.py
@@ -10,19 +10,26 @@ from git.util import (
to_native_path_linux,
assure_directory_exists,
hex_to_bin,
- LockedFD
-)
-from gitdb.exc import (
- BadObject,
- BadName
+ LockedFD,
)
+from gitdb.exc import BadObject, BadName
from .log import RefLog
# typing ------------------------------------------------------------------
-from typing import Any, Iterator, List, Tuple, Type, TypeVar, Union, TYPE_CHECKING, cast # NOQA
-from git.types import Commit_ish, PathLike # NOQA
+from typing import (
+ Any,
+ Iterator,
+ List,
+ Tuple,
+ Type,
+ TypeVar,
+ Union,
+ TYPE_CHECKING,
+ cast,
+) # NOQA
+from git.types import Commit_ish, PathLike # NOQA
if TYPE_CHECKING:
from git.repo import Repo
@@ -32,7 +39,7 @@ if TYPE_CHECKING:
from git.objects.commit import Actor
-T_References = TypeVar('T_References', bound='SymbolicReference')
+T_References = TypeVar("T_References", bound="SymbolicReference")
# ------------------------------------------------------------------------------
@@ -40,10 +47,10 @@ T_References = TypeVar('T_References', bound='SymbolicReference')
__all__ = ["SymbolicReference"]
-def _git_dir(repo: 'Repo', path: Union[PathLike, None]) -> PathLike:
- """ Find the git dir that's appropriate for the path"""
+def _git_dir(repo: "Repo", path: Union[PathLike, None]) -> PathLike:
+ """Find the git dir that's appropriate for the path"""
name = f"{path}"
- if name in ['HEAD', 'ORIG_HEAD', 'FETCH_HEAD', 'index', 'logs']:
+ if name in ["HEAD", "ORIG_HEAD", "FETCH_HEAD", "index", "logs"]:
return repo.git_dir
return repo.common_dir
@@ -55,6 +62,7 @@ class SymbolicReference(object):
specifies a commit.
A typical example for a symbolic reference is HEAD."""
+
__slots__ = ("repo", "path")
_resolve_ref_on_create = False
_points_to_commits_only = True
@@ -62,7 +70,7 @@ class SymbolicReference(object):
_remote_common_path_default = "refs/remotes"
_id_attribute_ = "name"
- def __init__(self, repo: 'Repo', path: PathLike, check_path: bool = False):
+ def __init__(self, repo: "Repo", path: PathLike, check_path: bool = False):
self.repo = repo
self.path = path
@@ -73,7 +81,7 @@ class SymbolicReference(object):
return '<git.%s "%s">' % (self.__class__.__name__, self.path)
def __eq__(self, other: object) -> bool:
- if hasattr(other, 'path'):
+ if hasattr(other, "path"):
other = cast(SymbolicReference, other)
return self.path == other.path
return False
@@ -97,20 +105,20 @@ class SymbolicReference(object):
return join_path_native(_git_dir(self.repo, self.path), self.path)
@classmethod
- def _get_packed_refs_path(cls, repo: 'Repo') -> str:
- return os.path.join(repo.common_dir, 'packed-refs')
+ def _get_packed_refs_path(cls, repo: "Repo") -> str:
+ return os.path.join(repo.common_dir, "packed-refs")
@classmethod
- def _iter_packed_refs(cls, repo: 'Repo') -> Iterator[Tuple[str, str]]:
+ def _iter_packed_refs(cls, repo: "Repo") -> Iterator[Tuple[str, str]]:
"""Returns an iterator yielding pairs of sha1/path pairs (as strings) for the corresponding refs.
:note: The packed refs file will be kept open as long as we iterate"""
try:
- with open(cls._get_packed_refs_path(repo), 'rt', encoding='UTF-8') as fp:
+ with open(cls._get_packed_refs_path(repo), "rt", encoding="UTF-8") as fp:
for line in fp:
line = line.strip()
if not line:
continue
- if line.startswith('#'):
+ if line.startswith("#"):
# "# pack-refs with: peeled fully-peeled sorted"
# the git source code shows "peeled",
# "fully-peeled" and "sorted" as the keywords
@@ -119,18 +127,23 @@ class SymbolicReference(object):
# I looked at master on 2017-10-11,
# commit 111ef79afe, after tag v2.15.0-rc1
# from repo https://github.com/git/git.git
- if line.startswith('# pack-refs with:') and 'peeled' not in line:
- raise TypeError("PackingType of packed-Refs not understood: %r" % line)
+ if (
+ line.startswith("# pack-refs with:")
+ and "peeled" not in line
+ ):
+ raise TypeError(
+ "PackingType of packed-Refs not understood: %r" % line
+ )
# END abort if we do not understand the packing scheme
continue
# END parse comment
# skip dereferenced tag object entries - previous line was actual
# tag reference for it
- if line[0] == '^':
+ if line[0] == "^":
continue
- yield cast(Tuple[str, str], tuple(line.split(' ', 1)))
+ yield cast(Tuple[str, str], tuple(line.split(" ", 1)))
# END for each line
except OSError:
return None
@@ -141,7 +154,9 @@ class SymbolicReference(object):
# alright.
@classmethod
- def dereference_recursive(cls, repo: 'Repo', ref_path: Union[PathLike, None]) -> str:
+ def dereference_recursive(
+ cls, repo: "Repo", ref_path: Union[PathLike, None]
+ ) -> str:
"""
:return: hexsha stored in the reference at the given ref_path, recursively dereferencing all
intermediate references as required
@@ -154,20 +169,23 @@ class SymbolicReference(object):
# END recursive dereferencing
@classmethod
- def _get_ref_info_helper(cls, repo: 'Repo', ref_path: Union[PathLike, None]
- ) -> Union[Tuple[str, None], Tuple[None, str]]:
+ def _get_ref_info_helper(
+ cls, repo: "Repo", ref_path: Union[PathLike, None]
+ ) -> Union[Tuple[str, None], Tuple[None, str]]:
"""Return: (str(sha), str(target_ref_path)) if available, the sha the file at
rela_path points to, or None. target_ref_path is the reference we
point to, or None"""
tokens: Union[None, List[str], Tuple[str, str]] = None
repodir = _git_dir(repo, ref_path)
try:
- with open(os.path.join(repodir, str(ref_path)), 'rt', encoding='UTF-8') as fp:
+ with open(
+ os.path.join(repodir, str(ref_path)), "rt", encoding="UTF-8"
+ ) as fp:
value = fp.read().rstrip()
# Don't only split on spaces, but on whitespace, which allows to parse lines like
# 60b64ef992065e2600bfef6187a97f92398a9144 branch 'master' of git-server:/path/to/repo
tokens = value.split()
- assert(len(tokens) != 0)
+ assert len(tokens) != 0
except OSError:
# Probably we are just packed, find our entry in the packed refs file
# NOTE: We are not a symbolic ref if we are in a packed file, as these
@@ -184,7 +202,7 @@ class SymbolicReference(object):
raise ValueError("Reference at %r does not exist" % ref_path)
# is it a reference ?
- if tokens[0] == 'ref:':
+ if tokens[0] == "ref:":
return (None, tokens[1])
# its a commit
@@ -194,7 +212,9 @@ class SymbolicReference(object):
raise ValueError("Failed to parse reference information from %r" % ref_path)
@classmethod
- def _get_ref_info(cls, repo: 'Repo', ref_path: Union[PathLike, None]) -> Union[Tuple[str, None], Tuple[None, str]]:
+ def _get_ref_info(
+ cls, repo: "Repo", ref_path: Union[PathLike, None]
+ ) -> Union[Tuple[str, None], Tuple[None, str]]:
"""Return: (str(sha), str(target_ref_path)) if available, the sha the file at
rela_path points to, or None. target_ref_path is the reference we
point to, or None"""
@@ -207,25 +227,32 @@ class SymbolicReference(object):
always point to the actual object as it gets re-created on each query"""
# have to be dynamic here as we may be a tag which can point to anything
# Our path will be resolved to the hexsha which will be used accordingly
- return Object.new_from_sha(self.repo, hex_to_bin(self.dereference_recursive(self.repo, self.path)))
+ return Object.new_from_sha(
+ self.repo, hex_to_bin(self.dereference_recursive(self.repo, self.path))
+ )
- def _get_commit(self) -> 'Commit':
+ def _get_commit(self) -> "Commit":
"""
:return:
Commit object we point to, works for detached and non-detached
SymbolicReferences. The symbolic reference will be dereferenced recursively."""
obj = self._get_object()
- if obj.type == 'tag':
+ if obj.type == "tag":
obj = obj.object
# END dereference tag
if obj.type != Commit.type:
- raise TypeError("Symbolic Reference pointed to object %r, commit was required" % obj)
+ raise TypeError(
+ "Symbolic Reference pointed to object %r, commit was required" % obj
+ )
# END handle type
return obj
- def set_commit(self, commit: Union[Commit, 'SymbolicReference', str], logmsg: Union[str, None] = None
- ) -> 'SymbolicReference':
+ def set_commit(
+ self,
+ commit: Union[Commit, "SymbolicReference", str],
+ logmsg: Union[str, None] = None,
+ ) -> "SymbolicReference":
"""As set_object, but restricts the type of object to be a Commit
:raise ValueError: If commit is not a Commit object or doesn't point to
@@ -254,8 +281,11 @@ class SymbolicReference(object):
return self
- def set_object(self, object: Union[Commit_ish, 'SymbolicReference', str], logmsg: Union[str, None] = None
- ) -> 'SymbolicReference':
+ def set_object(
+ self,
+ object: Union[Commit_ish, "SymbolicReference", str],
+ logmsg: Union[str, None] = None,
+ ) -> "SymbolicReference":
"""Set the object we point to, possibly dereference our symbolic reference first.
If the reference does not exist, it will be created
@@ -282,20 +312,25 @@ class SymbolicReference(object):
# set the commit on our reference
return self._get_reference().set_object(object, logmsg)
- commit = property(_get_commit, set_commit, doc="Query or set commits directly") # type: ignore
+ commit = property(_get_commit, set_commit, doc="Query or set commits directly") # type: ignore
object = property(_get_object, set_object, doc="Return the object our ref currently refers to") # type: ignore
- def _get_reference(self) -> 'SymbolicReference':
+ def _get_reference(self) -> "SymbolicReference":
""":return: Reference Object we point to
:raise TypeError: If this symbolic reference is detached, hence it doesn't point
to a reference, but to a commit"""
sha, target_ref_path = self._get_ref_info(self.repo, self.path)
if target_ref_path is None:
- raise TypeError("%s is a detached symbolic reference as it points to %r" % (self, sha))
+ raise TypeError(
+ "%s is a detached symbolic reference as it points to %r" % (self, sha)
+ )
return self.from_path(self.repo, target_ref_path)
- def set_reference(self, ref: Union[Commit_ish, 'SymbolicReference', str],
- logmsg: Union[str, None] = None) -> 'SymbolicReference':
+ def set_reference(
+ self,
+ ref: Union[Commit_ish, "SymbolicReference", str],
+ logmsg: Union[str, None] = None,
+ ) -> "SymbolicReference":
"""Set ourselves to the given ref. It will stay a symbol if the ref is a Reference.
Otherwise an Object, given as Object instance or refspec, is assumed and if valid,
will be set which effectively detaches the reference if it was a purely
@@ -322,7 +357,7 @@ class SymbolicReference(object):
write_value = ref.hexsha
elif isinstance(ref, str):
try:
- obj = self.repo.rev_parse(ref + "^{}") # optionally deref tags
+ obj = self.repo.rev_parse(ref + "^{}") # optionally deref tags
write_value = obj.hexsha
except (BadObject, BadName) as e:
raise ValueError("Could not extract object from %s" % ref) from e
@@ -336,7 +371,7 @@ class SymbolicReference(object):
raise TypeError("Require commit, got %r" % obj)
# END verify type
- oldbinsha: bytes = b''
+ oldbinsha: bytes = b""
if logmsg is not None:
try:
oldbinsha = self.commit.binsha
@@ -352,7 +387,7 @@ class SymbolicReference(object):
fd = lfd.open(write=True, stream=True)
ok = True
try:
- fd.write(write_value.encode('utf-8') + b'\n')
+ fd.write(write_value.encode("utf-8") + b"\n")
lfd.commit()
ok = True
finally:
@@ -365,7 +400,7 @@ class SymbolicReference(object):
return self
# aliased reference
- reference: Union['Head', 'TagReference', 'RemoteReference', 'Reference']
+ reference: Union["Head", "TagReference", "RemoteReference", "Reference"]
reference = property(_get_reference, set_reference, doc="Returns the Reference we point to") # type: ignore
ref = reference
@@ -393,7 +428,7 @@ class SymbolicReference(object):
except TypeError:
return True
- def log(self) -> 'RefLog':
+ def log(self) -> "RefLog":
"""
:return: RefLog for this reference. Its last entry reflects the latest change
applied to this reference
@@ -402,8 +437,12 @@ class SymbolicReference(object):
instead of calling this method repeatedly. It should be considered read-only."""
return RefLog.from_file(RefLog.path(self))
- def log_append(self, oldbinsha: bytes, message: Union[str, None],
- newbinsha: Union[bytes, None] = None) -> 'RefLogEntry':
+ def log_append(
+ self,
+ oldbinsha: bytes,
+ message: Union[str, None],
+ newbinsha: Union[bytes, None] = None,
+ ) -> "RefLogEntry":
"""Append a logentry to the logfile of this ref
:param oldbinsha: binary sha this ref used to point to
@@ -415,7 +454,9 @@ class SymbolicReference(object):
# correct to allow overriding the committer on a per-commit level.
# See https://github.com/gitpython-developers/GitPython/pull/146
try:
- committer_or_reader: Union['Actor', 'GitConfigParser'] = self.commit.committer
+ committer_or_reader: Union[
+ "Actor", "GitConfigParser"
+ ] = self.commit.committer
except ValueError:
committer_or_reader = self.repo.config_reader()
# end handle newly cloned repositories
@@ -423,11 +464,13 @@ class SymbolicReference(object):
newbinsha = self.commit.binsha
if message is None:
- message = ''
+ message = ""
- return RefLog.append_entry(committer_or_reader, RefLog.path(self), oldbinsha, newbinsha, message)
+ return RefLog.append_entry(
+ committer_or_reader, RefLog.path(self), oldbinsha, newbinsha, message
+ )
- def log_entry(self, index: int) -> 'RefLogEntry':
+ def log_entry(self, index: int) -> "RefLogEntry":
""":return: RefLogEntry at the given index
:param index: python list compatible positive or negative index
@@ -437,7 +480,7 @@ class SymbolicReference(object):
return RefLog.entry_at(RefLog.path(self), index)
@classmethod
- def to_full_path(cls, path: Union[PathLike, 'SymbolicReference']) -> PathLike:
+ def to_full_path(cls, path: Union[PathLike, "SymbolicReference"]) -> PathLike:
"""
:return: string with a full repository-relative path which can be used to initialize
a Reference instance, for instance by using ``Reference.from_path``"""
@@ -447,11 +490,11 @@ class SymbolicReference(object):
if not cls._common_path_default:
return full_ref_path
if not str(path).startswith(cls._common_path_default + "/"):
- full_ref_path = '%s/%s' % (cls._common_path_default, path)
+ full_ref_path = "%s/%s" % (cls._common_path_default, path)
return full_ref_path
@classmethod
- def delete(cls, repo: 'Repo', path: PathLike) -> None:
+ def delete(cls, repo: "Repo", path: PathLike) -> None:
"""Delete the reference at the given path
:param repo:
@@ -469,20 +512,23 @@ class SymbolicReference(object):
# check packed refs
pack_file_path = cls._get_packed_refs_path(repo)
try:
- with open(pack_file_path, 'rb') as reader:
+ with open(pack_file_path, "rb") as reader:
new_lines = []
made_change = False
dropped_last_line = False
for line_bytes in reader:
line = line_bytes.decode(defenc)
- _, _, line_ref = line.partition(' ')
+ _, _, line_ref = line.partition(" ")
line_ref = line_ref.strip()
# keep line if it is a comment or if the ref to delete is not
# in the line
# If we deleted the last line and this one is a tag-reference object,
# we drop it as well
- if (line.startswith('#') or full_ref_path != line_ref) and \
- (not dropped_last_line or dropped_last_line and not line.startswith('^')):
+ if (line.startswith("#") or full_ref_path != line_ref) and (
+ not dropped_last_line
+ or dropped_last_line
+ and not line.startswith("^")
+ ):
new_lines.append(line)
dropped_last_line = False
continue
@@ -496,7 +542,7 @@ class SymbolicReference(object):
if made_change:
# write-binary is required, otherwise windows will
# open the file in text mode and change LF to CRLF !
- with open(pack_file_path, 'wb') as fd:
+ with open(pack_file_path, "wb") as fd:
fd.writelines(line.encode(defenc) for line in new_lines)
except OSError:
@@ -509,9 +555,15 @@ class SymbolicReference(object):
# END remove reflog
@classmethod
- def _create(cls: Type[T_References], repo: 'Repo', path: PathLike, resolve: bool,
- reference: Union['SymbolicReference', str], force: bool,
- logmsg: Union[str, None] = None) -> T_References:
+ def _create(
+ cls: Type[T_References],
+ repo: "Repo",
+ path: PathLike,
+ resolve: bool,
+ reference: Union["SymbolicReference", str],
+ force: bool,
+ logmsg: Union[str, None] = None,
+ ) -> T_References:
"""internal method used to create a new symbolic reference.
If resolve is False, the reference will be taken as is, creating
a proper symbolic reference. Otherwise it will be resolved to the
@@ -532,11 +584,13 @@ class SymbolicReference(object):
target_data = str(target.path)
if not resolve:
target_data = "ref: " + target_data
- with open(abs_ref_path, 'rb') as fd:
+ with open(abs_ref_path, "rb") as fd:
existing_data = fd.read().decode(defenc).strip()
if existing_data != target_data:
- raise OSError("Reference at %r does already exist, pointing to %r, requested was %r" %
- (full_ref_path, existing_data, target_data))
+ raise OSError(
+ "Reference at %r does already exist, pointing to %r, requested was %r"
+ % (full_ref_path, existing_data, target_data)
+ )
# END no force handling
ref = cls(repo, full_ref_path)
@@ -544,9 +598,15 @@ class SymbolicReference(object):
return ref
@classmethod
- def create(cls: Type[T_References], repo: 'Repo', path: PathLike,
- reference: Union['SymbolicReference', str] = 'HEAD',
- logmsg: Union[str, None] = None, force: bool = False, **kwargs: Any) -> T_References:
+ def create(
+ cls: Type[T_References],
+ repo: "Repo",
+ path: PathLike,
+ reference: Union["SymbolicReference", str] = "HEAD",
+ logmsg: Union[str, None] = None,
+ force: bool = False,
+ **kwargs: Any,
+ ) -> T_References:
"""Create a new symbolic reference, hence a reference pointing , to another reference.
:param repo:
@@ -575,9 +635,11 @@ class SymbolicReference(object):
already exists.
:note: This does not alter the current HEAD, index or Working Tree"""
- return cls._create(repo, path, cls._resolve_ref_on_create, reference, force, logmsg)
+ return cls._create(
+ repo, path, cls._resolve_ref_on_create, reference, force, logmsg
+ )
- def rename(self, new_path: PathLike, force: bool = False) -> 'SymbolicReference':
+ def rename(self, new_path: PathLike, force: bool = False) -> "SymbolicReference":
"""Rename self to a new path
:param new_path:
@@ -590,7 +652,7 @@ class SymbolicReference(object):
already exists. It will be overwritten in that case
:return: self
- :raise OSError: In case a file at path but a different contents already exists """
+ :raise OSError: In case a file at path but a different contents already exists"""
new_path = self.to_full_path(new_path)
if self.path == new_path:
return self
@@ -600,9 +662,9 @@ class SymbolicReference(object):
if os.path.isfile(new_abs_path):
if not force:
# if they point to the same file, its not an error
- with open(new_abs_path, 'rb') as fd1:
+ with open(new_abs_path, "rb") as fd1:
f1 = fd1.read().strip()
- with open(cur_abs_path, 'rb') as fd2:
+ with open(cur_abs_path, "rb") as fd2:
f2 = fd2.read().strip()
if f1 != f2:
raise OSError("File at path %r already exists" % new_abs_path)
@@ -623,26 +685,31 @@ class SymbolicReference(object):
return self
@classmethod
- def _iter_items(cls: Type[T_References], repo: 'Repo', common_path: Union[PathLike, None] = None
- ) -> Iterator[T_References]:
+ def _iter_items(
+ cls: Type[T_References], repo: "Repo", common_path: Union[PathLike, None] = None
+ ) -> Iterator[T_References]:
if common_path is None:
common_path = cls._common_path_default
rela_paths = set()
# walk loose refs
# Currently we do not follow links
- for root, dirs, files in os.walk(join_path_native(repo.common_dir, common_path)):
- if 'refs' not in root.split(os.sep): # skip non-refs subfolders
- refs_id = [d for d in dirs if d == 'refs']
+ for root, dirs, files in os.walk(
+ join_path_native(repo.common_dir, common_path)
+ ):
+ if "refs" not in root.split(os.sep): # skip non-refs subfolders
+ refs_id = [d for d in dirs if d == "refs"]
if refs_id:
- dirs[0:] = ['refs']
+ dirs[0:] = ["refs"]
# END prune non-refs folders
for f in files:
- if f == 'packed-refs':
+ if f == "packed-refs":
continue
abs_path = to_native_path_linux(join_path(root, f))
- rela_paths.add(abs_path.replace(to_native_path_linux(repo.common_dir) + '/', ""))
+ rela_paths.add(
+ abs_path.replace(to_native_path_linux(repo.common_dir) + "/", "")
+ )
# END for each file in root directory
# END for each directory to walk
@@ -662,8 +729,13 @@ class SymbolicReference(object):
# END for each sorted relative refpath
@classmethod
- def iter_items(cls: Type[T_References], repo: 'Repo', common_path: Union[PathLike, None] = None,
- *args: Any, **kwargs: Any) -> Iterator[T_References]:
+ def iter_items(
+ cls: Type[T_References],
+ repo: "Repo",
+ common_path: Union[PathLike, None] = None,
+ *args: Any,
+ **kwargs: Any,
+ ) -> Iterator[T_References]:
"""Find all refs in the repository
:param repo: is the Repo
@@ -680,10 +752,16 @@ class SymbolicReference(object):
List is lexicographically sorted
The returned objects represent actual subclasses, such as Head or TagReference"""
- return (r for r in cls._iter_items(repo, common_path) if r.__class__ == SymbolicReference or not r.is_detached)
+ return (
+ r
+ for r in cls._iter_items(repo, common_path)
+ if r.__class__ == SymbolicReference or not r.is_detached
+ )
@classmethod
- def from_path(cls: Type[T_References], repo: 'Repo', path: PathLike) -> T_References:
+ def from_path(
+ cls: Type[T_References], repo: "Repo", path: PathLike
+ ) -> T_References:
"""
:param path: full .git-directory-relative path name to the Reference to instantiate
:note: use to_full_path() if you only have a partial path of a known Reference Type
@@ -696,7 +774,15 @@ class SymbolicReference(object):
# Names like HEAD are inserted after the refs module is imported - we have an import dependency
# cycle and don't want to import these names in-function
from . import HEAD, Head, RemoteReference, TagReference, Reference
- for ref_type in (HEAD, Head, RemoteReference, TagReference, Reference, SymbolicReference):
+
+ for ref_type in (
+ HEAD,
+ Head,
+ RemoteReference,
+ TagReference,
+ Reference,
+ SymbolicReference,
+ ):
try:
instance: T_References
instance = ref_type(repo, path)
@@ -709,7 +795,9 @@ class SymbolicReference(object):
pass
# END exception handling
# END for each type to try
- raise ValueError("Could not find reference type suitable to handle path %r" % path)
+ raise ValueError(
+ "Could not find reference type suitable to handle path %r" % path
+ )
def is_remote(self) -> bool:
""":return: True if this symbolic reference points to a remote branch"""
diff --git a/git/refs/tag.py b/git/refs/tag.py
index 8cc79edd..96494148 100644
--- a/git/refs/tag.py
+++ b/git/refs/tag.py
@@ -36,22 +36,27 @@ class TagReference(Reference):
_common_path_default = Reference._common_path_default + "/" + _common_default
@property
- def commit(self) -> 'Commit': # type: ignore[override] # LazyMixin has unrelated commit method
+ def commit(self) -> "Commit": # type: ignore[override] # LazyMixin has unrelated commit method
""":return: Commit object the tag ref points to
:raise ValueError: if the tag points to a tree or blob"""
obj = self.object
- while obj.type != 'commit':
+ while obj.type != "commit":
if obj.type == "tag":
# it is a tag object which carries the commit as an object - we can point to anything
obj = obj.object
else:
- raise ValueError(("Cannot resolve commit as tag %s points to a %s object - " +
- "use the `.object` property instead to access it") % (self, obj.type))
+ raise ValueError(
+ (
+ "Cannot resolve commit as tag %s points to a %s object - "
+ + "use the `.object` property instead to access it"
+ )
+ % (self, obj.type)
+ )
return obj
@property
- def tag(self) -> Union['TagObject', None]:
+ def tag(self) -> Union["TagObject", None]:
"""
:return: Tag object this tag ref points to or None in case
we are a light weight tag"""
@@ -69,10 +74,15 @@ class TagReference(Reference):
return Reference._get_object(self)
@classmethod
- def create(cls: Type['TagReference'], repo: 'Repo', path: PathLike,
- reference: Union[str, 'SymbolicReference'] = 'HEAD',
- logmsg: Union[str, None] = None,
- force: bool = False, **kwargs: Any) -> 'TagReference':
+ def create(
+ cls: Type["TagReference"],
+ repo: "Repo",
+ path: PathLike,
+ reference: Union[str, "SymbolicReference"] = "HEAD",
+ logmsg: Union[str, None] = None,
+ force: bool = False,
+ **kwargs: Any
+ ) -> "TagReference":
"""Create a new tag reference.
:param path:
@@ -100,16 +110,16 @@ class TagReference(Reference):
Additional keyword arguments to be passed to git-tag
:return: A new TagReference"""
- if 'ref' in kwargs and kwargs['ref']:
- reference = kwargs['ref']
+ if "ref" in kwargs and kwargs["ref"]:
+ reference = kwargs["ref"]
if logmsg:
- kwargs['m'] = logmsg
- elif 'message' in kwargs and kwargs['message']:
- kwargs['m'] = kwargs['message']
+ kwargs["m"] = logmsg
+ elif "message" in kwargs and kwargs["message"]:
+ kwargs["m"] = kwargs["message"]
if force:
- kwargs['f'] = True
+ kwargs["f"] = True
args = (path, reference)
@@ -117,7 +127,7 @@ class TagReference(Reference):
return TagReference(repo, "%s/%s" % (cls._common_path_default, path))
@classmethod
- def delete(cls, repo: 'Repo', *tags: 'TagReference') -> None: # type: ignore[override]
+ def delete(cls, repo: "Repo", *tags: "TagReference") -> None: # type: ignore[override]
"""Delete the given existing tag or tags"""
repo.git.tag("-d", *tags)
diff --git a/git/remote.py b/git/remote.py
index 56f3c5b3..8cd79057 100644
--- a/git/remote.py
+++ b/git/remote.py
@@ -9,7 +9,7 @@ import logging
import re
from git.cmd import handle_process_output, Git
-from git.compat import (defenc, force_text)
+from git.compat import defenc, force_text
from git.exc import GitCommandError
from git.util import (
LazyMixin,
@@ -27,28 +27,36 @@ from git.config import (
SectionConstraint,
cp,
)
-from git.refs import (
- Head,
- Reference,
- RemoteReference,
- SymbolicReference,
- TagReference
-)
+from git.refs import Head, Reference, RemoteReference, SymbolicReference, TagReference
# typing-------------------------------------------------------
-from typing import (Any, Callable, Dict, Iterator, List, NoReturn, Optional, Sequence,
- TYPE_CHECKING, Type, Union, cast, overload)
+from typing import (
+ Any,
+ Callable,
+ Dict,
+ Iterator,
+ List,
+ NoReturn,
+ Optional,
+ Sequence,
+ TYPE_CHECKING,
+ Type,
+ Union,
+ cast,
+ overload,
+)
from git.types import PathLike, Literal, Commit_ish
if TYPE_CHECKING:
from git.repo.base import Repo
from git.objects.submodule.base import UpdateProgress
+
# from git.objects.commit import Commit
# from git.objects import Blob, Tree, TagObject
-flagKeyLiteral = Literal[' ', '!', '+', '-', '*', '=', 't', '?']
+flagKeyLiteral = Literal[" ", "!", "+", "-", "*", "=", "t", "?"]
# def is_flagKeyLiteral(inp: str) -> TypeGuard[flagKeyLiteral]:
# return inp in [' ', '!', '+', '-', '=', '*', 't', '?']
@@ -57,18 +65,22 @@ flagKeyLiteral = Literal[' ', '!', '+', '-', '*', '=', 't', '?']
# -------------------------------------------------------------
-log = logging.getLogger('git.remote')
+log = logging.getLogger("git.remote")
log.addHandler(logging.NullHandler())
-__all__ = ('RemoteProgress', 'PushInfo', 'FetchInfo', 'Remote')
+__all__ = ("RemoteProgress", "PushInfo", "FetchInfo", "Remote")
-#{ Utilities
+# { Utilities
-def add_progress(kwargs: Any, git: Git,
- progress: Union[RemoteProgress, 'UpdateProgress', Callable[..., RemoteProgress], None]
- ) -> Any:
+def add_progress(
+ kwargs: Any,
+ git: Git,
+ progress: Union[
+ RemoteProgress, "UpdateProgress", Callable[..., RemoteProgress], None
+ ],
+) -> Any:
"""Add the --progress flag to the given kwargs dict if supported by the
git command. If the actual progress in the given progress instance is not
given, we do not request any progress
@@ -76,31 +88,33 @@ def add_progress(kwargs: Any, git: Git,
if progress is not None:
v = git.version_info[:2]
if v >= (1, 7):
- kwargs['progress'] = True
+ kwargs["progress"] = True
# END handle --progress
# END handle progress
return kwargs
-#} END utilities
+
+# } END utilities
-@ overload
+@overload
def to_progress_instance(progress: None) -> RemoteProgress:
...
-@ overload
+@overload
def to_progress_instance(progress: Callable[..., Any]) -> CallableRemoteProgress:
...
-@ overload
+@overload
def to_progress_instance(progress: RemoteProgress) -> RemoteProgress:
...
-def to_progress_instance(progress: Union[Callable[..., Any], RemoteProgress, None]
- ) -> Union[RemoteProgress, CallableRemoteProgress]:
+def to_progress_instance(
+ progress: Union[Callable[..., Any], RemoteProgress, None]
+) -> Union[RemoteProgress, CallableRemoteProgress]:
"""Given the 'progress' return a suitable object derived from
RemoteProgress().
"""
@@ -130,25 +144,53 @@ class PushInfo(IterableObj, object):
info.old_commit # commit at which the remote_ref was standing before we pushed
# it to local_ref.commit. Will be None if an error was indicated
info.summary # summary line providing human readable english text about the push
- """
- __slots__ = ('local_ref', 'remote_ref_string', 'flags', '_old_commit_sha', '_remote', 'summary')
- _id_attribute_ = 'pushinfo'
-
- NEW_TAG, NEW_HEAD, NO_MATCH, REJECTED, REMOTE_REJECTED, REMOTE_FAILURE, DELETED, \
- FORCED_UPDATE, FAST_FORWARD, UP_TO_DATE, ERROR = [1 << x for x in range(11)]
-
- _flag_map = {'X': NO_MATCH,
- '-': DELETED,
- '*': 0,
- '+': FORCED_UPDATE,
- ' ': FAST_FORWARD,
- '=': UP_TO_DATE,
- '!': ERROR}
-
- def __init__(self, flags: int, local_ref: Union[SymbolicReference, None], remote_ref_string: str, remote: 'Remote',
- old_commit: Optional[str] = None, summary: str = '') -> None:
- """ Initialize a new instance
- local_ref: HEAD | Head | RemoteReference | TagReference | Reference | SymbolicReference | None """
+ """
+
+ __slots__ = (
+ "local_ref",
+ "remote_ref_string",
+ "flags",
+ "_old_commit_sha",
+ "_remote",
+ "summary",
+ )
+ _id_attribute_ = "pushinfo"
+
+ (
+ NEW_TAG,
+ NEW_HEAD,
+ NO_MATCH,
+ REJECTED,
+ REMOTE_REJECTED,
+ REMOTE_FAILURE,
+ DELETED,
+ FORCED_UPDATE,
+ FAST_FORWARD,
+ UP_TO_DATE,
+ ERROR,
+ ) = [1 << x for x in range(11)]
+
+ _flag_map = {
+ "X": NO_MATCH,
+ "-": DELETED,
+ "*": 0,
+ "+": FORCED_UPDATE,
+ " ": FAST_FORWARD,
+ "=": UP_TO_DATE,
+ "!": ERROR,
+ }
+
+ def __init__(
+ self,
+ flags: int,
+ local_ref: Union[SymbolicReference, None],
+ remote_ref_string: str,
+ remote: "Remote",
+ old_commit: Optional[str] = None,
+ summary: str = "",
+ ) -> None:
+ """Initialize a new instance
+ local_ref: HEAD | Head | RemoteReference | TagReference | Reference | SymbolicReference | None"""
self.flags = flags
self.local_ref = local_ref
self.remote_ref_string = remote_ref_string
@@ -156,11 +198,15 @@ class PushInfo(IterableObj, object):
self._old_commit_sha = old_commit
self.summary = summary
- @ property
+ @property
def old_commit(self) -> Union[str, SymbolicReference, Commit_ish, None]:
- return self._old_commit_sha and self._remote.repo.commit(self._old_commit_sha) or None
+ return (
+ self._old_commit_sha
+ and self._remote.repo.commit(self._old_commit_sha)
+ or None
+ )
- @ property
+ @property
def remote_ref(self) -> Union[RemoteReference, TagReference]:
"""
:return:
@@ -171,27 +217,33 @@ class PushInfo(IterableObj, object):
return TagReference(self._remote.repo, self.remote_ref_string)
elif self.remote_ref_string.startswith("refs/heads"):
remote_ref = Reference(self._remote.repo, self.remote_ref_string)
- return RemoteReference(self._remote.repo, "refs/remotes/%s/%s" % (str(self._remote), remote_ref.name))
+ return RemoteReference(
+ self._remote.repo,
+ "refs/remotes/%s/%s" % (str(self._remote), remote_ref.name),
+ )
else:
raise ValueError("Could not handle remote ref: %r" % self.remote_ref_string)
# END
- @ classmethod
- def _from_line(cls, remote: 'Remote', line: str) -> 'PushInfo':
+ @classmethod
+ def _from_line(cls, remote: "Remote", line: str) -> "PushInfo":
"""Create a new PushInfo instance as parsed from line which is expected to be like
- refs/heads/master:refs/heads/master 05d2687..1d0568e as bytes"""
- control_character, from_to, summary = line.split('\t', 3)
+ refs/heads/master:refs/heads/master 05d2687..1d0568e as bytes"""
+ control_character, from_to, summary = line.split("\t", 3)
flags = 0
# control character handling
try:
flags |= cls._flag_map[control_character]
except KeyError as e:
- raise ValueError("Control character %r unknown as parsed from line %r" % (control_character, line)) from e
+ raise ValueError(
+ "Control character %r unknown as parsed from line %r"
+ % (control_character, line)
+ ) from e
# END handle control character
# from_to handling
- from_ref_string, to_ref_string = from_to.split(':')
+ from_ref_string, to_ref_string = from_to.split(":")
if flags & cls.DELETED:
from_ref: Union[SymbolicReference, None] = None
else:
@@ -202,7 +254,7 @@ class PushInfo(IterableObj, object):
# commit handling, could be message or commit info
old_commit: Optional[str] = None
- if summary.startswith('['):
+ if summary.startswith("["):
if "[rejected]" in summary:
flags |= cls.REJECTED
elif "[remote rejected]" in summary:
@@ -222,25 +274,26 @@ class PushInfo(IterableObj, object):
split_token = "..."
if control_character == " ":
split_token = ".."
- old_sha, _new_sha = summary.split(' ')[0].split(split_token)
+ old_sha, _new_sha = summary.split(" ")[0].split(split_token)
# have to use constructor here as the sha usually is abbreviated
old_commit = old_sha
# END message handling
return PushInfo(flags, from_ref, to_ref_string, remote, old_commit, summary)
- @ classmethod
- def iter_items(cls, repo: 'Repo', *args: Any, **kwargs: Any
- ) -> NoReturn: # -> Iterator['PushInfo']:
+ @classmethod
+ def iter_items(
+ cls, repo: "Repo", *args: Any, **kwargs: Any
+ ) -> NoReturn: # -> Iterator['PushInfo']:
raise NotImplementedError
class PushInfoList(IterableList[PushInfo]):
- def __new__(cls) -> 'PushInfoList':
- return cast(PushInfoList, IterableList.__new__(cls, 'push_infos'))
+ def __new__(cls) -> "PushInfoList":
+ return cast(PushInfoList, IterableList.__new__(cls, "push_infos"))
def __init__(self) -> None:
- super().__init__('push_infos')
+ super().__init__("push_infos")
self.error: Optional[Exception] = None
def raise_if_error(self) -> None:
@@ -267,24 +320,35 @@ class FetchInfo(IterableObj, object):
# field is set to the previous location of ref, otherwise None
info.remote_ref_path # The path from which we fetched on the remote. It's the remote's version of our info.ref
"""
- __slots__ = ('ref', 'old_commit', 'flags', 'note', 'remote_ref_path')
- _id_attribute_ = 'fetchinfo'
- NEW_TAG, NEW_HEAD, HEAD_UPTODATE, TAG_UPDATE, REJECTED, FORCED_UPDATE, \
- FAST_FORWARD, ERROR = [1 << x for x in range(8)]
+ __slots__ = ("ref", "old_commit", "flags", "note", "remote_ref_path")
+ _id_attribute_ = "fetchinfo"
- _re_fetch_result = re.compile(r'^\s*(.) (\[[\w\s\.$@]+\]|[\w\.$@]+)\s+(.+) -> ([^\s]+)( \(.*\)?$)?')
+ (
+ NEW_TAG,
+ NEW_HEAD,
+ HEAD_UPTODATE,
+ TAG_UPDATE,
+ REJECTED,
+ FORCED_UPDATE,
+ FAST_FORWARD,
+ ERROR,
+ ) = [1 << x for x in range(8)]
+
+ _re_fetch_result = re.compile(
+ r"^\s*(.) (\[[\w\s\.$@]+\]|[\w\.$@]+)\s+(.+) -> ([^\s]+)( \(.*\)?$)?"
+ )
_flag_map: Dict[flagKeyLiteral, int] = {
- '!': ERROR,
- '+': FORCED_UPDATE,
- '*': 0,
- '=': HEAD_UPTODATE,
- ' ': FAST_FORWARD,
- '-': TAG_UPDATE,
+ "!": ERROR,
+ "+": FORCED_UPDATE,
+ "*": 0,
+ "=": HEAD_UPTODATE,
+ " ": FAST_FORWARD,
+ "-": TAG_UPDATE,
}
- @ classmethod
+ @classmethod
def refresh(cls) -> Literal[True]:
"""This gets called by the refresh function (see the top level
__init__).
@@ -308,9 +372,14 @@ class FetchInfo(IterableObj, object):
return True
- def __init__(self, ref: SymbolicReference, flags: int, note: str = '',
- old_commit: Union[Commit_ish, None] = None,
- remote_ref_path: Optional[PathLike] = None) -> None:
+ def __init__(
+ self,
+ ref: SymbolicReference,
+ flags: int,
+ note: str = "",
+ old_commit: Union[Commit_ish, None] = None,
+ remote_ref_path: Optional[PathLike] = None,
+ ) -> None:
"""
Initialize a new instance
"""
@@ -323,18 +392,18 @@ class FetchInfo(IterableObj, object):
def __str__(self) -> str:
return self.name
- @ property
+ @property
def name(self) -> str:
""":return: Name of our remote ref"""
return self.ref.name
- @ property
+ @property
def commit(self) -> Commit_ish:
""":return: Commit of our remote ref"""
return self.ref.commit
- @ classmethod
- def _from_line(cls, repo: 'Repo', line: str, fetch_line: str) -> 'FetchInfo':
+ @classmethod
+ def _from_line(cls, repo: "Repo", line: str, fetch_line: str) -> "FetchInfo":
"""Parse information from the given line as returned by git-fetch -v
and return a new FetchInfo object representing this information.
@@ -357,12 +426,18 @@ class FetchInfo(IterableObj, object):
# parse lines
remote_local_ref_str: str
- control_character, operation, local_remote_ref, remote_local_ref_str, note = match.groups()
+ (
+ control_character,
+ operation,
+ local_remote_ref,
+ remote_local_ref_str,
+ note,
+ ) = match.groups()
# assert is_flagKeyLiteral(control_character), f"{control_character}"
control_character = cast(flagKeyLiteral, control_character)
try:
_new_hex_sha, _fetch_operation, fetch_note = fetch_line.split("\t")
- ref_type_name, fetch_note = fetch_note.split(' ', 1)
+ ref_type_name, fetch_note = fetch_note.split(" ", 1)
except ValueError as e: # unpack error
raise ValueError("Failed to parse FETCH_HEAD line: %r" % fetch_line) from e
@@ -371,25 +446,28 @@ class FetchInfo(IterableObj, object):
try:
flags |= cls._flag_map[control_character]
except KeyError as e:
- raise ValueError("Control character %r unknown as parsed from line %r" % (control_character, line)) from e
+ raise ValueError(
+ "Control character %r unknown as parsed from line %r"
+ % (control_character, line)
+ ) from e
# END control char exception handling
# parse operation string for more info - makes no sense for symbolic refs, but we parse it anyway
old_commit: Union[Commit_ish, None] = None
is_tag_operation = False
- if 'rejected' in operation:
+ if "rejected" in operation:
flags |= cls.REJECTED
- if 'new tag' in operation:
+ if "new tag" in operation:
flags |= cls.NEW_TAG
is_tag_operation = True
- if 'tag update' in operation:
+ if "tag update" in operation:
flags |= cls.TAG_UPDATE
is_tag_operation = True
- if 'new branch' in operation:
+ if "new branch" in operation:
flags |= cls.NEW_HEAD
- if '...' in operation or '..' in operation:
- split_token = '...'
- if control_character == ' ':
+ if "..." in operation or ".." in operation:
+ split_token = "..."
+ if control_character == " ":
split_token = split_token[:-1]
old_commit = repo.rev_parse(operation.split(split_token)[0])
# END handle refspec
@@ -409,7 +487,7 @@ class FetchInfo(IterableObj, object):
# note: remote-tracking is just the first part of the 'remote-tracking branch' token.
# We don't parse it correctly, but its enough to know what to do, and its new in git 1.7something
ref_type = RemoteReference
- elif '/' in ref_type_name:
+ elif "/" in ref_type_name:
# If the fetch spec look something like this '+refs/pull/*:refs/heads/pull/*', and is thus pretty
# much anything the user wants, we will have trouble to determine what's going on
# For now, we assume the local ref is a Head
@@ -434,15 +512,23 @@ class FetchInfo(IterableObj, object):
# always use actual type if we get absolute paths
# Will always be the case if something is fetched outside of refs/remotes (if its not a tag)
ref_path = remote_local_ref_str
- if ref_type is not TagReference and not \
- remote_local_ref_str.startswith(RemoteReference._common_path_default + "/"):
+ if (
+ ref_type is not TagReference
+ and not remote_local_ref_str.startswith(
+ RemoteReference._common_path_default + "/"
+ )
+ ):
ref_type = Reference
# END downgrade remote reference
- elif ref_type is TagReference and 'tags/' in remote_local_ref_str:
+ elif ref_type is TagReference and "tags/" in remote_local_ref_str:
# even though its a tag, it is located in refs/remotes
- ref_path = join_path(RemoteReference._common_path_default, remote_local_ref_str)
+ ref_path = join_path(
+ RemoteReference._common_path_default, remote_local_ref_str
+ )
else:
- ref_path = join_path(ref_type._common_path_default, remote_local_ref_str)
+ ref_path = join_path(
+ ref_type._common_path_default, remote_local_ref_str
+ )
# END obtain refpath
# even though the path could be within the git conventions, we make
@@ -450,13 +536,14 @@ class FetchInfo(IterableObj, object):
remote_local_ref = ref_type(repo, ref_path, check_path=False)
# END create ref instance
- note = (note and note.strip()) or ''
+ note = (note and note.strip()) or ""
return cls(remote_local_ref, flags, note, old_commit, local_remote_ref)
- @ classmethod
- def iter_items(cls, repo: 'Repo', *args: Any, **kwargs: Any
- ) -> NoReturn: # -> Iterator['FetchInfo']:
+ @classmethod
+ def iter_items(
+ cls, repo: "Repo", *args: Any, **kwargs: Any
+ ) -> NoReturn: # -> Iterator['FetchInfo']:
raise NotImplementedError
@@ -473,7 +560,7 @@ class Remote(LazyMixin, IterableObj):
__slots__ = ("repo", "name", "_config_reader")
_id_attribute_ = "name"
- def __init__(self, repo: 'Repo', name: str) -> None:
+ def __init__(self, repo: "Repo", name: str) -> None:
"""Initialize a remote instance
:param repo: The repository we are a remote of
@@ -503,7 +590,9 @@ class Remote(LazyMixin, IterableObj):
if attr == "_config_reader":
# NOTE: This is cached as __getattr__ is overridden to return remote config values implicitly, such as
# in print(r.pushurl)
- self._config_reader = SectionConstraint(self.repo.config_reader("repository"), self._config_section_name())
+ self._config_reader = SectionConstraint(
+ self.repo.config_reader("repository"), self._config_section_name()
+ )
else:
super(Remote, self)._set_cache_(attr)
@@ -527,7 +616,7 @@ class Remote(LazyMixin, IterableObj):
:return: True if this is a valid, existing remote.
Valid remotes have an entry in the repository's configuration"""
try:
- self.config_reader.get('url')
+ self.config_reader.get("url")
return True
except cp.NoOptionError:
# we have the section at least ...
@@ -536,20 +625,22 @@ class Remote(LazyMixin, IterableObj):
return False
# end
- @ classmethod
- def iter_items(cls, repo: 'Repo', *args: Any, **kwargs: Any) -> Iterator['Remote']:
+ @classmethod
+ def iter_items(cls, repo: "Repo", *args: Any, **kwargs: Any) -> Iterator["Remote"]:
""":return: Iterator yielding Remote objects of the given repository"""
for section in repo.config_reader("repository").sections():
- if not section.startswith('remote '):
+ if not section.startswith("remote "):
continue
lbound = section.find('"')
rbound = section.rfind('"')
if lbound == -1 or rbound == -1:
raise ValueError("Remote-Section has invalid format: %r" % section)
- yield Remote(repo, section[lbound + 1:rbound])
+ yield Remote(repo, section[lbound + 1 : rbound])
# END for each configuration section
- def set_url(self, new_url: str, old_url: Optional[str] = None, **kwargs: Any) -> 'Remote':
+ def set_url(
+ self, new_url: str, old_url: Optional[str] = None, **kwargs: Any
+ ) -> "Remote":
"""Configure URLs on current remote (cf command git remote set_url)
This command manages URLs on the remote.
@@ -558,15 +649,15 @@ class Remote(LazyMixin, IterableObj):
:param old_url: when set, replaces this URL with new_url for the remote
:return: self
"""
- scmd = 'set-url'
- kwargs['insert_kwargs_after'] = scmd
+ scmd = "set-url"
+ kwargs["insert_kwargs_after"] = scmd
if old_url:
self.repo.git.remote(scmd, self.name, new_url, old_url, **kwargs)
else:
self.repo.git.remote(scmd, self.name, new_url, **kwargs)
return self
- def add_url(self, url: str, **kwargs: Any) -> 'Remote':
+ def add_url(self, url: str, **kwargs: Any) -> "Remote":
"""Adds a new url on current remote (special case of git remote set_url)
This command adds new URLs to a given remote, making it possible to have
@@ -577,7 +668,7 @@ class Remote(LazyMixin, IterableObj):
"""
return self.set_url(url, add=True)
- def delete_url(self, url: str, **kwargs: Any) -> 'Remote':
+ def delete_url(self, url: str, **kwargs: Any) -> "Remote":
"""Deletes a new url on current remote (special case of git remote set_url)
This command deletes new URLs to a given remote, making it possible to have
@@ -588,13 +679,13 @@ class Remote(LazyMixin, IterableObj):
"""
return self.set_url(url, delete=True)
- @ property
+ @property
def urls(self) -> Iterator[str]:
""":return: Iterator yielding all configured URL targets on a remote as strings"""
try:
remote_details = self.repo.git.remote("get-url", "--all", self.name)
assert isinstance(remote_details, str)
- for line in remote_details.split('\n'):
+ for line in remote_details.split("\n"):
yield line
except GitCommandError as ex:
## We are on git < 2.7 (i.e TravisCI as of Oct-2016),
@@ -602,37 +693,44 @@ class Remote(LazyMixin, IterableObj):
# see: https://github.com/gitpython-developers/GitPython/pull/528#issuecomment-252976319
# and: http://stackoverflow.com/a/32991784/548792
#
- if 'Unknown subcommand: get-url' in str(ex):
+ if "Unknown subcommand: get-url" in str(ex):
try:
remote_details = self.repo.git.remote("show", self.name)
assert isinstance(remote_details, str)
- for line in remote_details.split('\n'):
- if ' Push URL:' in line:
- yield line.split(': ')[-1]
+ for line in remote_details.split("\n"):
+ if " Push URL:" in line:
+ yield line.split(": ")[-1]
except GitCommandError as _ex:
- if any(msg in str(_ex) for msg in ['correct access rights', 'cannot run ssh']):
+ if any(
+ msg in str(_ex)
+ for msg in ["correct access rights", "cannot run ssh"]
+ ):
# If ssh is not setup to access this repository, see issue 694
- remote_details = self.repo.git.config('--get-all', 'remote.%s.url' % self.name)
+ remote_details = self.repo.git.config(
+ "--get-all", "remote.%s.url" % self.name
+ )
assert isinstance(remote_details, str)
- for line in remote_details.split('\n'):
+ for line in remote_details.split("\n"):
yield line
else:
raise _ex
else:
raise ex
- @ property
+ @property
def refs(self) -> IterableList[RemoteReference]:
"""
:return:
IterableList of RemoteReference objects. It is prefixed, allowing
you to omit the remote path portion, i.e.::
remote.refs.master # yields RemoteReference('/refs/remotes/origin/master')"""
- out_refs: IterableList[RemoteReference] = IterableList(RemoteReference._id_attribute_, "%s/" % self.name)
+ out_refs: IterableList[RemoteReference] = IterableList(
+ RemoteReference._id_attribute_, "%s/" % self.name
+ )
out_refs.extend(RemoteReference.list_items(self.repo, remote=self.name))
return out_refs
- @ property
+ @property
def stale_refs(self) -> IterableList[Reference]:
"""
:return:
@@ -647,8 +745,10 @@ class Remote(LazyMixin, IterableObj):
other kinds of references, for example, tag references, if these are stale
as well. This is a fix for the issue described here:
https://github.com/gitpython-developers/GitPython/issues/260
- """
- out_refs: IterableList[Reference] = IterableList(RemoteReference._id_attribute_, "%s/" % self.name)
+ """
+ out_refs: IterableList[Reference] = IterableList(
+ RemoteReference._id_attribute_, "%s/" % self.name
+ )
for line in self.repo.git.remote("prune", "--dry-run", self).splitlines()[2:]:
# expecting
# * [would prune] origin/new_branch
@@ -657,7 +757,7 @@ class Remote(LazyMixin, IterableObj):
continue
ref_name = line.replace(token, "")
# sometimes, paths start with a full ref name, like refs/tags/foo, see #260
- if ref_name.startswith(Reference._common_path_default + '/'):
+ if ref_name.startswith(Reference._common_path_default + "/"):
out_refs.append(Reference.from_path(self.repo, ref_name))
else:
fqhn = "%s/%s" % (RemoteReference._common_path_default, ref_name)
@@ -666,8 +766,8 @@ class Remote(LazyMixin, IterableObj):
# END for each line
return out_refs
- @ classmethod
- def create(cls, repo: 'Repo', name: str, url: str, **kwargs: Any) -> 'Remote':
+ @classmethod
+ def create(cls, repo: "Repo", name: str, url: str, **kwargs: Any) -> "Remote":
"""Create a new remote to the given repository
:param repo: Repository instance that is to receive the new remote
:param name: Desired name of the remote
@@ -675,18 +775,18 @@ class Remote(LazyMixin, IterableObj):
:param kwargs: Additional arguments to be passed to the git-remote add command
:return: New Remote instance
:raise GitCommandError: in case an origin with that name already exists"""
- scmd = 'add'
- kwargs['insert_kwargs_after'] = scmd
+ scmd = "add"
+ kwargs["insert_kwargs_after"] = scmd
repo.git.remote(scmd, name, Git.polish_url(url), **kwargs)
return cls(repo, name)
# add is an alias
- @ classmethod
- def add(cls, repo: 'Repo', name: str, url: str, **kwargs: Any) -> 'Remote':
+ @classmethod
+ def add(cls, repo: "Repo", name: str, url: str, **kwargs: Any) -> "Remote":
return cls.create(repo, name, url, **kwargs)
- @ classmethod
- def remove(cls, repo: 'Repo', name: str) -> str:
+ @classmethod
+ def remove(cls, repo: "Repo", name: str) -> str:
"""Remove the remote with the given name
:return: the passed remote name to remove
"""
@@ -698,9 +798,9 @@ class Remote(LazyMixin, IterableObj):
# alias
rm = remove
- def rename(self, new_name: str) -> 'Remote':
+ def rename(self, new_name: str) -> "Remote":
"""Rename self to the given new_name
- :return: self """
+ :return: self"""
if self.name == new_name:
return self
@@ -710,7 +810,7 @@ class Remote(LazyMixin, IterableObj):
return self
- def update(self, **kwargs: Any) -> 'Remote':
+ def update(self, **kwargs: Any) -> "Remote":
"""Fetch all changes for this remote, including new branches which will
be forced in ( in case your local remote branch is not part the new remote branches
ancestry anymore ).
@@ -718,21 +818,23 @@ class Remote(LazyMixin, IterableObj):
:param kwargs:
Additional arguments passed to git-remote update
- :return: self """
- scmd = 'update'
- kwargs['insert_kwargs_after'] = scmd
+ :return: self"""
+ scmd = "update"
+ kwargs["insert_kwargs_after"] = scmd
self.repo.git.remote(scmd, self.name, **kwargs)
return self
- def _get_fetch_info_from_stderr(self, proc: 'Git.AutoInterrupt',
- progress: Union[Callable[..., Any], RemoteProgress, None],
- kill_after_timeout: Union[None, float] = None,
- ) -> IterableList['FetchInfo']:
+ def _get_fetch_info_from_stderr(
+ self,
+ proc: "Git.AutoInterrupt",
+ progress: Union[Callable[..., Any], RemoteProgress, None],
+ kill_after_timeout: Union[None, float] = None,
+ ) -> IterableList["FetchInfo"]:
progress = to_progress_instance(progress)
# skip first line as it is some remote info we are not interested in
- output: IterableList['FetchInfo'] = IterableList('name')
+ output: IterableList["FetchInfo"] = IterableList("name")
# lines which are no progress are fetch info lines
# this also waits for the command to finish
@@ -743,10 +845,16 @@ class Remote(LazyMixin, IterableObj):
cmds = set(FetchInfo._flag_map.keys())
progress_handler = progress.new_message_handler()
- handle_process_output(proc, None, progress_handler, finalizer=None, decode_streams=False,
- kill_after_timeout=kill_after_timeout)
-
- stderr_text = progress.error_lines and '\n'.join(progress.error_lines) or ''
+ handle_process_output(
+ proc,
+ None,
+ progress_handler,
+ finalizer=None,
+ decode_streams=False,
+ kill_after_timeout=kill_after_timeout,
+ )
+
+ stderr_text = progress.error_lines and "\n".join(progress.error_lines) or ""
proc.wait(stderr=stderr_text)
if stderr_text:
log.warning("Error lines received while fetching: %s", stderr_text)
@@ -754,13 +862,13 @@ class Remote(LazyMixin, IterableObj):
for line in progress.other_lines:
line = force_text(line)
for cmd in cmds:
- if len(line) > 1 and line[0] == ' ' and line[1] == cmd:
+ if len(line) > 1 and line[0] == " " and line[1] == cmd:
fetch_info_lines.append(line)
continue
# read head information
fetch_head = SymbolicReference(self.repo, "FETCH_HEAD")
- with open(fetch_head.abspath, 'rb') as fp:
+ with open(fetch_head.abspath, "rb") as fp:
fetch_head_info = [line.decode(defenc) for line in fp.readlines()]
l_fil = len(fetch_info_lines)
@@ -788,9 +896,12 @@ class Remote(LazyMixin, IterableObj):
log.warning("Git informed while fetching: %s", err_line.strip())
return output
- def _get_push_info(self, proc: 'Git.AutoInterrupt',
- progress: Union[Callable[..., Any], RemoteProgress, None],
- kill_after_timeout: Union[None, float] = None) -> PushInfoList:
+ def _get_push_info(
+ self,
+ proc: "Git.AutoInterrupt",
+ progress: Union[Callable[..., Any], RemoteProgress, None],
+ kill_after_timeout: Union[None, float] = None,
+ ) -> PushInfoList:
progress = to_progress_instance(progress)
# read progress information from stderr
@@ -807,9 +918,15 @@ class Remote(LazyMixin, IterableObj):
# If an error happens, additional info is given which we parse below.
pass
- handle_process_output(proc, stdout_handler, progress_handler, finalizer=None, decode_streams=False,
- kill_after_timeout=kill_after_timeout)
- stderr_text = progress.error_lines and '\n'.join(progress.error_lines) or ''
+ handle_process_output(
+ proc,
+ stdout_handler,
+ progress_handler,
+ finalizer=None,
+ decode_streams=False,
+ kill_after_timeout=kill_after_timeout,
+ )
+ stderr_text = progress.error_lines and "\n".join(progress.error_lines) or ""
try:
proc.wait(stderr=stderr_text)
except Exception as e:
@@ -826,9 +943,9 @@ class Remote(LazyMixin, IterableObj):
def _assert_refspec(self) -> None:
"""Turns out we can't deal with remotes if the refspec is missing"""
config = self.config_reader
- unset = 'placeholder'
+ unset = "placeholder"
try:
- if config.get_value('fetch', default=unset) is unset:
+ if config.get_value("fetch", default=unset) is unset:
msg = "Remote '%s' has no refspec set.\n"
msg += "You can set it as follows:"
msg += " 'git config --add \"remote.%s.fetch +refs/heads/*:refs/heads/*\"'."
@@ -836,11 +953,14 @@ class Remote(LazyMixin, IterableObj):
finally:
config.release()
- def fetch(self, refspec: Union[str, List[str], None] = None,
- progress: Union[RemoteProgress, None, 'UpdateProgress'] = None,
- verbose: bool = True,
- kill_after_timeout: Union[None, float] = None,
- **kwargs: Any) -> IterableList[FetchInfo]:
+ def fetch(
+ self,
+ refspec: Union[str, List[str], None] = None,
+ progress: Union[RemoteProgress, None, "UpdateProgress"] = None,
+ verbose: bool = True,
+ kill_after_timeout: Union[None, float] = None,
+ **kwargs: Any
+ ) -> IterableList[FetchInfo]:
"""Fetch the latest changes for this remote
:param refspec:
@@ -881,18 +1001,29 @@ class Remote(LazyMixin, IterableObj):
else:
args = [refspec]
- proc = self.repo.git.fetch(self, *args, as_process=True, with_stdout=False,
- universal_newlines=True, v=verbose, **kwargs)
- res = self._get_fetch_info_from_stderr(proc, progress,
- kill_after_timeout=kill_after_timeout)
- if hasattr(self.repo.odb, 'update_cache'):
+ proc = self.repo.git.fetch(
+ self,
+ *args,
+ as_process=True,
+ with_stdout=False,
+ universal_newlines=True,
+ v=verbose,
+ **kwargs
+ )
+ res = self._get_fetch_info_from_stderr(
+ proc, progress, kill_after_timeout=kill_after_timeout
+ )
+ if hasattr(self.repo.odb, "update_cache"):
self.repo.odb.update_cache()
return res
- def pull(self, refspec: Union[str, List[str], None] = None,
- progress: Union[RemoteProgress, 'UpdateProgress', None] = None,
- kill_after_timeout: Union[None, float] = None,
- **kwargs: Any) -> IterableList[FetchInfo]:
+ def pull(
+ self,
+ refspec: Union[str, List[str], None] = None,
+ progress: Union[RemoteProgress, "UpdateProgress", None] = None,
+ kill_after_timeout: Union[None, float] = None,
+ **kwargs: Any
+ ) -> IterableList[FetchInfo]:
"""Pull changes from the given branch, being the same as a fetch followed
by a merge of branch with your local branch.
@@ -900,23 +1031,36 @@ class Remote(LazyMixin, IterableObj):
:param progress: see 'push' method
:param kill_after_timeout: see 'fetch' method
:param kwargs: Additional arguments to be passed to git-pull
- :return: Please see 'fetch' method """
+ :return: Please see 'fetch' method"""
if refspec is None:
# No argument refspec, then ensure the repo's config has a fetch refspec.
self._assert_refspec()
kwargs = add_progress(kwargs, self.repo.git, progress)
- proc = self.repo.git.pull(self, refspec, with_stdout=False, as_process=True,
- universal_newlines=True, v=True, **kwargs)
- res = self._get_fetch_info_from_stderr(proc, progress,
- kill_after_timeout=kill_after_timeout)
- if hasattr(self.repo.odb, 'update_cache'):
+ proc = self.repo.git.pull(
+ self,
+ refspec,
+ with_stdout=False,
+ as_process=True,
+ universal_newlines=True,
+ v=True,
+ **kwargs
+ )
+ res = self._get_fetch_info_from_stderr(
+ proc, progress, kill_after_timeout=kill_after_timeout
+ )
+ if hasattr(self.repo.odb, "update_cache"):
self.repo.odb.update_cache()
return res
- def push(self, refspec: Union[str, List[str], None] = None,
- progress: Union[RemoteProgress, 'UpdateProgress', Callable[..., RemoteProgress], None] = None,
- kill_after_timeout: Union[None, float] = None,
- **kwargs: Any) -> IterableList[PushInfo]:
+ def push(
+ self,
+ refspec: Union[str, List[str], None] = None,
+ progress: Union[
+ RemoteProgress, "UpdateProgress", Callable[..., RemoteProgress], None
+ ] = None,
+ kill_after_timeout: Union[None, float] = None,
+ **kwargs: Any
+ ) -> IterableList[PushInfo]:
"""Push changes from source branch in refspec to target branch in refspec.
:param refspec: see 'fetch' method
@@ -945,14 +1089,20 @@ class Remote(LazyMixin, IterableObj):
If the operation fails completely, the length of the returned IterableList will
be 0."""
kwargs = add_progress(kwargs, self.repo.git, progress)
- proc = self.repo.git.push(self, refspec, porcelain=True, as_process=True,
- universal_newlines=True,
- kill_after_timeout=kill_after_timeout,
- **kwargs)
- return self._get_push_info(proc, progress,
- kill_after_timeout=kill_after_timeout)
-
- @ property
+ proc = self.repo.git.push(
+ self,
+ refspec,
+ porcelain=True,
+ as_process=True,
+ universal_newlines=True,
+ kill_after_timeout=kill_after_timeout,
+ **kwargs
+ )
+ return self._get_push_info(
+ proc, progress, kill_after_timeout=kill_after_timeout
+ )
+
+ @property
def config_reader(self) -> SectionConstraint[GitConfigParser]:
"""
:return:
@@ -962,12 +1112,12 @@ class Remote(LazyMixin, IterableObj):
def _clear_cache(self) -> None:
try:
- del(self._config_reader)
+ del self._config_reader
except AttributeError:
pass
# END handle exception
- @ property
+ @property
def config_writer(self) -> SectionConstraint:
"""
:return: GitConfigParser compatible object able to write options for this remote.
diff --git a/git/repo/base.py b/git/repo/base.py
index bea0dcb5..356a8f2f 100644
--- a/git/repo/base.py
+++ b/git/repo/base.py
@@ -13,10 +13,7 @@ from gitdb.db.loose import LooseObjectDB
from gitdb.exc import BadObject
-from git.cmd import (
- Git,
- handle_process_output
-)
+from git.cmd import Git, handle_process_output
from git.compat import (
defenc,
safe_decode,
@@ -29,20 +26,54 @@ from git.index import IndexFile
from git.objects import Submodule, RootModule, Commit
from git.refs import HEAD, Head, Reference, TagReference
from git.remote import Remote, add_progress, to_progress_instance
-from git.util import Actor, finalize_process, decygpath, hex_to_bin, expand_path, remove_password_if_present
+from git.util import (
+ Actor,
+ finalize_process,
+ decygpath,
+ hex_to_bin,
+ expand_path,
+ remove_password_if_present,
+)
import os.path as osp
-from .fun import rev_parse, is_git_dir, find_submodule_git_dir, touch, find_worktree_git_dir
+from .fun import (
+ rev_parse,
+ is_git_dir,
+ find_submodule_git_dir,
+ touch,
+ find_worktree_git_dir,
+)
import gc
import gitdb
# typing ------------------------------------------------------
-from git.types import TBD, PathLike, Lit_config_levels, Commit_ish, Tree_ish, assert_never
-from typing import (Any, BinaryIO, Callable, Dict,
- Iterator, List, Mapping, Optional, Sequence,
- TextIO, Tuple, Type, Union,
- NamedTuple, cast, TYPE_CHECKING)
+from git.types import (
+ TBD,
+ PathLike,
+ Lit_config_levels,
+ Commit_ish,
+ Tree_ish,
+ assert_never,
+)
+from typing import (
+ Any,
+ BinaryIO,
+ Callable,
+ Dict,
+ Iterator,
+ List,
+ Mapping,
+ Optional,
+ Sequence,
+ TextIO,
+ Tuple,
+ Type,
+ Union,
+ NamedTuple,
+ cast,
+ TYPE_CHECKING,
+)
from git.types import ConfigLevels_Tup, TypedDict
@@ -57,11 +88,11 @@ if TYPE_CHECKING:
log = logging.getLogger(__name__)
-__all__ = ('Repo',)
+__all__ = ("Repo",)
class BlameEntry(NamedTuple):
- commit: Dict[str, 'Commit']
+ commit: Dict[str, "Commit"]
linenos: range
orig_path: Optional[str]
orig_linenos: range
@@ -81,21 +112,24 @@ class Repo(object):
if we are a bare repository.
'git_dir' is the .git repository directory, which is always set."""
- DAEMON_EXPORT_FILE = 'git-daemon-export-ok'
- git = cast('Git', None) # Must exist, or __del__ will fail in case we raise on `__init__()`
+ DAEMON_EXPORT_FILE = "git-daemon-export-ok"
+
+ git = cast(
+ "Git", None
+ ) # Must exist, or __del__ will fail in case we raise on `__init__()`
working_dir: Optional[PathLike] = None
_working_tree_dir: Optional[PathLike] = None
git_dir: PathLike = ""
_common_dir: PathLike = ""
# precompiled regex
- re_whitespace = re.compile(r'\s+')
- re_hexsha_only = re.compile('^[0-9A-Fa-f]{40}$')
- re_hexsha_shortened = re.compile('^[0-9A-Fa-f]{4,40}$')
- re_envvars = re.compile(r'(\$(\{\s?)?[a-zA-Z_]\w*(\}\s?)?|%\s?[a-zA-Z_]\w*\s?%)')
- re_author_committer_start = re.compile(r'^(author|committer)')
- re_tab_full_line = re.compile(r'^\t(.*)$')
+ re_whitespace = re.compile(r"\s+")
+ re_hexsha_only = re.compile("^[0-9A-Fa-f]{40}$")
+ re_hexsha_shortened = re.compile("^[0-9A-Fa-f]{4,40}$")
+ re_envvars = re.compile(r"(\$(\{\s?)?[a-zA-Z_]\w*(\}\s?)?|%\s?[a-zA-Z_]\w*\s?%)")
+ re_author_committer_start = re.compile(r"^(author|committer)")
+ re_tab_full_line = re.compile(r"^\t(.*)$")
# invariants
# represents the configuration level of a configuration file
@@ -105,8 +139,13 @@ class Repo(object):
# Subclasses may easily bring in their own custom types by placing a constructor or type here
GitCommandWrapperType = Git
- def __init__(self, path: Optional[PathLike] = None, odbt: Type[LooseObjectDB] = GitCmdObjectDB,
- search_parent_directories: bool = False, expand_vars: bool = True) -> None:
+ def __init__(
+ self,
+ path: Optional[PathLike] = None,
+ odbt: Type[LooseObjectDB] = GitCmdObjectDB,
+ search_parent_directories: bool = False,
+ expand_vars: bool = True,
+ ) -> None:
"""Create a new Repo instance
:param path:
@@ -132,9 +171,9 @@ class Repo(object):
which is considered a bug though.
:raise InvalidGitRepositoryError:
:raise NoSuchPathError:
- :return: git.Repo """
+ :return: git.Repo"""
- epath = path or os.getenv('GIT_DIR')
+ epath = path or os.getenv("GIT_DIR")
if not epath:
epath = os.getcwd()
if Git.is_cygwin():
@@ -144,8 +183,10 @@ class Repo(object):
if not isinstance(epath, str):
epath = str(epath)
if expand_vars and re.search(self.re_envvars, epath):
- warnings.warn("The use of environment variables in paths is deprecated" +
- "\nfor security reasons and may be removed in the future!!")
+ warnings.warn(
+ "The use of environment variables in paths is deprecated"
+ + "\nfor security reasons and may be removed in the future!!"
+ )
epath = expand_path(epath, expand_vars)
if epath is not None:
if not os.path.exists(epath):
@@ -170,15 +211,15 @@ class Repo(object):
# If GIT_DIR is specified but none of GIT_WORK_TREE and core.worktree is specified,
# the current working directory is regarded as the top level of your working tree.
self._working_tree_dir = os.path.dirname(self.git_dir)
- if os.environ.get('GIT_COMMON_DIR') is None:
+ if os.environ.get("GIT_COMMON_DIR") is None:
gitconf = self.config_reader("repository")
- if gitconf.has_option('core', 'worktree'):
- self._working_tree_dir = gitconf.get('core', 'worktree')
- if 'GIT_WORK_TREE' in os.environ:
- self._working_tree_dir = os.getenv('GIT_WORK_TREE')
+ if gitconf.has_option("core", "worktree"):
+ self._working_tree_dir = gitconf.get("core", "worktree")
+ if "GIT_WORK_TREE" in os.environ:
+ self._working_tree_dir = os.getenv("GIT_WORK_TREE")
break
- dotgit = osp.join(curpath, '.git')
+ dotgit = osp.join(curpath, ".git")
sm_gitpath = find_submodule_git_dir(dotgit)
if sm_gitpath is not None:
self.git_dir = osp.normpath(sm_gitpath)
@@ -204,13 +245,15 @@ class Repo(object):
self._bare = False
try:
- self._bare = self.config_reader("repository").getboolean('core', 'bare')
+ self._bare = self.config_reader("repository").getboolean("core", "bare")
except Exception:
# lets not assume the option exists, although it should
pass
try:
- common_dir = open(osp.join(self.git_dir, 'commondir'), 'rt').readlines()[0].strip()
+ common_dir = (
+ open(osp.join(self.git_dir, "commondir"), "rt").readlines()[0].strip()
+ )
self._common_dir = osp.join(self.git_dir, common_dir)
except OSError:
self._common_dir = ""
@@ -225,13 +268,13 @@ class Repo(object):
self.git = self.GitCommandWrapperType(self.working_dir)
# special handling, in special times
- rootpath = osp.join(self.common_dir, 'objects')
+ rootpath = osp.join(self.common_dir, "objects")
if issubclass(odbt, GitCmdObjectDB):
self.odb = odbt(rootpath, self.git)
else:
self.odb = odbt(rootpath)
- def __enter__(self) -> 'Repo':
+ def __enter__(self) -> "Repo":
return self
def __exit__(self, *args: Any) -> None:
@@ -272,25 +315,25 @@ class Repo(object):
# Description property
def _get_description(self) -> str:
if self.git_dir:
- filename = osp.join(self.git_dir, 'description')
- with open(filename, 'rb') as fp:
+ filename = osp.join(self.git_dir, "description")
+ with open(filename, "rb") as fp:
return fp.read().rstrip().decode(defenc)
def _set_description(self, descr: str) -> None:
if self.git_dir:
- filename = osp.join(self.git_dir, 'description')
- with open(filename, 'wb') as fp:
- fp.write((descr + '\n').encode(defenc))
+ filename = osp.join(self.git_dir, "description")
+ with open(filename, "wb") as fp:
+ fp.write((descr + "\n").encode(defenc))
- description = property(_get_description, _set_description,
- doc="the project's description")
+ description = property(
+ _get_description, _set_description, doc="the project's description"
+ )
del _get_description
del _set_description
@property
def working_tree_dir(self) -> Optional[PathLike]:
- """:return: The working tree directory of our git repository. If this is a bare repository, None is returned.
- """
+ """:return: The working tree directory of our git repository. If this is a bare repository, None is returned."""
return self._working_tree_dir
@property
@@ -312,7 +355,7 @@ class Repo(object):
return self._bare
@property
- def heads(self) -> 'IterableList[Head]':
+ def heads(self) -> "IterableList[Head]":
"""A list of ``Head`` objects representing the branch heads in
this repo
@@ -320,7 +363,7 @@ class Repo(object):
return Head.list_items(self)
@property
- def references(self) -> 'IterableList[Reference]':
+ def references(self) -> "IterableList[Reference]":
"""A list of Reference objects representing tags, heads and remote references.
:return: IterableList(Reference, ...)"""
@@ -333,24 +376,24 @@ class Repo(object):
branches = heads
@property
- def index(self) -> 'IndexFile':
+ def index(self) -> "IndexFile":
""":return: IndexFile representing this repository's index.
:note: This property can be expensive, as the returned ``IndexFile`` will be
reinitialized. It's recommended to re-use the object."""
return IndexFile(self)
@property
- def head(self) -> 'HEAD':
+ def head(self) -> "HEAD":
""":return: HEAD Object pointing to the current head reference"""
- return HEAD(self, 'HEAD')
+ return HEAD(self, "HEAD")
@property
- def remotes(self) -> 'IterableList[Remote]':
+ def remotes(self) -> "IterableList[Remote]":
"""A list of Remote objects allowing to access and manipulate remotes
:return: ``git.IterableList(Remote, ...)``"""
return Remote.list_items(self)
- def remote(self, name: str = 'origin') -> 'Remote':
+ def remote(self, name: str = "origin") -> "Remote":
""":return: Remote with the specified name
:raise ValueError: if no remote with such a name exists"""
r = Remote(self, name)
@@ -358,17 +401,17 @@ class Repo(object):
raise ValueError("Remote named '%s' didn't exist" % name)
return r
- #{ Submodules
+ # { Submodules
@property
- def submodules(self) -> 'IterableList[Submodule]':
+ def submodules(self) -> "IterableList[Submodule]":
"""
:return: git.IterableList(Submodule, ...) of direct submodules
available from the current head"""
return Submodule.list_items(self)
- def submodule(self, name: str) -> 'Submodule':
- """ :return: Submodule with the given name
+ def submodule(self, name: str) -> "Submodule":
+ """:return: Submodule with the given name
:raise ValueError: If no such submodule exists"""
try:
return self.submodules[name]
@@ -396,53 +439,61 @@ class Repo(object):
see the documentation of RootModule.update"""
return RootModule(self).update(*args, **kwargs)
- #}END submodules
+ # }END submodules
@property
- def tags(self) -> 'IterableList[TagReference]':
+ def tags(self) -> "IterableList[TagReference]":
"""A list of ``Tag`` objects that are available in this repo
- :return: ``git.IterableList(TagReference, ...)`` """
+ :return: ``git.IterableList(TagReference, ...)``"""
return TagReference.list_items(self)
def tag(self, path: PathLike) -> TagReference:
""":return: TagReference Object, reference pointing to a Commit or Tag
- :param path: path to the tag reference, i.e. 0.1.5 or tags/0.1.5 """
+ :param path: path to the tag reference, i.e. 0.1.5 or tags/0.1.5"""
full_path = self._to_full_tag_path(path)
return TagReference(self, full_path)
@staticmethod
def _to_full_tag_path(path: PathLike) -> str:
path_str = str(path)
- if path_str.startswith(TagReference._common_path_default + '/'):
+ if path_str.startswith(TagReference._common_path_default + "/"):
return path_str
- if path_str.startswith(TagReference._common_default + '/'):
- return Reference._common_path_default + '/' + path_str
+ if path_str.startswith(TagReference._common_default + "/"):
+ return Reference._common_path_default + "/" + path_str
else:
- return TagReference._common_path_default + '/' + path_str
-
- def create_head(self, path: PathLike,
- commit: Union['SymbolicReference', 'str'] = 'HEAD',
- force: bool = False, logmsg: Optional[str] = None
- ) -> 'Head':
+ return TagReference._common_path_default + "/" + path_str
+
+ def create_head(
+ self,
+ path: PathLike,
+ commit: Union["SymbolicReference", "str"] = "HEAD",
+ force: bool = False,
+ logmsg: Optional[str] = None,
+ ) -> "Head":
"""Create a new head within the repository.
For more documentation, please see the Head.create method.
:return: newly created Head Reference"""
return Head.create(self, path, commit, logmsg, force)
- def delete_head(self, *heads: 'Union[str, Head]', **kwargs: Any) -> None:
+ def delete_head(self, *heads: "Union[str, Head]", **kwargs: Any) -> None:
"""Delete the given heads
:param kwargs: Additional keyword arguments to be passed to git-branch"""
return Head.delete(self, *heads, **kwargs)
- def create_tag(self, path: PathLike, ref: str = 'HEAD',
- message: Optional[str] = None, force: bool = False, **kwargs: Any
- ) -> TagReference:
+ def create_tag(
+ self,
+ path: PathLike,
+ ref: str = "HEAD",
+ message: Optional[str] = None,
+ force: bool = False,
+ **kwargs: Any,
+ ) -> TagReference:
"""Create a new tag reference.
For more documentation, please see the TagReference.create method.
- :return: TagReference object """
+ :return: TagReference object"""
return TagReference.create(self, path, ref, message, force, **kwargs)
def delete_tag(self, *tags: TagReference) -> None:
@@ -458,7 +509,7 @@ class Repo(object):
:return: Remote reference"""
return Remote.create(self, name, url, **kwargs)
- def delete_remote(self, remote: 'Remote') -> str:
+ def delete_remote(self, remote: "Remote") -> str:
"""Delete the given remote."""
return Remote.remove(self, remote)
@@ -471,7 +522,9 @@ class Repo(object):
if config_level == "system":
return "/etc/gitconfig"
elif config_level == "user":
- config_home = os.environ.get("XDG_CONFIG_HOME") or osp.join(os.environ.get("HOME", '~'), ".config")
+ config_home = os.environ.get("XDG_CONFIG_HOME") or osp.join(
+ os.environ.get("HOME", "~"), ".config"
+ )
return osp.normpath(osp.expanduser(osp.join(config_home, "git", "config")))
elif config_level == "global":
return osp.normpath(osp.expanduser("~/.gitconfig"))
@@ -483,11 +536,15 @@ class Repo(object):
return osp.normpath(osp.join(repo_dir, "config"))
else:
- assert_never(config_level, # type:ignore[unreachable]
- ValueError(f"Invalid configuration level: {config_level!r}"))
+ assert_never(
+ config_level, # type:ignore[unreachable]
+ ValueError(f"Invalid configuration level: {config_level!r}"),
+ )
- def config_reader(self, config_level: Optional[Lit_config_levels] = None,
- ) -> GitConfigParser:
+ def config_reader(
+ self,
+ config_level: Optional[Lit_config_levels] = None,
+ ) -> GitConfigParser:
"""
:return:
GitConfigParser allowing to read the full git configuration, but not to write it
@@ -503,14 +560,18 @@ class Repo(object):
unknown, instead the global path will be used."""
files = None
if config_level is None:
- files = [self._get_config_path(cast(Lit_config_levels, f))
- for f in self.config_level if cast(Lit_config_levels, f)]
+ files = [
+ self._get_config_path(cast(Lit_config_levels, f))
+ for f in self.config_level
+ if cast(Lit_config_levels, f)
+ ]
else:
files = [self._get_config_path(config_level)]
return GitConfigParser(files, read_only=True, repo=self)
- def config_writer(self, config_level: Lit_config_levels = "repository"
- ) -> GitConfigParser:
+ def config_writer(
+ self, config_level: Lit_config_levels = "repository"
+ ) -> GitConfigParser:
"""
:return:
GitConfigParser allowing to write values of the specified configuration file level.
@@ -523,10 +584,11 @@ class Repo(object):
system = system wide configuration file
global = user level configuration file
repository = configuration file for this repository only"""
- return GitConfigParser(self._get_config_path(config_level), read_only=False, repo=self)
+ return GitConfigParser(
+ self._get_config_path(config_level), read_only=False, repo=self
+ )
- def commit(self, rev: Union[str, Commit_ish, None] = None
- ) -> Commit:
+ def commit(self, rev: Union[str, Commit_ish, None] = None) -> Commit:
"""The Commit object for the specified revision
:param rev: revision specifier, see git-rev-parse for viable options.
@@ -536,12 +598,12 @@ class Repo(object):
return self.head.commit
return self.rev_parse(str(rev) + "^0")
- def iter_trees(self, *args: Any, **kwargs: Any) -> Iterator['Tree']:
+ def iter_trees(self, *args: Any, **kwargs: Any) -> Iterator["Tree"]:
""":return: Iterator yielding Tree objects
:note: Takes all arguments known to iter_commits method"""
return (c.tree for c in self.iter_commits(*args, **kwargs))
- def tree(self, rev: Union[Tree_ish, str, None] = None) -> 'Tree':
+ def tree(self, rev: Union[Tree_ish, str, None] = None) -> "Tree":
"""The Tree object for the given treeish revision
Examples::
@@ -558,9 +620,12 @@ class Repo(object):
return self.head.commit.tree
return self.rev_parse(str(rev) + "^{tree}")
- def iter_commits(self, rev: Union[str, Commit, 'SymbolicReference', None] = None,
- paths: Union[PathLike, Sequence[PathLike]] = '',
- **kwargs: Any) -> Iterator[Commit]:
+ def iter_commits(
+ self,
+ rev: Union[str, Commit, "SymbolicReference", None] = None,
+ paths: Union[PathLike, Sequence[PathLike]] = "",
+ **kwargs: Any,
+ ) -> Iterator[Commit]:
"""A list of Commit objects representing the history of a given ref/commit
:param rev:
@@ -584,8 +649,7 @@ class Repo(object):
return Commit.iter_items(self, rev, paths, **kwargs)
- def merge_base(self, *rev: TBD, **kwargs: Any
- ) -> List[Union[Commit_ish, None]]:
+ def merge_base(self, *rev: TBD, **kwargs: Any) -> List[Union[Commit_ish, None]]:
"""Find the closest common ancestor for the given revision (e.g. Commits, Tags, References, etc)
:param rev: At least two revs to find the common ancestor for.
@@ -616,7 +680,7 @@ class Repo(object):
return res
- def is_ancestor(self, ancestor_rev: 'Commit', rev: 'Commit') -> bool:
+ def is_ancestor(self, ancestor_rev: "Commit", rev: "Commit") -> bool:
"""Check if a commit is an ancestor of another
:param ancestor_rev: Rev which should be an ancestor
@@ -639,8 +703,11 @@ class Repo(object):
if object_info.type == object_type.encode():
return True
else:
- log.debug("Commit hash points to an object of type '%s'. Requested were objects of type '%s'",
- object_info.type.decode(), object_type)
+ log.debug(
+ "Commit hash points to an object of type '%s'. Requested were objects of type '%s'",
+ object_info.type.decode(),
+ object_type,
+ )
return False
else:
return True
@@ -662,8 +729,11 @@ class Repo(object):
elif not value and fileexists:
os.unlink(filename)
- daemon_export = property(_get_daemon_export, _set_daemon_export,
- doc="If True, git-daemon may export this repository")
+ daemon_export = property(
+ _get_daemon_export,
+ _set_daemon_export,
+ doc="If True, git-daemon may export this repository",
+ )
del _get_daemon_export
del _set_daemon_export
@@ -672,10 +742,10 @@ class Repo(object):
:return: list of strings being pathnames of alternates"""
if self.git_dir:
- alternates_path = osp.join(self.git_dir, 'objects', 'info', 'alternates')
+ alternates_path = osp.join(self.git_dir, "objects", "info", "alternates")
if osp.exists(alternates_path):
- with open(alternates_path, 'rb') as f:
+ with open(alternates_path, "rb") as f:
alts = f.read().decode(defenc)
return alts.strip().splitlines()
return []
@@ -691,19 +761,28 @@ class Repo(object):
:note:
The method does not check for the existence of the paths in alts
as the caller is responsible."""
- alternates_path = osp.join(self.common_dir, 'objects', 'info', 'alternates')
+ alternates_path = osp.join(self.common_dir, "objects", "info", "alternates")
if not alts:
if osp.isfile(alternates_path):
os.remove(alternates_path)
else:
- with open(alternates_path, 'wb') as f:
+ with open(alternates_path, "wb") as f:
f.write("\n".join(alts).encode(defenc))
- alternates = property(_get_alternates, _set_alternates,
- doc="Retrieve a list of alternates paths or set a list paths to be used as alternates")
-
- def is_dirty(self, index: bool = True, working_tree: bool = True, untracked_files: bool = False,
- submodules: bool = True, path: Optional[PathLike] = None) -> bool:
+ alternates = property(
+ _get_alternates,
+ _set_alternates,
+ doc="Retrieve a list of alternates paths or set a list paths to be used as alternates",
+ )
+
+ def is_dirty(
+ self,
+ index: bool = True,
+ working_tree: bool = True,
+ untracked_files: bool = False,
+ submodules: bool = True,
+ path: Optional[PathLike] = None,
+ ) -> bool:
"""
:return:
``True``, the repository is considered dirty. By default it will react
@@ -715,15 +794,16 @@ class Repo(object):
return False
# start from the one which is fastest to evaluate
- default_args = ['--abbrev=40', '--full-index', '--raw']
+ default_args = ["--abbrev=40", "--full-index", "--raw"]
if not submodules:
- default_args.append('--ignore-submodules')
+ default_args.append("--ignore-submodules")
if path:
default_args.extend(["--", str(path)])
if index:
# diff index against HEAD
- if osp.isfile(self.index.path) and \
- len(self.git.diff('--cached', *default_args)):
+ if osp.isfile(self.index.path) and len(
+ self.git.diff("--cached", *default_args)
+ ):
return True
# END index handling
if working_tree:
@@ -755,11 +835,9 @@ class Repo(object):
def _get_untracked_files(self, *args: Any, **kwargs: Any) -> List[str]:
# make sure we get all files, not only untracked directories
- proc = self.git.status(*args,
- porcelain=True,
- untracked_files=True,
- as_process=True,
- **kwargs)
+ proc = self.git.status(
+ *args, porcelain=True, untracked_files=True, as_process=True, **kwargs
+ )
# Untracked files prefix in porcelain mode
prefix = "?? "
untracked_files = []
@@ -767,12 +845,17 @@ class Repo(object):
line = line.decode(defenc)
if not line.startswith(prefix):
continue
- filename = line[len(prefix):].rstrip('\n')
+ filename = line[len(prefix) :].rstrip("\n")
# Special characters are escaped
if filename[0] == filename[-1] == '"':
filename = filename[1:-1]
# WHATEVER ... it's a mess, but works for me
- filename = filename.encode('ascii').decode('unicode_escape').encode('latin1').decode(defenc)
+ filename = (
+ filename.encode("ascii")
+ .decode("unicode_escape")
+ .encode("latin1")
+ .decode(defenc)
+ )
untracked_files.append(filename)
finalize_process(proc)
return untracked_files
@@ -797,7 +880,9 @@ class Repo(object):
# reveal_type(self.head.reference) # => Reference
return self.head.reference
- def blame_incremental(self, rev: str | HEAD, file: str, **kwargs: Any) -> Iterator['BlameEntry']:
+ def blame_incremental(
+ self, rev: str | HEAD, file: str, **kwargs: Any
+ ) -> Iterator["BlameEntry"]:
"""Iterator for blame information for the given file at the given revision.
Unlike .blame(), this does not return the actual file's contents, only
@@ -812,13 +897,17 @@ class Repo(object):
should get a continuous range spanning all line numbers in the file.
"""
- data: bytes = self.git.blame(rev, '--', file, p=True, incremental=True, stdout_as_string=False, **kwargs)
+ data: bytes = self.git.blame(
+ rev, "--", file, p=True, incremental=True, stdout_as_string=False, **kwargs
+ )
commits: Dict[bytes, Commit] = {}
- stream = (line for line in data.split(b'\n') if line)
+ stream = (line for line in data.split(b"\n") if line)
while True:
try:
- line = next(stream) # when exhausted, causes a StopIteration, terminating this function
+ line = next(
+ stream
+ ) # when exhausted, causes a StopIteration, terminating this function
except StopIteration:
return
split_line = line.split()
@@ -835,46 +924,58 @@ class Repo(object):
line = next(stream)
except StopIteration:
return
- if line == b'boundary':
+ if line == b"boundary":
# "boundary" indicates a root commit and occurs
# instead of the "previous" tag
continue
- tag, value = line.split(b' ', 1)
+ tag, value = line.split(b" ", 1)
props[tag] = value
- if tag == b'filename':
+ if tag == b"filename":
# "filename" formally terminates the entry for --incremental
orig_filename = value
break
- c = Commit(self, hex_to_bin(hexsha),
- author=Actor(safe_decode(props[b'author']),
- safe_decode(props[b'author-mail'].lstrip(b'<').rstrip(b'>'))),
- authored_date=int(props[b'author-time']),
- committer=Actor(safe_decode(props[b'committer']),
- safe_decode(props[b'committer-mail'].lstrip(b'<').rstrip(b'>'))),
- committed_date=int(props[b'committer-time']))
+ c = Commit(
+ self,
+ hex_to_bin(hexsha),
+ author=Actor(
+ safe_decode(props[b"author"]),
+ safe_decode(props[b"author-mail"].lstrip(b"<").rstrip(b">")),
+ ),
+ authored_date=int(props[b"author-time"]),
+ committer=Actor(
+ safe_decode(props[b"committer"]),
+ safe_decode(props[b"committer-mail"].lstrip(b"<").rstrip(b">")),
+ ),
+ committed_date=int(props[b"committer-time"]),
+ )
commits[hexsha] = c
else:
# Discard all lines until we find "filename" which is
# guaranteed to be the last line
while True:
try:
- line = next(stream) # will fail if we reach the EOF unexpectedly
+ line = next(
+ stream
+ ) # will fail if we reach the EOF unexpectedly
except StopIteration:
return
- tag, value = line.split(b' ', 1)
- if tag == b'filename':
+ tag, value = line.split(b" ", 1)
+ if tag == b"filename":
orig_filename = value
break
- yield BlameEntry(commits[hexsha],
- range(lineno, lineno + num_lines),
- safe_decode(orig_filename),
- range(orig_lineno, orig_lineno + num_lines))
+ yield BlameEntry(
+ commits[hexsha],
+ range(lineno, lineno + num_lines),
+ safe_decode(orig_filename),
+ range(orig_lineno, orig_lineno + num_lines),
+ )
- def blame(self, rev: Union[str, HEAD], file: str, incremental: bool = False, **kwargs: Any
- ) -> List[List[Commit | List[str | bytes] | None]] | Iterator[BlameEntry] | None:
+ def blame(
+ self, rev: Union[str, HEAD], file: str, incremental: bool = False, **kwargs: Any
+ ) -> List[List[Commit | List[str | bytes] | None]] | Iterator[BlameEntry] | None:
"""The blame information for the given file at the given revision.
:param rev: revision specifier, see git-rev-parse for viable options.
@@ -886,7 +987,9 @@ class Repo(object):
if incremental:
return self.blame_incremental(rev, file, **kwargs)
- data: bytes = self.git.blame(rev, '--', file, p=True, stdout_as_string=False, **kwargs)
+ data: bytes = self.git.blame(
+ rev, "--", file, p=True, stdout_as_string=False, **kwargs
+ )
commits: Dict[str, Commit] = {}
blames: List[List[Commit | List[str | bytes] | None]] = []
@@ -909,7 +1012,7 @@ class Repo(object):
try:
line_str = line_bytes.rstrip().decode(defenc)
except UnicodeDecodeError:
- firstpart = ''
+ firstpart = ""
parts = []
is_binary = True
else:
@@ -929,10 +1032,10 @@ class Repo(object):
# another line of blame with the same data
digits = parts[-1].split(" ")
if len(digits) == 3:
- info = {'id': firstpart}
+ info = {"id": firstpart}
blames.append([None, []])
- elif info['id'] != firstpart:
- info = {'id': firstpart}
+ elif info["id"] != firstpart:
+ info = {"id": firstpart}
blames.append([commits.get(firstpart), []])
# END blame data initialization
else:
@@ -948,17 +1051,17 @@ class Repo(object):
# committer-time 1192271832
# committer-tz -0700 - IGNORED BY US
role = m.group(0)
- if role == 'author':
- if firstpart.endswith('-mail'):
+ if role == "author":
+ if firstpart.endswith("-mail"):
info["author_email"] = parts[-1]
- elif firstpart.endswith('-time'):
+ elif firstpart.endswith("-time"):
info["author_date"] = int(parts[-1])
elif role == firstpart:
info["author"] = parts[-1]
- elif role == 'committer':
- if firstpart.endswith('-mail'):
+ elif role == "committer":
+ if firstpart.endswith("-mail"):
info["committer_email"] = parts[-1]
- elif firstpart.endswith('-time'):
+ elif firstpart.endswith("-time"):
info["committer_date"] = int(parts[-1])
elif role == firstpart:
info["committer"] = parts[-1]
@@ -968,21 +1071,27 @@ class Repo(object):
# filename lib/grit.rb
# summary add Blob
# <and rest>
- if firstpart.startswith('filename'):
- info['filename'] = parts[-1]
- elif firstpart.startswith('summary'):
- info['summary'] = parts[-1]
- elif firstpart == '':
+ if firstpart.startswith("filename"):
+ info["filename"] = parts[-1]
+ elif firstpart.startswith("summary"):
+ info["summary"] = parts[-1]
+ elif firstpart == "":
if info:
- sha = info['id']
+ sha = info["id"]
c = commits.get(sha)
if c is None:
- c = Commit(self, hex_to_bin(sha),
- author=Actor._from_string(f"{info['author']} {info['author_email']}"),
- authored_date=info['author_date'],
- committer=Actor._from_string(
- f"{info['committer']} {info['committer_email']}"),
- committed_date=info['committer_date'])
+ c = Commit(
+ self,
+ hex_to_bin(sha),
+ author=Actor._from_string(
+ f"{info['author']} {info['author_email']}"
+ ),
+ authored_date=info["author_date"],
+ committer=Actor._from_string(
+ f"{info['committer']} {info['committer_email']}"
+ ),
+ committed_date=info["committer_date"],
+ )
commits[sha] = c
blames[-1][0] = c
# END if commit objects needs initial creation
@@ -990,7 +1099,7 @@ class Repo(object):
if blames[-1][1] is not None:
line: str | bytes
if not is_binary:
- if line_str and line_str[0] == '\t':
+ if line_str and line_str[0] == "\t":
line_str = line_str[1:]
line = line_str
else:
@@ -1001,16 +1110,22 @@ class Repo(object):
# the last line we have seen.
blames[-1][1].append(line)
- info = {'id': sha}
+ info = {"id": sha}
# END if we collected commit info
# END distinguish filename,summary,rest
# END distinguish author|committer vs filename,summary,rest
# END distinguish hexsha vs other information
return blames
- @ classmethod
- def init(cls, path: Union[PathLike, None] = None, mkdir: bool = True, odbt: Type[GitCmdObjectDB] = GitCmdObjectDB,
- expand_vars: bool = True, **kwargs: Any) -> 'Repo':
+ @classmethod
+ def init(
+ cls,
+ path: Union[PathLike, None] = None,
+ mkdir: bool = True,
+ odbt: Type[GitCmdObjectDB] = GitCmdObjectDB,
+ expand_vars: bool = True,
+ **kwargs: Any,
+ ) -> "Repo":
"""Initialize a git repository at the given path if specified
:param path:
@@ -1047,12 +1162,20 @@ class Repo(object):
git.init(**kwargs)
return cls(path, odbt=odbt)
- @ classmethod
- def _clone(cls, git: 'Git', url: PathLike, path: PathLike, odb_default_type: Type[GitCmdObjectDB],
- progress: Union['RemoteProgress', 'UpdateProgress', Callable[..., 'RemoteProgress'], None] = None,
- multi_options: Optional[List[str]] = None, **kwargs: Any
- ) -> 'Repo':
- odbt = kwargs.pop('odbt', odb_default_type)
+ @classmethod
+ def _clone(
+ cls,
+ git: "Git",
+ url: PathLike,
+ path: PathLike,
+ odb_default_type: Type[GitCmdObjectDB],
+ progress: Union[
+ "RemoteProgress", "UpdateProgress", Callable[..., "RemoteProgress"], None
+ ] = None,
+ multi_options: Optional[List[str]] = None,
+ **kwargs: Any,
+ ) -> "Repo":
+ odbt = kwargs.pop("odbt", odb_default_type)
# when pathlib.Path or other classbased path is passed
if not isinstance(path, str):
@@ -1064,23 +1187,36 @@ class Repo(object):
# becomes::
# git clone --bare /cygwin/d/foo.git /cygwin/d/C:\\Work
#
- clone_path = (Git.polish_url(path)
- if Git.is_cygwin() and 'bare' in kwargs
- else path)
- sep_dir = kwargs.get('separate_git_dir')
+ clone_path = (
+ Git.polish_url(path) if Git.is_cygwin() and "bare" in kwargs else path
+ )
+ sep_dir = kwargs.get("separate_git_dir")
if sep_dir:
- kwargs['separate_git_dir'] = Git.polish_url(sep_dir)
+ kwargs["separate_git_dir"] = Git.polish_url(sep_dir)
multi = None
if multi_options:
- multi = shlex.split(' '.join(multi_options))
- proc = git.clone(multi, Git.polish_url(str(url)), clone_path, with_extended_output=True, as_process=True,
- v=True, universal_newlines=True, **add_progress(kwargs, git, progress))
+ multi = shlex.split(" ".join(multi_options))
+ proc = git.clone(
+ multi,
+ Git.polish_url(str(url)),
+ clone_path,
+ with_extended_output=True,
+ as_process=True,
+ v=True,
+ universal_newlines=True,
+ **add_progress(kwargs, git, progress),
+ )
if progress:
- handle_process_output(proc, None, to_progress_instance(progress).new_message_handler(),
- finalize_process, decode_streams=False)
+ handle_process_output(
+ proc,
+ None,
+ to_progress_instance(progress).new_message_handler(),
+ finalize_process,
+ decode_streams=False,
+ )
else:
(stdout, stderr) = proc.communicate()
- cmdline = getattr(proc, 'args', '')
+ cmdline = getattr(proc, "args", "")
cmdline = remove_password_if_present(cmdline)
log.debug("Cmd(%s)'s unused stdout: %s", cmdline, stdout)
@@ -1089,7 +1225,11 @@ class Repo(object):
# our git command could have a different working dir than our actual
# environment, hence we prepend its working dir if required
if not osp.isabs(path):
- path = osp.join(git._working_dir, path) if git._working_dir is not None else path
+ path = (
+ osp.join(git._working_dir, path)
+ if git._working_dir is not None
+ else path
+ )
repo = cls(path, odbt=odbt)
@@ -1103,12 +1243,17 @@ class Repo(object):
# sure
if repo.remotes:
with repo.remotes[0].config_writer as writer:
- writer.set_value('url', Git.polish_url(repo.remotes[0].url))
+ writer.set_value("url", Git.polish_url(repo.remotes[0].url))
# END handle remote repo
return repo
- def clone(self, path: PathLike, progress: Optional[Callable] = None,
- multi_options: Optional[List[str]] = None, **kwargs: Any) -> 'Repo':
+ def clone(
+ self,
+ path: PathLike,
+ progress: Optional[Callable] = None,
+ multi_options: Optional[List[str]] = None,
+ **kwargs: Any,
+ ) -> "Repo":
"""Create a clone from this repository.
:param path: is the full path of the new repo (traditionally ends with ./<name>.git).
@@ -1123,12 +1268,26 @@ class Repo(object):
* All remaining keyword arguments are given to the git-clone command
:return: ``git.Repo`` (the newly cloned repo)"""
- return self._clone(self.git, self.common_dir, path, type(self.odb), progress, multi_options, **kwargs)
-
- @ classmethod
- def clone_from(cls, url: PathLike, to_path: PathLike, progress: Optional[Callable] = None,
- env: Optional[Mapping[str, str]] = None,
- multi_options: Optional[List[str]] = None, **kwargs: Any) -> 'Repo':
+ return self._clone(
+ self.git,
+ self.common_dir,
+ path,
+ type(self.odb),
+ progress,
+ multi_options,
+ **kwargs,
+ )
+
+ @classmethod
+ def clone_from(
+ cls,
+ url: PathLike,
+ to_path: PathLike,
+ progress: Optional[Callable] = None,
+ env: Optional[Mapping[str, str]] = None,
+ multi_options: Optional[List[str]] = None,
+ **kwargs: Any,
+ ) -> "Repo":
"""Create a clone from the given URL
:param url: valid git url, see http://www.kernel.org/pub/software/scm/git/docs/git-clone.html#URLS
@@ -1146,10 +1305,17 @@ class Repo(object):
git = cls.GitCommandWrapperType(os.getcwd())
if env is not None:
git.update_environment(**env)
- return cls._clone(git, url, to_path, GitCmdObjectDB, progress, multi_options, **kwargs)
-
- def archive(self, ostream: Union[TextIO, BinaryIO], treeish: Optional[str] = None,
- prefix: Optional[str] = None, **kwargs: Any) -> Repo:
+ return cls._clone(
+ git, url, to_path, GitCmdObjectDB, progress, multi_options, **kwargs
+ )
+
+ def archive(
+ self,
+ ostream: Union[TextIO, BinaryIO],
+ treeish: Optional[str] = None,
+ prefix: Optional[str] = None,
+ **kwargs: Any,
+ ) -> Repo:
"""Archive the tree at the given revision.
:param ostream: file compatible stream object to which the archive will be written as bytes
@@ -1166,10 +1332,10 @@ class Repo(object):
:return: self"""
if treeish is None:
treeish = self.head.commit
- if prefix and 'prefix' not in kwargs:
- kwargs['prefix'] = prefix
- kwargs['output_stream'] = ostream
- path = kwargs.pop('path', [])
+ if prefix and "prefix" not in kwargs:
+ kwargs["prefix"] = prefix
+ kwargs["output_stream"] = ostream
+ path = kwargs.pop("path", [])
path = cast(Union[PathLike, List[PathLike], Tuple[PathLike, ...]], path)
if not isinstance(path, (tuple, list)):
path = [path]
@@ -1186,7 +1352,7 @@ class Repo(object):
if self.bare:
return False
if self.working_tree_dir:
- return osp.isfile(osp.join(self.working_tree_dir, '.git'))
+ return osp.isfile(osp.join(self.working_tree_dir, ".git"))
else:
return False # or raise Error?
@@ -1194,7 +1360,7 @@ class Repo(object):
def __repr__(self) -> str:
clazz = self.__class__
- return '<%s.%s %r>' % (clazz.__module__, clazz.__name__, self.git_dir)
+ return "<%s.%s %r>" % (clazz.__module__, clazz.__name__, self.git_dir)
def currently_rebasing_on(self) -> Commit | None:
"""
diff --git a/git/repo/fun.py b/git/repo/fun.py
index 74c0657d..03f9cabb 100644
--- a/git/repo/fun.py
+++ b/git/repo/fun.py
@@ -31,8 +31,17 @@ if TYPE_CHECKING:
# ----------------------------------------------------------------------------
-__all__ = ('rev_parse', 'is_git_dir', 'touch', 'find_submodule_git_dir', 'name_to_object', 'short_to_long', 'deref_tag',
- 'to_commit', 'find_worktree_git_dir')
+__all__ = (
+ "rev_parse",
+ "is_git_dir",
+ "touch",
+ "find_submodule_git_dir",
+ "name_to_object",
+ "short_to_long",
+ "deref_tag",
+ "to_commit",
+ "find_worktree_git_dir",
+)
def touch(filename: str) -> str:
@@ -41,8 +50,8 @@ def touch(filename: str) -> str:
return filename
-def is_git_dir(d: 'PathLike') -> bool:
- """ This is taken from the git setup.c:is_git_directory
+def is_git_dir(d: "PathLike") -> bool:
+ """This is taken from the git setup.c:is_git_directory
function.
@throws WorkTreeRepositoryUnsupported if it sees a worktree directory. It's quite hacky to do that here,
@@ -50,20 +59,23 @@ def is_git_dir(d: 'PathLike') -> bool:
There is the unlikely danger to throw if we see directories which just look like a worktree dir,
but are none."""
if osp.isdir(d):
- if (osp.isdir(osp.join(d, 'objects')) or 'GIT_OBJECT_DIRECTORY' in os.environ) \
- and osp.isdir(osp.join(d, 'refs')):
- headref = osp.join(d, 'HEAD')
- return osp.isfile(headref) or \
- (osp.islink(headref) and
- os.readlink(headref).startswith('refs'))
- elif (osp.isfile(osp.join(d, 'gitdir')) and
- osp.isfile(osp.join(d, 'commondir')) and
- osp.isfile(osp.join(d, 'gitfile'))):
+ if (
+ osp.isdir(osp.join(d, "objects")) or "GIT_OBJECT_DIRECTORY" in os.environ
+ ) and osp.isdir(osp.join(d, "refs")):
+ headref = osp.join(d, "HEAD")
+ return osp.isfile(headref) or (
+ osp.islink(headref) and os.readlink(headref).startswith("refs")
+ )
+ elif (
+ osp.isfile(osp.join(d, "gitdir"))
+ and osp.isfile(osp.join(d, "commondir"))
+ and osp.isfile(osp.join(d, "gitfile"))
+ ):
raise WorkTreeRepositoryUnsupported(d)
return False
-def find_worktree_git_dir(dotgit: 'PathLike') -> Optional[str]:
+def find_worktree_git_dir(dotgit: "PathLike") -> Optional[str]:
"""Search for a gitdir for this worktree."""
try:
statbuf = os.stat(dotgit)
@@ -73,16 +85,16 @@ def find_worktree_git_dir(dotgit: 'PathLike') -> Optional[str]:
return None
try:
- lines = open(dotgit, 'r').readlines()
- for key, value in [line.strip().split(': ') for line in lines]:
- if key == 'gitdir':
+ lines = open(dotgit, "r").readlines()
+ for key, value in [line.strip().split(": ") for line in lines]:
+ if key == "gitdir":
return value
except ValueError:
pass
return None
-def find_submodule_git_dir(d: 'PathLike') -> Optional['PathLike']:
+def find_submodule_git_dir(d: "PathLike") -> Optional["PathLike"]:
"""Search for a submodule repo."""
if is_git_dir(d):
return d
@@ -94,7 +106,7 @@ def find_submodule_git_dir(d: 'PathLike') -> Optional['PathLike']:
# it's probably not a file
pass
else:
- if content.startswith('gitdir: '):
+ if content.startswith("gitdir: "):
path = content[8:]
if Git.is_cygwin():
@@ -107,7 +119,7 @@ def find_submodule_git_dir(d: 'PathLike') -> Optional['PathLike']:
return None
-def short_to_long(odb: 'GitCmdObjectDB', hexsha: str) -> Optional[bytes]:
+def short_to_long(odb: "GitCmdObjectDB", hexsha: str) -> Optional[bytes]:
""":return: long hexadecimal sha1 from the given less-than-40 byte hexsha
or None if no candidate could be found.
:param hexsha: hexsha with less than 40 byte"""
@@ -118,8 +130,9 @@ def short_to_long(odb: 'GitCmdObjectDB', hexsha: str) -> Optional[bytes]:
# END exception handling
-def name_to_object(repo: 'Repo', name: str, return_ref: bool = False
- ) -> Union[SymbolicReference, 'Commit', 'TagObject', 'Blob', 'Tree']:
+def name_to_object(
+ repo: "Repo", name: str, return_ref: bool = False
+) -> Union[SymbolicReference, "Commit", "TagObject", "Blob", "Tree"]:
"""
:return: object specified by the given name, hexshas ( short and long )
as well as references are supported
@@ -141,7 +154,14 @@ def name_to_object(repo: 'Repo', name: str, return_ref: bool = False
# if we couldn't find an object for what seemed to be a short hexsha
# try to find it as reference anyway, it could be named 'aaa' for instance
if hexsha is None:
- for base in ('%s', 'refs/%s', 'refs/tags/%s', 'refs/heads/%s', 'refs/remotes/%s', 'refs/remotes/%s/HEAD'):
+ for base in (
+ "%s",
+ "refs/%s",
+ "refs/tags/%s",
+ "refs/heads/%s",
+ "refs/remotes/%s",
+ "refs/remotes/%s/HEAD",
+ ):
try:
hexsha = SymbolicReference.dereference_recursive(repo, base % name)
if return_ref:
@@ -166,7 +186,7 @@ def name_to_object(repo: 'Repo', name: str, return_ref: bool = False
return Object.new_from_sha(repo, hex_to_bin(hexsha))
-def deref_tag(tag: 'Tag') -> 'TagObject':
+def deref_tag(tag: "Tag") -> "TagObject":
"""Recursively dereference a tag and return the resulting object"""
while True:
try:
@@ -177,9 +197,9 @@ def deref_tag(tag: 'Tag') -> 'TagObject':
return tag
-def to_commit(obj: Object) -> Union['Commit', 'TagObject']:
+def to_commit(obj: Object) -> Union["Commit", "TagObject"]:
"""Convert the given object to a commit if possible and return it"""
- if obj.type == 'tag':
+ if obj.type == "tag":
obj = deref_tag(obj)
if obj.type != "commit":
@@ -188,7 +208,7 @@ def to_commit(obj: Object) -> Union['Commit', 'TagObject']:
return obj
-def rev_parse(repo: 'Repo', rev: str) -> Union['Commit', 'Tag', 'Tree', 'Blob']:
+def rev_parse(repo: "Repo", rev: str) -> Union["Commit", "Tag", "Tree", "Blob"]:
"""
:return: Object at the given revision, either Commit, Tag, Tree or Blob
:param rev: git-rev-parse compatible revision specification as string, please see
@@ -199,12 +219,12 @@ def rev_parse(repo: 'Repo', rev: str) -> Union['Commit', 'Tag', 'Tree', 'Blob']:
:raise IndexError: If invalid reflog index is specified"""
# colon search mode ?
- if rev.startswith(':/'):
+ if rev.startswith(":/"):
# colon search mode
raise NotImplementedError("commit by message search ( regex )")
# END handle search
- obj: Union[Commit_ish, 'Reference', None] = None
+ obj: Union[Commit_ish, "Reference", None] = None
ref = None
output_type = "commit"
start = 0
@@ -223,8 +243,10 @@ def rev_parse(repo: 'Repo', rev: str) -> Union['Commit', 'Tag', 'Tree', 'Blob']:
if start == 0:
ref = repo.head.ref
else:
- if token == '@':
- ref = cast('Reference', name_to_object(repo, rev[:start], return_ref=True))
+ if token == "@":
+ ref = cast(
+ "Reference", name_to_object(repo, rev[:start], return_ref=True)
+ )
else:
obj = cast(Commit_ish, name_to_object(repo, rev[:start]))
# END handle token
@@ -233,38 +255,38 @@ def rev_parse(repo: 'Repo', rev: str) -> Union['Commit', 'Tag', 'Tree', 'Blob']:
assert obj is not None
if ref is not None:
- obj = cast('Commit', ref.commit)
+ obj = cast("Commit", ref.commit)
# END handle ref
# END initialize obj on first token
start += 1
# try to parse {type}
- if start < lr and rev[start] == '{':
- end = rev.find('}', start)
+ if start < lr and rev[start] == "{":
+ end = rev.find("}", start)
if end == -1:
raise ValueError("Missing closing brace to define type in %s" % rev)
- output_type = rev[start + 1:end] # exclude brace
+ output_type = rev[start + 1 : end] # exclude brace
# handle type
- if output_type == 'commit':
+ if output_type == "commit":
pass # default
- elif output_type == 'tree':
+ elif output_type == "tree":
try:
obj = cast(Commit_ish, obj)
obj = to_commit(obj).tree
except (AttributeError, ValueError):
- pass # error raised later
+ pass # error raised later
# END exception handling
- elif output_type in ('', 'blob'):
- obj = cast('TagObject', obj)
- if obj and obj.type == 'tag':
+ elif output_type in ("", "blob"):
+ obj = cast("TagObject", obj)
+ if obj and obj.type == "tag":
obj = deref_tag(obj)
else:
# cannot do anything for non-tags
pass
# END handle tag
- elif token == '@':
+ elif token == "@":
# try single int
assert ref is not None, "Require Reference to access reflog"
revlog_index = None
@@ -274,7 +296,9 @@ def rev_parse(repo: 'Repo', rev: str) -> Union['Commit', 'Tag', 'Tree', 'Blob']:
except ValueError as e:
# TODO: Try to parse the other date options, using parse_date
# maybe
- raise NotImplementedError("Support for additional @{...} modes not implemented") from e
+ raise NotImplementedError(
+ "Support for additional @{...} modes not implemented"
+ ) from e
# END handle revlog index
try:
@@ -286,17 +310,22 @@ def rev_parse(repo: 'Repo', rev: str) -> Union['Commit', 'Tag', 'Tree', 'Blob']:
obj = Object.new_from_sha(repo, hex_to_bin(entry.newhexsha))
# make it pass the following checks
- output_type = ''
+ output_type = ""
else:
- raise ValueError("Invalid output type: %s ( in %s )" % (output_type, rev))
+ raise ValueError(
+ "Invalid output type: %s ( in %s )" % (output_type, rev)
+ )
# END handle output type
# empty output types don't require any specific type, its just about dereferencing tags
if output_type and obj and obj.type != output_type:
- raise ValueError("Could not accommodate requested object type %r, got %s" % (output_type, obj.type))
+ raise ValueError(
+ "Could not accommodate requested object type %r, got %s"
+ % (output_type, obj.type)
+ )
# END verify output type
- start = end + 1 # skip brace
+ start = end + 1 # skip brace
parsed_to = start
continue
# END parse type
@@ -348,7 +377,8 @@ def rev_parse(repo: 'Repo', rev: str) -> Union['Commit', 'Tag', 'Tree', 'Blob']:
except (IndexError, AttributeError) as e:
raise BadName(
f"Invalid revision spec '{rev}' - not enough "
- f"parent commits to reach '{token}{int(num)}'") from e
+ f"parent commits to reach '{token}{int(num)}'"
+ ) from e
# END exception handling
# END parse loop
@@ -362,6 +392,9 @@ def rev_parse(repo: 'Repo', rev: str) -> Union['Commit', 'Tag', 'Tree', 'Blob']:
raise ValueError("Revision specifier could not be parsed: %s" % rev)
if parsed_to != lr:
- raise ValueError("Didn't consume complete rev spec %s, consumed part: %s" % (rev, rev[:parsed_to]))
+ raise ValueError(
+ "Didn't consume complete rev spec %s, consumed part: %s"
+ % (rev, rev[:parsed_to])
+ )
return obj
diff --git a/git/types.py b/git/types.py
index 7f44ba24..24df887a 100644
--- a/git/types.py
+++ b/git/types.py
@@ -4,14 +4,38 @@
import os
import sys
-from typing import (Callable, Dict, NoReturn, Sequence, Tuple, Union, Any, Iterator, # noqa: F401
- NamedTuple, TYPE_CHECKING, TypeVar) # noqa: F401
+from typing import (
+ Callable,
+ Dict,
+ NoReturn,
+ Sequence,
+ Tuple,
+ Union,
+ Any,
+ Iterator, # noqa: F401
+ NamedTuple,
+ TYPE_CHECKING,
+ TypeVar,
+) # noqa: F401
if sys.version_info[:2] >= (3, 8):
- from typing import Final, Literal, SupportsIndex, TypedDict, Protocol, runtime_checkable # noqa: F401
+ from typing import (
+ Final,
+ Literal,
+ SupportsIndex,
+ TypedDict,
+ Protocol,
+ runtime_checkable,
+ ) # noqa: F401
else:
- from typing_extensions import (Final, Literal, SupportsIndex, # noqa: F401
- TypedDict, Protocol, runtime_checkable) # noqa: F401
+ from typing_extensions import (
+ Final,
+ Literal,
+ SupportsIndex, # noqa: F401
+ TypedDict,
+ Protocol,
+ runtime_checkable,
+ ) # noqa: F401
# if sys.version_info[:2] >= (3, 10):
# from typing import TypeGuard # noqa: F401
@@ -28,18 +52,19 @@ elif sys.version_info[:2] >= (3, 9):
if TYPE_CHECKING:
from git.repo import Repo
from git.objects import Commit, Tree, TagObject, Blob
+
# from git.refs import SymbolicReference
TBD = Any
-_T = TypeVar('_T')
+_T = TypeVar("_T")
-Tree_ish = Union['Commit', 'Tree']
-Commit_ish = Union['Commit', 'TagObject', 'Blob', 'Tree']
-Lit_commit_ish = Literal['commit', 'tag', 'blob', 'tree']
+Tree_ish = Union["Commit", "Tree"]
+Commit_ish = Union["Commit", "TagObject", "Blob", "Tree"]
+Lit_commit_ish = Literal["commit", "tag", "blob", "tree"]
# Config_levels ---------------------------------------------------------
-Lit_config_levels = Literal['system', 'global', 'user', 'repository']
+Lit_config_levels = Literal["system", "global", "user", "repository"]
# def is_config_level(inp: str) -> TypeGuard[Lit_config_levels]:
@@ -47,12 +72,16 @@ Lit_config_levels = Literal['system', 'global', 'user', 'repository']
# return inp in ("system", "user", "global", "repository")
-ConfigLevels_Tup = Tuple[Literal['system'], Literal['user'], Literal['global'], Literal['repository']]
+ConfigLevels_Tup = Tuple[
+ Literal["system"], Literal["user"], Literal["global"], Literal["repository"]
+]
-#-----------------------------------------------------------------------------------
+# -----------------------------------------------------------------------------------
-def assert_never(inp: NoReturn, raise_error: bool = True, exc: Union[Exception, None] = None) -> None:
+def assert_never(
+ inp: NoReturn, raise_error: bool = True, exc: Union[Exception, None] = None
+) -> None:
"""For use in exhaustive checking of literal or Enum in if/else chain.
Should only be reached if all members not handled OR attempt to pass non-members through chain.
@@ -63,7 +92,9 @@ def assert_never(inp: NoReturn, raise_error: bool = True, exc: Union[Exception,
"""
if raise_error:
if exc is None:
- raise ValueError(f"An unhandled Literal ({inp}) in an if/else chain was found")
+ raise ValueError(
+ f"An unhandled Literal ({inp}) in an if/else chain was found"
+ )
else:
raise exc
else:
@@ -90,7 +121,7 @@ class HSH_TD(TypedDict):
@runtime_checkable
class Has_Repo(Protocol):
- repo: 'Repo'
+ repo: "Repo"
@runtime_checkable
diff --git a/git/util.py b/git/util.py
index 0711265a..edc8750d 100644
--- a/git/util.py
+++ b/git/util.py
@@ -26,9 +26,26 @@ import warnings
# typing ---------------------------------------------------------
-from typing import (Any, AnyStr, BinaryIO, Callable, Dict, Generator, IO, Iterator, List,
- Optional, Pattern, Sequence, Tuple, TypeVar, Union, cast,
- TYPE_CHECKING, overload, )
+from typing import (
+ Any,
+ AnyStr,
+ BinaryIO,
+ Callable,
+ Dict,
+ Generator,
+ IO,
+ Iterator,
+ List,
+ Optional,
+ Pattern,
+ Sequence,
+ Tuple,
+ TypeVar,
+ Union,
+ cast,
+ TYPE_CHECKING,
+ overload,
+)
import pathlib
@@ -37,14 +54,25 @@ if TYPE_CHECKING:
from git.repo.base import Repo
from git.config import GitConfigParser, SectionConstraint
from git import Git
+
# from git.objects.base import IndexObject
-from .types import (Literal, SupportsIndex, Protocol, runtime_checkable, # because behind py version guards
- PathLike, HSH_TD, Total_TD, Files_TD, # aliases
- Has_id_attribute)
+from .types import (
+ Literal,
+ SupportsIndex,
+ Protocol,
+ runtime_checkable, # because behind py version guards
+ PathLike,
+ HSH_TD,
+ Total_TD,
+ Files_TD, # aliases
+ Has_id_attribute,
+)
-T_IterableObj = TypeVar('T_IterableObj', bound=Union['IterableObj', 'Has_id_attribute'], covariant=True)
+T_IterableObj = TypeVar(
+ "T_IterableObj", bound=Union["IterableObj", "Has_id_attribute"], covariant=True
+)
# So IterableList[Head] is subtype of IterableList[IterableObj]
# ---------------------------------------------------------------------
@@ -52,14 +80,14 @@ T_IterableObj = TypeVar('T_IterableObj', bound=Union['IterableObj', 'Has_id_attr
from gitdb.util import ( # NOQA @IgnorePep8
make_sha,
- LockedFD, # @UnusedImport
- file_contents_ro, # @UnusedImport
- file_contents_ro_filepath, # @UnusedImport
- LazyMixin, # @UnusedImport
- to_hex_sha, # @UnusedImport
- to_bin_sha, # @UnusedImport
- bin_to_hex, # @UnusedImport
- hex_to_bin, # @UnusedImport
+ LockedFD, # @UnusedImport
+ file_contents_ro, # @UnusedImport
+ file_contents_ro_filepath, # @UnusedImport
+ LazyMixin, # @UnusedImport
+ to_hex_sha, # @UnusedImport
+ to_bin_sha, # @UnusedImport
+ bin_to_hex, # @UnusedImport
+ hex_to_bin, # @UnusedImport
)
@@ -67,11 +95,26 @@ from gitdb.util import ( # NOQA @IgnorePep8
# Handle once test-cases are back up and running.
# Most of these are unused here, but are for use by git-python modules so these
# don't see gitdb all the time. Flake of course doesn't like it.
-__all__ = ["stream_copy", "join_path", "to_native_path_linux",
- "join_path_native", "Stats", "IndexFileSHA1Writer", "IterableObj", "IterableList",
- "BlockingLockFile", "LockFile", 'Actor', 'get_user_id', 'assure_directory_exists',
- 'RemoteProgress', 'CallableRemoteProgress', 'rmtree', 'unbare_repo',
- 'HIDE_WINDOWS_KNOWN_ERRORS']
+__all__ = [
+ "stream_copy",
+ "join_path",
+ "to_native_path_linux",
+ "join_path_native",
+ "Stats",
+ "IndexFileSHA1Writer",
+ "IterableObj",
+ "IterableList",
+ "BlockingLockFile",
+ "LockFile",
+ "Actor",
+ "get_user_id",
+ "assure_directory_exists",
+ "RemoteProgress",
+ "CallableRemoteProgress",
+ "rmtree",
+ "unbare_repo",
+ "HIDE_WINDOWS_KNOWN_ERRORS",
+]
log = logging.getLogger(__name__)
@@ -81,12 +124,14 @@ log = logging.getLogger(__name__)
#: We need an easy way to see if Appveyor TCs start failing,
#: so the errors marked with this var are considered "acknowledged" ones, awaiting remedy,
#: till then, we wish to hide them.
-HIDE_WINDOWS_KNOWN_ERRORS = is_win and os.environ.get('HIDE_WINDOWS_KNOWN_ERRORS', True)
-HIDE_WINDOWS_FREEZE_ERRORS = is_win and os.environ.get('HIDE_WINDOWS_FREEZE_ERRORS', True)
+HIDE_WINDOWS_KNOWN_ERRORS = is_win and os.environ.get("HIDE_WINDOWS_KNOWN_ERRORS", True)
+HIDE_WINDOWS_FREEZE_ERRORS = is_win and os.environ.get(
+ "HIDE_WINDOWS_FREEZE_ERRORS", True
+)
# { Utility Methods
-T = TypeVar('T')
+T = TypeVar("T")
def unbare_repo(func: Callable[..., T]) -> Callable[..., T]:
@@ -96,11 +141,14 @@ def unbare_repo(func: Callable[..., T]) -> Callable[..., T]:
from .exc import InvalidGitRepositoryError
@wraps(func)
- def wrapper(self: 'Remote', *args: Any, **kwargs: Any) -> T:
+ def wrapper(self: "Remote", *args: Any, **kwargs: Any) -> T:
if self.repo.bare:
- raise InvalidGitRepositoryError("Method '%s' cannot operate on bare repositories" % func.__name__)
+ raise InvalidGitRepositoryError(
+ "Method '%s' cannot operate on bare repositories" % func.__name__
+ )
# END bare method
return func(self, *args, **kwargs)
+
# END wrapper
return wrapper
@@ -131,7 +179,10 @@ def rmtree(path: PathLike) -> None:
except Exception as ex:
if HIDE_WINDOWS_KNOWN_ERRORS:
from unittest import SkipTest
- raise SkipTest("FIXME: fails with: PermissionError\n {}".format(ex)) from ex
+
+ raise SkipTest(
+ "FIXME: fails with: PermissionError\n {}".format(ex)
+ ) from ex
raise
return shutil.rmtree(path, False, onerror)
@@ -145,7 +196,9 @@ def rmfile(path: PathLike) -> None:
os.remove(path)
-def stream_copy(source: BinaryIO, destination: BinaryIO, chunk_size: int = 512 * 1024) -> int:
+def stream_copy(
+ source: BinaryIO, destination: BinaryIO, chunk_size: int = 512 * 1024
+) -> int:
"""Copy all data from the source stream into the destination stream in chunks
of size chunk_size
@@ -169,24 +222,25 @@ def join_path(a: PathLike, *p: PathLike) -> PathLike:
b = str(b)
if not b:
continue
- if b.startswith('/'):
+ if b.startswith("/"):
path += b[1:]
- elif path == '' or path.endswith('/'):
+ elif path == "" or path.endswith("/"):
path += b
else:
- path += '/' + b
+ path += "/" + b
# END for each path token to add
return path
if is_win:
+
def to_native_path_windows(path: PathLike) -> PathLike:
path = str(path)
- return path.replace('/', '\\')
+ return path.replace("/", "\\")
def to_native_path_linux(path: PathLike) -> str:
path = str(path)
- return path.replace('\\', '/')
+ return path.replace("\\", "/")
__all__.append("to_native_path_windows")
to_native_path = to_native_path_windows
@@ -222,10 +276,14 @@ def assure_directory_exists(path: PathLike, is_file: bool = False) -> bool:
def _get_exe_extensions() -> Sequence[str]:
- PATHEXT = os.environ.get('PATHEXT', None)
- return tuple(p.upper() for p in PATHEXT.split(os.pathsep)) if PATHEXT \
- else ('.BAT', 'COM', '.EXE') if is_win \
- else ('')
+ PATHEXT = os.environ.get("PATHEXT", None)
+ return (
+ tuple(p.upper() for p in PATHEXT.split(os.pathsep))
+ if PATHEXT
+ else (".BAT", "COM", ".EXE")
+ if is_win
+ else ("")
+ )
def py_where(program: str, path: Optional[PathLike] = None) -> List[str]:
@@ -233,9 +291,15 @@ def py_where(program: str, path: Optional[PathLike] = None) -> List[str]:
winprog_exts = _get_exe_extensions()
def is_exec(fpath: str) -> bool:
- return osp.isfile(fpath) and os.access(fpath, os.X_OK) and (
- os.name != 'nt' or not winprog_exts or any(fpath.upper().endswith(ext)
- for ext in winprog_exts))
+ return (
+ osp.isfile(fpath)
+ and os.access(fpath, os.X_OK)
+ and (
+ os.name != "nt"
+ or not winprog_exts
+ or any(fpath.upper().endswith(ext) for ext in winprog_exts)
+ )
+ )
progs = []
if not path:
@@ -244,7 +308,7 @@ def py_where(program: str, path: Optional[PathLike] = None) -> List[str]:
folder = folder.strip('"')
if folder:
exe_path = osp.join(folder, program)
- for f in [exe_path] + ['%s%s' % (exe_path, e) for e in winprog_exts]:
+ for f in [exe_path] + ["%s%s" % (exe_path, e) for e in winprog_exts]:
if is_exec(f):
progs.append(f)
return progs
@@ -264,38 +328,26 @@ def _cygexpath(drive: Optional[str], path: str) -> str:
else:
p = cygpath(p)
elif drive:
- p = '/cygdrive/%s/%s' % (drive.lower(), p)
+ p = "/cygdrive/%s/%s" % (drive.lower(), p)
p_str = str(p) # ensure it is a str and not AnyPath
- return p_str.replace('\\', '/')
+ return p_str.replace("\\", "/")
_cygpath_parsers: Tuple[Tuple[Pattern[str], Callable, bool], ...] = (
# See: https://msdn.microsoft.com/en-us/library/windows/desktop/aa365247(v=vs.85).aspx
# and: https://www.cygwin.com/cygwin-ug-net/using.html#unc-paths
- (re.compile(r"\\\\\?\\UNC\\([^\\]+)\\([^\\]+)(?:\\(.*))?"),
- (lambda server, share, rest_path: '//%s/%s/%s' % (server, share, rest_path.replace('\\', '/'))),
- False
- ),
-
- (re.compile(r"\\\\\?\\(\w):[/\\](.*)"),
- (_cygexpath),
- False
- ),
-
- (re.compile(r"(\w):[/\\](.*)"),
- (_cygexpath),
- False
- ),
-
- (re.compile(r"file:(.*)", re.I),
- (lambda rest_path: rest_path),
- True
- ),
-
- (re.compile(r"(\w{2,}:.*)"), # remote URL, do nothing
- (lambda url: url),
- False
- ),
+ (
+ re.compile(r"\\\\\?\\UNC\\([^\\]+)\\([^\\]+)(?:\\(.*))?"),
+ (
+ lambda server, share, rest_path: "//%s/%s/%s"
+ % (server, share, rest_path.replace("\\", "/"))
+ ),
+ False,
+ ),
+ (re.compile(r"\\\\\?\\(\w):[/\\](.*)"), (_cygexpath), False),
+ (re.compile(r"(\w):[/\\](.*)"), (_cygexpath), False),
+ (re.compile(r"file:(.*)", re.I), (lambda rest_path: rest_path), True),
+ (re.compile(r"(\w{2,}:.*)"), (lambda url: url), False), # remote URL, do nothing
)
@@ -303,7 +355,7 @@ def cygpath(path: str) -> str:
"""Use :meth:`git.cmd.Git.polish_url()` instead, that works on any environment."""
path = str(path) # ensure is str and not AnyPath.
# Fix to use Paths when 3.5 dropped. or to be just str if only for urls?
- if not path.startswith(('/cygdrive', '//')):
+ if not path.startswith(("/cygdrive", "//")):
for regex, parser, recurse in _cygpath_parsers:
match = regex.match(path)
if match:
@@ -325,9 +377,9 @@ def decygpath(path: PathLike) -> str:
m = _decygpath_regex.match(path)
if m:
drive, rest_path = m.groups()
- path = '%s:%s' % (drive.upper(), rest_path or '')
+ path = "%s:%s" % (drive.upper(), rest_path or "")
- return path.replace('/', '\\')
+ return path.replace("/", "\\")
#: Store boolean flags denoting if a specific Git executable
@@ -363,14 +415,15 @@ def is_cygwin_git(git_executable: Union[None, PathLike]) -> bool:
git_dir = osp.dirname(res[0]) if res else ""
# Just a name given, not a real path.
- uname_cmd = osp.join(git_dir, 'uname')
- process = subprocess.Popen([uname_cmd], stdout=subprocess.PIPE,
- universal_newlines=True)
+ uname_cmd = osp.join(git_dir, "uname")
+ process = subprocess.Popen(
+ [uname_cmd], stdout=subprocess.PIPE, universal_newlines=True
+ )
uname_out, _ = process.communicate()
- #retcode = process.poll()
- is_cygwin = 'CYGWIN' in uname_out
+ # retcode = process.poll()
+ is_cygwin = "CYGWIN" in uname_out
except Exception as ex:
- log.debug('Failed checking if running in CYGWIN due to: %r', ex)
+ log.debug("Failed checking if running in CYGWIN due to: %r", ex)
_is_cygwin_cache[git_executable] = is_cygwin
return is_cygwin
@@ -381,7 +434,9 @@ def get_user_id() -> str:
return "%s@%s" % (getpass.getuser(), platform.node())
-def finalize_process(proc: Union[subprocess.Popen, 'Git.AutoInterrupt'], **kwargs: Any) -> None:
+def finalize_process(
+ proc: Union[subprocess.Popen, "Git.AutoInterrupt"], **kwargs: Any
+) -> None:
"""Wait for the process (clone, fetch, pull or push) and handle its errors accordingly"""
# TODO: No close proc-streams??
proc.wait(**kwargs)
@@ -398,13 +453,15 @@ def expand_path(p: PathLike, expand_vars: bool = ...) -> str:
...
-def expand_path(p: Union[None, PathLike], expand_vars: bool = True) -> Optional[PathLike]:
+def expand_path(
+ p: Union[None, PathLike], expand_vars: bool = True
+) -> Optional[PathLike]:
if isinstance(p, pathlib.Path):
return p.resolve()
try:
p = osp.expanduser(p) # type: ignore
if expand_vars:
- p = osp.expandvars(p) # type: ignore
+ p = osp.expandvars(p) # type: ignore
return osp.normpath(osp.abspath(p)) # type: ignore
except Exception:
return None
@@ -430,11 +487,9 @@ def remove_password_if_present(cmdline: Sequence[str]) -> List[str]:
continue
if url.password is not None:
- url = url._replace(
- netloc=url.netloc.replace(url.password, "*****"))
+ url = url._replace(netloc=url.netloc.replace(url.password, "*****"))
if url.username is not None:
- url = url._replace(
- netloc=url.netloc.replace(url.username, "*****"))
+ url = url._replace(netloc=url.netloc.replace(url.username, "*****"))
new_cmdline[index] = urlunsplit(url)
except ValueError:
# This is not a valid URL
@@ -452,19 +507,31 @@ class RemoteProgress(object):
Handler providing an interface to parse progress information emitted by git-push
and git-fetch and to dispatch callbacks allowing subclasses to react to the progress.
"""
+
_num_op_codes: int = 9
- BEGIN, END, COUNTING, COMPRESSING, WRITING, RECEIVING, RESOLVING, FINDING_SOURCES, CHECKING_OUT = \
- [1 << x for x in range(_num_op_codes)]
+ (
+ BEGIN,
+ END,
+ COUNTING,
+ COMPRESSING,
+ WRITING,
+ RECEIVING,
+ RESOLVING,
+ FINDING_SOURCES,
+ CHECKING_OUT,
+ ) = [1 << x for x in range(_num_op_codes)]
STAGE_MASK = BEGIN | END
OP_MASK = ~STAGE_MASK
- DONE_TOKEN = 'done.'
- TOKEN_SEPARATOR = ', '
+ DONE_TOKEN = "done."
+ TOKEN_SEPARATOR = ", "
- __slots__ = ('_cur_line',
- '_seen_ops',
- 'error_lines', # Lines that started with 'error:' or 'fatal:'.
- 'other_lines') # Lines not denoting progress (i.e.g. push-infos).
+ __slots__ = (
+ "_cur_line",
+ "_seen_ops",
+ "error_lines", # Lines that started with 'error:' or 'fatal:'.
+ "other_lines",
+ ) # Lines not denoting progress (i.e.g. push-infos).
re_op_absolute = re.compile(r"(remote: )?([\w\s]+):\s+()(\d+)()(.*)")
re_op_relative = re.compile(r"(remote: )?([\w\s]+):\s+(\d+)% \((\d+)/(\d+)\)(.*)")
@@ -486,13 +553,13 @@ class RemoteProgress(object):
# Compressing objects: 50% (1/2)
# Compressing objects: 100% (2/2)
# Compressing objects: 100% (2/2), done.
- if isinstance(line, bytes): # mypy argues about ternary assignment
- line_str = line.decode('utf-8')
+ if isinstance(line, bytes): # mypy argues about ternary assignment
+ line_str = line.decode("utf-8")
else:
line_str = line
self._cur_line = line_str
- if self._cur_line.startswith(('error:', 'fatal:')):
+ if self._cur_line.startswith(("error:", "fatal:")):
self.error_lines.append(self._cur_line)
return
@@ -531,13 +598,13 @@ class RemoteProgress(object):
op_code |= self.COMPRESSING
elif op_name == "Writing objects":
op_code |= self.WRITING
- elif op_name == 'Receiving objects':
+ elif op_name == "Receiving objects":
op_code |= self.RECEIVING
- elif op_name == 'Resolving deltas':
+ elif op_name == "Resolving deltas":
op_code |= self.RESOLVING
- elif op_name == 'Finding sources':
+ elif op_name == "Finding sources":
op_code |= self.FINDING_SOURCES
- elif op_name == 'Checking out files':
+ elif op_name == "Checking out files":
op_code |= self.CHECKING_OUT
else:
# Note: On windows it can happen that partial lines are sent
@@ -559,28 +626,32 @@ class RemoteProgress(object):
# END begin opcode
if message is None:
- message = ''
+ message = ""
# END message handling
message = message.strip()
if message.endswith(self.DONE_TOKEN):
op_code |= self.END
- message = message[:-len(self.DONE_TOKEN)]
+ message = message[: -len(self.DONE_TOKEN)]
# END end message handling
message = message.strip(self.TOKEN_SEPARATOR)
- self.update(op_code,
- cur_count and float(cur_count),
- max_count and float(max_count),
- message)
+ self.update(
+ op_code,
+ cur_count and float(cur_count),
+ max_count and float(max_count),
+ message,
+ )
def new_message_handler(self) -> Callable[[str], None]:
"""
:return:
a progress handler suitable for handle_process_output(), passing lines on to this Progress
handler in a suitable format"""
+
def handler(line: AnyStr) -> None:
return self._parse_progress_line(line.rstrip())
+
# end
return handler
@@ -588,8 +659,13 @@ class RemoteProgress(object):
"""Called whenever a line could not be understood and was therefore dropped."""
pass
- def update(self, op_code: int, cur_count: Union[str, float], max_count: Union[str, float, None] = None,
- message: str = '',) -> None:
+ def update(
+ self,
+ op_code: int,
+ cur_count: Union[str, float],
+ max_count: Union[str, float, None] = None,
+ message: str = "",
+ ) -> None:
"""Called whenever the progress changes
:param op_code:
@@ -618,7 +694,8 @@ class RemoteProgress(object):
class CallableRemoteProgress(RemoteProgress):
"""An implementation forwarding updates to any callable"""
- __slots__ = ('_callable')
+
+ __slots__ = "_callable"
def __init__(self, fn: Callable) -> None:
self._callable = fn
@@ -632,9 +709,10 @@ class Actor(object):
"""Actors hold information about a person acting on the repository. They
can be committers and authors or anything with a name and an email as
mentioned in the git log entries."""
+
# PRECOMPILED REGEX
- name_only_regex = re.compile(r'<(.*)>')
- name_email_regex = re.compile(r'(.*) <(.*?)>')
+ name_only_regex = re.compile(r"<(.*)>")
+ name_email_regex = re.compile(r"(.*) <(.*?)>")
# ENVIRONMENT VARIABLES
# read when creating new commits
@@ -644,10 +722,10 @@ class Actor(object):
env_committer_email = "GIT_COMMITTER_EMAIL"
# CONFIGURATION KEYS
- conf_name = 'name'
- conf_email = 'email'
+ conf_name = "name"
+ conf_email = "email"
- __slots__ = ('name', 'email')
+ __slots__ = ("name", "email")
def __init__(self, name: Optional[str], email: Optional[str]) -> None:
self.name = name
@@ -669,13 +747,13 @@ class Actor(object):
return '<git.Actor "%s <%s>">' % (self.name, self.email)
@classmethod
- def _from_string(cls, string: str) -> 'Actor':
+ def _from_string(cls, string: str) -> "Actor":
"""Create an Actor from a string.
:param string: is the string, which is expected to be in regular git format
John Doe <jdoe@example.com>
- :return: Actor """
+ :return: Actor"""
m = cls.name_email_regex.search(string)
if m:
name, email = m.groups()
@@ -690,9 +768,13 @@ class Actor(object):
# END handle name/email matching
@classmethod
- def _main_actor(cls, env_name: str, env_email: str,
- config_reader: Union[None, 'GitConfigParser', 'SectionConstraint'] = None) -> 'Actor':
- actor = Actor('', '')
+ def _main_actor(
+ cls,
+ env_name: str,
+ env_email: str,
+ config_reader: Union[None, "GitConfigParser", "SectionConstraint"] = None,
+ ) -> "Actor":
+ actor = Actor("", "")
user_id = None # We use this to avoid multiple calls to getpass.getuser()
def default_email() -> str:
@@ -702,17 +784,19 @@ class Actor(object):
return user_id
def default_name() -> str:
- return default_email().split('@')[0]
+ return default_email().split("@")[0]
- for attr, evar, cvar, default in (('name', env_name, cls.conf_name, default_name),
- ('email', env_email, cls.conf_email, default_email)):
+ for attr, evar, cvar, default in (
+ ("name", env_name, cls.conf_name, default_name),
+ ("email", env_email, cls.conf_email, default_email),
+ ):
try:
val = os.environ[evar]
setattr(actor, attr, val)
except KeyError:
if config_reader is not None:
try:
- val = config_reader.get('user', cvar)
+ val = config_reader.get("user", cvar)
except Exception:
val = default()
setattr(actor, attr, val)
@@ -724,7 +808,9 @@ class Actor(object):
return actor
@classmethod
- def committer(cls, config_reader: Union[None, 'GitConfigParser', 'SectionConstraint'] = None) -> 'Actor':
+ def committer(
+ cls, config_reader: Union[None, "GitConfigParser", "SectionConstraint"] = None
+ ) -> "Actor":
"""
:return: Actor instance corresponding to the configured committer. It behaves
similar to the git implementation, such that the environment will override
@@ -732,10 +818,14 @@ class Actor(object):
generated
:param config_reader: ConfigReader to use to retrieve the values from in case
they are not set in the environment"""
- return cls._main_actor(cls.env_committer_name, cls.env_committer_email, config_reader)
+ return cls._main_actor(
+ cls.env_committer_name, cls.env_committer_email, config_reader
+ )
@classmethod
- def author(cls, config_reader: Union[None, 'GitConfigParser', 'SectionConstraint'] = None) -> 'Actor':
+ def author(
+ cls, config_reader: Union[None, "GitConfigParser", "SectionConstraint"] = None
+ ) -> "Actor":
"""Same as committer(), but defines the main author. It may be specified in the environment,
but defaults to the committer"""
return cls._main_actor(cls.env_author_name, cls.env_author_email, config_reader)
@@ -767,6 +857,7 @@ class Stats(object):
In addition to the items in the stat-dict, it features additional information::
files = number of changed files as int"""
+
__slots__ = ("total", "files")
def __init__(self, total: Total_TD, files: Dict[PathLike, Files_TD]):
@@ -774,30 +865,30 @@ class Stats(object):
self.files = files
@classmethod
- def _list_from_string(cls, repo: 'Repo', text: str) -> 'Stats':
+ def _list_from_string(cls, repo: "Repo", text: str) -> "Stats":
"""Create a Stat object from output retrieved by git-diff.
:return: git.Stat"""
- hsh: HSH_TD = {'total': {'insertions': 0,
- 'deletions': 0,
- 'lines': 0,
- 'files': 0},
- 'files': {}
- }
+ hsh: HSH_TD = {
+ "total": {"insertions": 0, "deletions": 0, "lines": 0, "files": 0},
+ "files": {},
+ }
for line in text.splitlines():
(raw_insertions, raw_deletions, filename) = line.split("\t")
- insertions = raw_insertions != '-' and int(raw_insertions) or 0
- deletions = raw_deletions != '-' and int(raw_deletions) or 0
- hsh['total']['insertions'] += insertions
- hsh['total']['deletions'] += deletions
- hsh['total']['lines'] += insertions + deletions
- hsh['total']['files'] += 1
- files_dict: Files_TD = {'insertions': insertions,
- 'deletions': deletions,
- 'lines': insertions + deletions}
- hsh['files'][filename.strip()] = files_dict
- return Stats(hsh['total'], hsh['files'])
+ insertions = raw_insertions != "-" and int(raw_insertions) or 0
+ deletions = raw_deletions != "-" and int(raw_deletions) or 0
+ hsh["total"]["insertions"] += insertions
+ hsh["total"]["deletions"] += deletions
+ hsh["total"]["lines"] += insertions + deletions
+ hsh["total"]["files"] += 1
+ files_dict: Files_TD = {
+ "insertions": insertions,
+ "deletions": deletions,
+ "lines": insertions + deletions,
+ }
+ hsh["files"][filename.strip()] = files_dict
+ return Stats(hsh["total"], hsh["files"])
class IndexFileSHA1Writer(object):
@@ -809,6 +900,7 @@ class IndexFileSHA1Writer(object):
Only useful to the indexfile
:note: Based on the dulwich project"""
+
__slots__ = ("f", "sha1")
def __init__(self, f: IO) -> None:
@@ -841,6 +933,7 @@ class LockFile(object):
As we are a utility class to be derived from, we only use protected methods.
Locks will automatically be released on destruction"""
+
__slots__ = ("_file_path", "_owns_lock")
def __init__(self, file_path: PathLike) -> None:
@@ -867,8 +960,10 @@ class LockFile(object):
return
lock_file = self._lock_file_path()
if osp.isfile(lock_file):
- raise IOError("Lock for file %r did already exist, delete %r in case the lock is illegal" %
- (self._file_path, lock_file))
+ raise IOError(
+ "Lock for file %r did already exist, delete %r in case the lock is illegal"
+ % (self._file_path, lock_file)
+ )
try:
flags = os.O_WRONLY | os.O_CREAT | os.O_EXCL
@@ -909,9 +1004,15 @@ class BlockingLockFile(LockFile):
:note: If the directory containing the lock was removed, an exception will
be raised during the blocking period, preventing hangs as the lock
can never be obtained."""
+
__slots__ = ("_check_interval", "_max_block_time")
- def __init__(self, file_path: PathLike, check_interval_s: float = 0.3, max_block_time_s: int = maxsize) -> None:
+ def __init__(
+ self,
+ file_path: PathLike,
+ check_interval_s: float = 0.3,
+ max_block_time_s: int = maxsize,
+ ) -> None:
"""Configure the instance
:param check_interval_s:
@@ -937,13 +1038,18 @@ class BlockingLockFile(LockFile):
# readable anymore, raise an exception
curtime = time.time()
if not osp.isdir(osp.dirname(self._lock_file_path())):
- msg = "Directory containing the lockfile %r was not readable anymore after waiting %g seconds" % (
- self._lock_file_path(), curtime - starttime)
+ msg = (
+ "Directory containing the lockfile %r was not readable anymore after waiting %g seconds"
+ % (self._lock_file_path(), curtime - starttime)
+ )
raise IOError(msg) from e
# END handle missing directory
if curtime >= maxtime:
- msg = "Waited %g seconds for lock at %r" % (maxtime - starttime, self._lock_file_path())
+ msg = "Waited %g seconds for lock at %r" % (
+ maxtime - starttime,
+ self._lock_file_path(),
+ )
raise IOError(msg) from e
# END abort if we wait too long
time.sleep(self._check_interval)
@@ -971,12 +1077,13 @@ class IterableList(List[T_IterableObj]):
A prefix can be specified which is to be used in case the id returned by the
items always contains a prefix that does not matter to the user, so it
can be left out."""
- __slots__ = ('_id_attr', '_prefix')
- def __new__(cls, id_attr: str, prefix: str = '') -> 'IterableList[IterableObj]':
+ __slots__ = ("_id_attr", "_prefix")
+
+ def __new__(cls, id_attr: str, prefix: str = "") -> "IterableList[IterableObj]":
return super(IterableList, cls).__new__(cls)
- def __init__(self, id_attr: str, prefix: str = '') -> None:
+ def __init__(self, id_attr: str, prefix: str = "") -> None:
self._id_attr = id_attr
self._prefix = prefix
@@ -1008,7 +1115,9 @@ class IterableList(List[T_IterableObj]):
def __getitem__(self, index: Union[SupportsIndex, int, slice, str]) -> T_IterableObj: # type: ignore
- assert isinstance(index, (int, str, slice)), "Index of IterableList should be an int or str"
+ assert isinstance(
+ index, (int, str, slice)
+ ), "Index of IterableList should be an int or str"
if isinstance(index, int):
return list.__getitem__(self, index)
@@ -1018,12 +1127,16 @@ class IterableList(List[T_IterableObj]):
try:
return getattr(self, index)
except AttributeError as e:
- raise IndexError("No item found with id %r" % (self._prefix + index)) from e
+ raise IndexError(
+ "No item found with id %r" % (self._prefix + index)
+ ) from e
# END handle getattr
def __delitem__(self, index: Union[SupportsIndex, int, slice, str]) -> None:
- assert isinstance(index, (int, str)), "Index of IterableList should be an int or str"
+ assert isinstance(
+ index, (int, str)
+ ), "Index of IterableList should be an int or str"
delindex = cast(int, index)
if not isinstance(index, int):
@@ -1043,27 +1156,31 @@ class IterableList(List[T_IterableObj]):
class IterableClassWatcher(type):
- """ Metaclass that watches """
+ """Metaclass that watches"""
+
def __init__(cls, name: str, bases: Tuple, clsdict: Dict) -> None:
for base in bases:
if type(base) == IterableClassWatcher:
- warnings.warn(f"GitPython Iterable subclassed by {name}. "
- "Iterable is deprecated due to naming clash since v3.1.18"
- " and will be removed in 3.1.20, "
- "Use IterableObj instead \n",
- DeprecationWarning,
- stacklevel=2)
+ warnings.warn(
+ f"GitPython Iterable subclassed by {name}. "
+ "Iterable is deprecated due to naming clash since v3.1.18"
+ " and will be removed in 3.1.20, "
+ "Use IterableObj instead \n",
+ DeprecationWarning,
+ stacklevel=2,
+ )
class Iterable(metaclass=IterableClassWatcher):
"""Defines an interface for iterable items which is to assure a uniform
way to retrieve and iterate items within the git repository"""
+
__slots__ = ()
_id_attribute_ = "attribute that most suitably identifies your instance"
@classmethod
- def list_items(cls, repo: 'Repo', *args: Any, **kwargs: Any) -> Any:
+ def list_items(cls, repo: "Repo", *args: Any, **kwargs: Any) -> Any:
"""
Deprecated, use IterableObj instead.
Find all items of this type - subclasses can specify args and kwargs differently.
@@ -1078,7 +1195,7 @@ class Iterable(metaclass=IterableClassWatcher):
return out_list
@classmethod
- def iter_items(cls, repo: 'Repo', *args: Any, **kwargs: Any) -> Any:
+ def iter_items(cls, repo: "Repo", *args: Any, **kwargs: Any) -> Any:
# return typed to be compatible with subtypes e.g. Remote
"""For more information about the arguments, see list_items
:return: iterator yielding Items"""
@@ -1096,7 +1213,9 @@ class IterableObj(Protocol):
_id_attribute_: str
@classmethod
- def list_items(cls, repo: 'Repo', *args: Any, **kwargs: Any) -> IterableList[T_IterableObj]:
+ def list_items(
+ cls, repo: "Repo", *args: Any, **kwargs: Any
+ ) -> IterableList[T_IterableObj]:
"""
Find all items of this type - subclasses can specify args and kwargs differently.
If no args are given, subclasses are obliged to return all items if no additional
@@ -1111,13 +1230,15 @@ class IterableObj(Protocol):
@classmethod
@abstractmethod
- def iter_items(cls, repo: 'Repo', *args: Any, **kwargs: Any
- ) -> Iterator[T_IterableObj]: # Iterator[T_IterableObj]:
+ def iter_items(
+ cls, repo: "Repo", *args: Any, **kwargs: Any
+ ) -> Iterator[T_IterableObj]: # Iterator[T_IterableObj]:
# return typed to be compatible with subtypes e.g. Remote
"""For more information about the arguments, see list_items
- :return: iterator yielding Items"""
+ :return: iterator yielding Items"""
raise NotImplementedError("To be implemented by Subclass")
+
# } END classes
diff --git a/setup.py b/setup.py
index 4f1d0b75..8307bfb0 100755
--- a/setup.py
+++ b/setup.py
@@ -6,23 +6,22 @@ import fnmatch
import os
import sys
-with open(os.path.join(os.path.dirname(__file__), 'VERSION')) as v:
+with open(os.path.join(os.path.dirname(__file__), "VERSION")) as v:
VERSION = v.readline().strip()
-with open('requirements.txt') as reqs_file:
+with open("requirements.txt") as reqs_file:
requirements = reqs_file.read().splitlines()
-with open('test-requirements.txt') as reqs_file:
+with open("test-requirements.txt") as reqs_file:
test_requirements = reqs_file.read().splitlines()
-with open('README.md') as rm_file:
+with open("README.md") as rm_file:
long_description = rm_file.read()
class build_py(_build_py):
-
def run(self) -> None:
- init = os.path.join(self.build_lib, 'git', '__init__.py')
+ init = os.path.join(self.build_lib, "git", "__init__.py")
if os.path.exists(init):
os.unlink(init)
_build_py.run(self)
@@ -31,13 +30,12 @@ class build_py(_build_py):
class sdist(_sdist):
-
def make_release_tree(self, base_dir: str, files: Sequence) -> None:
_sdist.make_release_tree(self, base_dir, files)
- orig = os.path.join('git', '__init__.py')
+ orig = os.path.join("git", "__init__.py")
assert os.path.exists(orig), orig
dest = os.path.join(base_dir, orig)
- if hasattr(os, 'link') and os.path.exists(dest):
+ if hasattr(os, "link") and os.path.exists(dest):
os.unlink(dest)
self.copy_file(orig, dest)
_stamp_version(dest)
@@ -46,9 +44,9 @@ class sdist(_sdist):
def _stamp_version(filename: str) -> None:
found, out = False, []
try:
- with open(filename, 'r') as f:
+ with open(filename, "r") as f:
for line in f:
- if '__version__ =' in line:
+ if "__version__ =" in line:
line = line.replace("'git'", "'%s'" % VERSION)
found = True
out.append(line)
@@ -56,10 +54,12 @@ def _stamp_version(filename: str) -> None:
print("Couldn't find file %s to stamp version" % filename, file=sys.stderr)
if found:
- with open(filename, 'w') as f:
+ with open(filename, "w") as f:
f.writelines(out)
else:
- print("WARNING: Couldn't find version line in file %s" % filename, file=sys.stderr)
+ print(
+ "WARNING: Couldn't find version line in file %s" % filename, file=sys.stderr
+ )
def build_py_modules(basedir: str, excludes: Sequence = ()) -> Sequence:
@@ -82,7 +82,7 @@ def build_py_modules(basedir: str, excludes: Sequence = ()) -> Sequence:
setup(
name="GitPython",
- cmdclass={'build_py': build_py, 'sdist': sdist},
+ cmdclass={"build_py": build_py, "sdist": sdist},
version=VERSION,
description="""GitPython is a python library used to interact with Git repositories""",
author="Sebastian Thiel, Michael Trier",
@@ -92,8 +92,8 @@ setup(
packages=find_packages(exclude=["test", "test.*"]),
include_package_data=True,
py_modules=build_py_modules("./git", excludes=["git.ext.*"]),
- package_dir={'git': 'git'},
- python_requires='>=3.7',
+ package_dir={"git": "git"},
+ python_requires=">=3.7",
install_requires=requirements,
tests_require=requirements + test_requirements,
zip_safe=False,
@@ -123,5 +123,5 @@ setup(
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
- ]
+ ],
)
diff --git a/test/lib/__init__.py b/test/lib/__init__.py
index 1551ce45..ae4c2b67 100644
--- a/test/lib/__init__.py
+++ b/test/lib/__init__.py
@@ -8,5 +8,8 @@
import inspect
from .helper import *
-__all__ = [name for name, obj in locals().items()
- if not (name.startswith('_') or inspect.ismodule(obj))]
+__all__ = [
+ name
+ for name, obj in locals().items()
+ if not (name.startswith("_") or inspect.ismodule(obj))
+]
diff --git a/test/lib/helper.py b/test/lib/helper.py
index 632d6af9..8f4046da 100644
--- a/test/lib/helper.py
+++ b/test/lib/helper.py
@@ -31,29 +31,37 @@ GIT_REPO = os.environ.get("GIT_PYTHON_TEST_GIT_REPO_BASE", ospd(ospd(ospd(__file
GIT_DAEMON_PORT = os.environ.get("GIT_PYTHON_TEST_GIT_DAEMON_PORT", "19418")
__all__ = (
- 'fixture_path', 'fixture', 'StringProcessAdapter',
- 'with_rw_directory', 'with_rw_repo', 'with_rw_and_rw_remote_repo',
- 'TestBase', 'TestCase',
- 'SkipTest', 'skipIf',
- 'GIT_REPO', 'GIT_DAEMON_PORT'
+ "fixture_path",
+ "fixture",
+ "StringProcessAdapter",
+ "with_rw_directory",
+ "with_rw_repo",
+ "with_rw_and_rw_remote_repo",
+ "TestBase",
+ "TestCase",
+ "SkipTest",
+ "skipIf",
+ "GIT_REPO",
+ "GIT_DAEMON_PORT",
)
log = logging.getLogger(__name__)
-#{ Routines
+# { Routines
def fixture_path(name):
- return osp.join(ospd(ospd(__file__)), 'fixtures', name)
+ return osp.join(ospd(ospd(__file__)), "fixtures", name)
def fixture(name):
- with open(fixture_path(name), 'rb') as fd:
+ with open(fixture_path(name), "rb") as fd:
return fd.read()
-#} END routines
-#{ Adapters
+# } END routines
+
+# { Adapters
class StringProcessAdapter(object):
@@ -70,9 +78,10 @@ class StringProcessAdapter(object):
poll = wait
-#} END adapters
-#{ Decorators
+# } END adapters
+
+# { Decorators
def with_rw_directory(func):
@@ -88,8 +97,12 @@ def with_rw_directory(func):
try:
return func(self, path)
except Exception:
- log.info("Test %s.%s failed, output is at %r\n",
- type(self).__name__, func.__name__, path)
+ log.info(
+ "Test %s.%s failed, output is at %r\n",
+ type(self).__name__,
+ func.__name__,
+ path,
+ )
keep = True
raise
finally:
@@ -114,14 +127,16 @@ def with_rw_repo(working_tree_ref, bare=False):
To make working with relative paths easier, the cwd will be set to the working
dir of the repository.
"""
- assert isinstance(working_tree_ref, str), "Decorator requires ref name for working tree checkout"
+ assert isinstance(
+ working_tree_ref, str
+ ), "Decorator requires ref name for working tree checkout"
def argument_passer(func):
@wraps(func)
def repo_creator(self):
- prefix = 'non_'
+ prefix = "non_"
if bare:
- prefix = ''
+ prefix = ""
# END handle prefix
repo_dir = tempfile.mktemp(prefix="%sbare_%s" % (prefix, func.__name__))
rw_repo = self.rorepo.clone(repo_dir, shared=True, bare=bare, n=True)
@@ -151,8 +166,10 @@ def with_rw_repo(working_tree_ref, bare=False):
rmtree(repo_dir)
# END rm test repo if possible
# END cleanup
+
# END rw repo creator
return repo_creator
+
# END argument passer
return argument_passer
@@ -170,24 +187,29 @@ def git_daemon_launched(base_path, ip, port):
# So, invoke it as a single command.
## Cygwin-git has no daemon. But it can use MINGW's.
#
- daemon_cmd = ['git-daemon',
- '--enable=receive-pack',
- '--listen=%s' % ip,
- '--port=%s' % port,
- '--base-path=%s' % base_path,
- base_path]
+ daemon_cmd = [
+ "git-daemon",
+ "--enable=receive-pack",
+ "--listen=%s" % ip,
+ "--port=%s" % port,
+ "--base-path=%s" % base_path,
+ base_path,
+ ]
gd = Git().execute(daemon_cmd, as_process=True)
else:
- gd = Git().daemon(base_path,
- enable='receive-pack',
- listen=ip,
- port=port,
- base_path=base_path,
- as_process=True)
+ gd = Git().daemon(
+ base_path,
+ enable="receive-pack",
+ listen=ip,
+ port=port,
+ base_path=base_path,
+ as_process=True,
+ )
# yes, I know ... fortunately, this is always going to work if sleep time is just large enough
time.sleep(0.5 * (1 + is_win))
except Exception as ex:
- msg = textwrap.dedent("""
+ msg = textwrap.dedent(
+ """
Launching git-daemon failed due to: %s
Probably test will fail subsequently.
@@ -195,14 +217,17 @@ def git_daemon_launched(base_path, ip, port):
git daemon --enable=receive-pack --listen=%s --port=%s --base-path=%s %s
You may also run the daemon on a different port by passing --port=<port>"
and setting the environment variable GIT_PYTHON_TEST_GIT_DAEMON_PORT to <port>
- """)
+ """
+ )
if is_win:
- msg += textwrap.dedent(r"""
+ msg += textwrap.dedent(
+ r"""
On Windows,
the `git-daemon.exe` must be in PATH.
For MINGW, look into .\Git\mingw64\libexec\git-core\), but problems with paths might appear.
- CYGWIN has no daemon, but if one exists, it gets along fine (but has also paths problems).""")
+ CYGWIN has no daemon, but if one exists, it gets along fine (but has also paths problems)."""
+ )
log.warning(msg, ex, ip, port, base_path, base_path, exc_info=1)
yield # OK, assume daemon started manually.
@@ -245,16 +270,23 @@ def with_rw_and_rw_remote_repo(working_tree_ref):
"""
from git import Git, Remote # To avoid circular deps.
- assert isinstance(working_tree_ref, str), "Decorator requires ref name for working tree checkout"
+ assert isinstance(
+ working_tree_ref, str
+ ), "Decorator requires ref name for working tree checkout"
def argument_passer(func):
-
@wraps(func)
def remote_repo_creator(self):
- rw_daemon_repo_dir = tempfile.mktemp(prefix="daemon_repo-%s-" % func.__name__)
- rw_repo_dir = tempfile.mktemp(prefix="daemon_cloned_repo-%s-" % func.__name__)
-
- rw_daemon_repo = self.rorepo.clone(rw_daemon_repo_dir, shared=True, bare=True)
+ rw_daemon_repo_dir = tempfile.mktemp(
+ prefix="daemon_repo-%s-" % func.__name__
+ )
+ rw_repo_dir = tempfile.mktemp(
+ prefix="daemon_cloned_repo-%s-" % func.__name__
+ )
+
+ rw_daemon_repo = self.rorepo.clone(
+ rw_daemon_repo_dir, shared=True, bare=True
+ )
# recursive alternates info ?
rw_repo = rw_daemon_repo.clone(rw_repo_dir, shared=True, bare=False, n=True)
try:
@@ -280,13 +312,19 @@ def with_rw_and_rw_remote_repo(working_tree_ref):
base_daemon_path, rel_repo_dir = osp.split(rw_daemon_repo_dir)
- remote_repo_url = Git.polish_url("git://localhost:%s/%s" % (GIT_DAEMON_PORT, rel_repo_dir))
+ remote_repo_url = Git.polish_url(
+ "git://localhost:%s/%s" % (GIT_DAEMON_PORT, rel_repo_dir)
+ )
with d_remote.config_writer as cw:
- cw.set('url', remote_repo_url)
-
- with git_daemon_launched(Git.polish_url(base_daemon_path, is_cygwin=False), # No daemon in Cygwin.
- '127.0.0.1',
- GIT_DAEMON_PORT):
+ cw.set("url", remote_repo_url)
+
+ with git_daemon_launched(
+ Git.polish_url(
+ base_daemon_path, is_cygwin=False
+ ), # No daemon in Cygwin.
+ "127.0.0.1",
+ GIT_DAEMON_PORT,
+ ):
# Try listing remotes, to diagnose whether the daemon is up.
rw_repo.git.ls_remote(d_remote)
@@ -294,8 +332,11 @@ def with_rw_and_rw_remote_repo(working_tree_ref):
try:
return func(self, rw_repo, rw_daemon_repo)
except: # noqa E722
- log.info("Keeping repos after failure: \n rw_repo_dir: %s \n rw_daemon_repo_dir: %s",
- rw_repo_dir, rw_daemon_repo_dir)
+ log.info(
+ "Keeping repos after failure: \n rw_repo_dir: %s \n rw_daemon_repo_dir: %s",
+ rw_repo_dir,
+ rw_daemon_repo_dir,
+ )
rw_repo_dir = rw_daemon_repo_dir = None
raise
@@ -312,14 +353,17 @@ def with_rw_and_rw_remote_repo(working_tree_ref):
if rw_daemon_repo_dir:
rmtree(rw_daemon_repo_dir)
# END cleanup
+
# END bare repo creator
return remote_repo_creator
# END remote repo creator
+
# END argument parser
return argument_passer
-#} END decorators
+
+# } END decorators
class TestBase(TestCase):
@@ -344,7 +388,10 @@ class TestBase(TestCase):
def _small_repo_url(self):
""":return" a path to a small, clonable repository"""
from git.cmd import Git
- return Git.polish_url(osp.join(self.rorepo.working_tree_dir, 'git/ext/gitdb/gitdb/ext/smmap'))
+
+ return Git.polish_url(
+ osp.join(self.rorepo.working_tree_dir, "git/ext/gitdb/gitdb/ext/smmap")
+ )
@classmethod
def setUpClass(cls):
@@ -353,6 +400,7 @@ class TestBase(TestCase):
each test type has its own repository
"""
from git import Repo
+
gc.collect()
cls.rorepo = Repo(GIT_REPO)
diff --git a/test/performance/lib.py b/test/performance/lib.py
index 86f87757..101e2cd4 100644
--- a/test/performance/lib.py
+++ b/test/performance/lib.py
@@ -3,27 +3,21 @@ import logging
import os
import tempfile
-from git import (
- Repo
-)
-from git.db import (
- GitCmdObjectDB,
- GitDB
-)
-from test.lib import (
- TestBase
-)
+from git import Repo
+from git.db import GitCmdObjectDB, GitDB
+from test.lib import TestBase
from git.util import rmtree
import os.path as osp
-#{ Invariants
+# { Invariants
k_env_git_repo = "GIT_PYTHON_TEST_GIT_REPO_BASE"
-#} END invariants
+# } END invariants
-#{ Base Classes
+# { Base Classes
+
class TestBigRepoR(TestBase):
@@ -39,8 +33,8 @@ class TestBigRepoR(TestBase):
* As gitrepo, but uses pure python implementation
"""
- #{ Invariants
- #} END invariants
+ # { Invariants
+ # } END invariants
def setUp(self):
try:
@@ -51,11 +45,17 @@ class TestBigRepoR(TestBase):
repo_path = os.environ.get(k_env_git_repo)
if repo_path is None:
logging.info(
- ("You can set the %s environment variable to a .git repository of" % k_env_git_repo) +
- "your choice - defaulting to the gitpython repository")
+ (
+ "You can set the %s environment variable to a .git repository of"
+ % k_env_git_repo
+ )
+ + "your choice - defaulting to the gitpython repository"
+ )
repo_path = osp.dirname(__file__)
# end set some repo path
- self.gitrorepo = Repo(repo_path, odbt=GitCmdObjectDB, search_parent_directories=True)
+ self.gitrorepo = Repo(
+ repo_path, odbt=GitCmdObjectDB, search_parent_directories=True
+ )
self.puregitrorepo = Repo(repo_path, odbt=GitDB, search_parent_directories=True)
def tearDown(self):
@@ -79,7 +79,9 @@ class TestBigRepoRW(TestBigRepoR):
pass
dirname = tempfile.mktemp()
os.mkdir(dirname)
- self.gitrwrepo = self.gitrorepo.clone(dirname, shared=True, bare=True, odbt=GitCmdObjectDB)
+ self.gitrwrepo = self.gitrorepo.clone(
+ dirname, shared=True, bare=True, odbt=GitCmdObjectDB
+ )
self.puregitrwrepo = Repo(dirname, odbt=GitDB)
def tearDown(self):
@@ -91,4 +93,5 @@ class TestBigRepoRW(TestBigRepoR):
self.puregitrwrepo.git.clear_cache()
self.puregitrwrepo = None
-#} END base classes
+
+# } END base classes
diff --git a/test/performance/test_commit.py b/test/performance/test_commit.py
index 8158a1e6..25cc34b8 100644
--- a/test/performance/test_commit.py
+++ b/test/performance/test_commit.py
@@ -14,13 +14,13 @@ from test.test_commit import TestCommitSerialization
class TestPerformance(TestBigRepoRW, TestCommitSerialization):
-
def tearDown(self):
import gc
+
gc.collect()
# ref with about 100 commits in its history
- ref_100 = '0.1.6'
+ ref_100 = "0.1.6"
def _query_commit_info(self, c):
c.author
@@ -50,8 +50,11 @@ class TestPerformance(TestBigRepoRW, TestCommitSerialization):
# END for each object
# END for each commit
elapsed_time = time() - st
- print("Traversed %i Trees and a total of %i uncached objects in %s [s] ( %f objs/s )"
- % (nc, no, elapsed_time, no / elapsed_time), file=sys.stderr)
+ print(
+ "Traversed %i Trees and a total of %i uncached objects in %s [s] ( %f objs/s )"
+ % (nc, no, elapsed_time, no / elapsed_time),
+ file=sys.stderr,
+ )
def test_commit_traversal(self):
# bound to cat-file parsing performance
@@ -62,8 +65,11 @@ class TestPerformance(TestBigRepoRW, TestCommitSerialization):
self._query_commit_info(c)
# END for each traversed commit
elapsed_time = time() - st
- print("Traversed %i Commits in %s [s] ( %f commits/s )"
- % (nc, elapsed_time, nc / elapsed_time), file=sys.stderr)
+ print(
+ "Traversed %i Commits in %s [s] ( %f commits/s )"
+ % (nc, elapsed_time, nc / elapsed_time),
+ file=sys.stderr,
+ )
def test_commit_iteration(self):
# bound to stream parsing performance
@@ -74,11 +80,14 @@ class TestPerformance(TestBigRepoRW, TestCommitSerialization):
self._query_commit_info(c)
# END for each traversed commit
elapsed_time = time() - st
- print("Iterated %i Commits in %s [s] ( %f commits/s )"
- % (nc, elapsed_time, nc / elapsed_time), file=sys.stderr)
+ print(
+ "Iterated %i Commits in %s [s] ( %f commits/s )"
+ % (nc, elapsed_time, nc / elapsed_time),
+ file=sys.stderr,
+ )
def test_commit_serialization(self):
- self.assert_commit_serialization(self.gitrwrepo, '58c78e6', True)
+ self.assert_commit_serialization(self.gitrwrepo, "58c78e6", True)
rwrepo = self.gitrwrepo
make_object = rwrepo.odb.store
@@ -89,10 +98,20 @@ class TestPerformance(TestBigRepoRW, TestCommitSerialization):
nc = 5000
st = time()
for i in range(nc):
- cm = Commit(rwrepo, Commit.NULL_BIN_SHA, hc.tree,
- hc.author, hc.authored_date, hc.author_tz_offset,
- hc.committer, hc.committed_date, hc.committer_tz_offset,
- str(i), parents=hc.parents, encoding=hc.encoding)
+ cm = Commit(
+ rwrepo,
+ Commit.NULL_BIN_SHA,
+ hc.tree,
+ hc.author,
+ hc.authored_date,
+ hc.author_tz_offset,
+ hc.committer,
+ hc.committed_date,
+ hc.committer_tz_offset,
+ str(i),
+ parents=hc.parents,
+ encoding=hc.encoding,
+ )
stream = BytesIO()
cm._serialize(stream)
@@ -103,5 +122,8 @@ class TestPerformance(TestBigRepoRW, TestCommitSerialization):
# END commit creation
elapsed = time() - st
- print("Serialized %i commits to loose objects in %f s ( %f commits / s )"
- % (nc, elapsed, nc / elapsed), file=sys.stderr)
+ print(
+ "Serialized %i commits to loose objects in %f s ( %f commits / s )"
+ % (nc, elapsed, nc / elapsed),
+ file=sys.stderr,
+ )
diff --git a/test/performance/test_odb.py b/test/performance/test_odb.py
index c9521c56..680464c9 100644
--- a/test/performance/test_odb.py
+++ b/test/performance/test_odb.py
@@ -2,13 +2,10 @@
import sys
from time import time
-from .lib import (
- TestBigRepoR
-)
+from .lib import TestBigRepoR
class TestObjDBPerformance(TestBigRepoR):
-
def test_random_access(self):
results = [["Iterate Commits"], ["Iterate Blobs"], ["Retrieve Blob Data"]]
for repo in (self.gitrorepo, self.puregitrorepo):
@@ -19,8 +16,11 @@ class TestObjDBPerformance(TestBigRepoR):
nc = len(commits)
elapsed = time() - st
- print("%s: Retrieved %i commits from ObjectStore in %g s ( %f commits / s )"
- % (type(repo.odb), nc, elapsed, nc / elapsed), file=sys.stderr)
+ print(
+ "%s: Retrieved %i commits from ObjectStore in %g s ( %f commits / s )"
+ % (type(repo.odb), nc, elapsed, nc / elapsed),
+ file=sys.stderr,
+ )
results[0].append(elapsed)
# GET TREES
@@ -33,7 +33,7 @@ class TestObjDBPerformance(TestBigRepoR):
blobs = []
for item in tree.traverse():
nt += 1
- if item.type == 'blob':
+ if item.type == "blob":
blobs.append(item)
# direct access for speed
# END while trees are there for walking
@@ -41,8 +41,11 @@ class TestObjDBPerformance(TestBigRepoR):
# END for each commit
elapsed = time() - st
- print("%s: Retrieved %i objects from %i commits in %g s ( %f objects / s )"
- % (type(repo.odb), nt, len(commits), elapsed, nt / elapsed), file=sys.stderr)
+ print(
+ "%s: Retrieved %i objects from %i commits in %g s ( %f objects / s )"
+ % (type(repo.odb), nt, len(commits), elapsed, nt / elapsed),
+ file=sys.stderr,
+ )
results[1].append(elapsed)
# GET BLOBS
@@ -60,13 +63,25 @@ class TestObjDBPerformance(TestBigRepoR):
# END for each bloblist
elapsed = time() - st
- msg = "%s: Retrieved %i blob (%i KiB) and their data in %g s ( %f blobs / s, %f KiB / s )"\
- % (type(repo.odb), nb, data_bytes / 1000, elapsed, nb / elapsed, (data_bytes / 1000) / elapsed)
+ msg = (
+ "%s: Retrieved %i blob (%i KiB) and their data in %g s ( %f blobs / s, %f KiB / s )"
+ % (
+ type(repo.odb),
+ nb,
+ data_bytes / 1000,
+ elapsed,
+ nb / elapsed,
+ (data_bytes / 1000) / elapsed,
+ )
+ )
print(msg, file=sys.stderr)
results[2].append(elapsed)
# END for each repo type
# final results
for test_name, a, b in results:
- print("%s: %f s vs %f s, pure is %f times slower" % (test_name, a, b, b / a), file=sys.stderr)
+ print(
+ "%s: %f s vs %f s, pure is %f times slower" % (test_name, a, b, b / a),
+ file=sys.stderr,
+ )
# END for each result
diff --git a/test/performance/test_streams.py b/test/performance/test_streams.py
index 28e6b13e..2ae94e29 100644
--- a/test/performance/test_streams.py
+++ b/test/performance/test_streams.py
@@ -4,36 +4,29 @@ import subprocess
import sys
from time import time
-from test.lib import (
- with_rw_repo
-)
+from test.lib import with_rw_repo
from git.util import bin_to_hex
-from gitdb import (
- LooseObjectDB,
- IStream
-)
+from gitdb import LooseObjectDB, IStream
from gitdb.test.lib import make_memory_file
import os.path as osp
-from .lib import (
- TestBigRepoR
-)
+from .lib import TestBigRepoR
class TestObjDBPerformance(TestBigRepoR):
- large_data_size_bytes = 1000 * 1000 * 10 # some MiB should do it
- moderate_data_size_bytes = 1000 * 1000 * 1 # just 1 MiB
+ large_data_size_bytes = 1000 * 1000 * 10 # some MiB should do it
+ moderate_data_size_bytes = 1000 * 1000 * 1 # just 1 MiB
- @with_rw_repo('HEAD', bare=True)
+ @with_rw_repo("HEAD", bare=True)
def test_large_data_streaming(self, rwrepo):
# TODO: This part overlaps with the same file in gitdb.test.performance.test_stream
# It should be shared if possible
- ldb = LooseObjectDB(osp.join(rwrepo.git_dir, 'objects'))
+ ldb = LooseObjectDB(osp.join(rwrepo.git_dir, "objects"))
for randomize in range(2):
- desc = (randomize and 'random ') or ''
+ desc = (randomize and "random ") or ""
print("Creating %s data ..." % desc, file=sys.stderr)
st = time()
size, stream = make_memory_file(self.large_data_size_bytes, randomize)
@@ -42,7 +35,7 @@ class TestObjDBPerformance(TestBigRepoR):
# writing - due to the compression it will seem faster than it is
st = time()
- binsha = ldb.store(IStream('blob', size, stream)).binsha
+ binsha = ldb.store(IStream("blob", size, stream)).binsha
elapsed_add = time() - st
assert ldb.has_object(binsha)
db_file = ldb.readable_db_object_path(bin_to_hex(binsha))
@@ -79,33 +72,45 @@ class TestObjDBPerformance(TestBigRepoR):
elapsed_readchunks = time() - st
stream.seek(0)
- assert b''.join(chunks) == stream.getvalue()
+ assert b"".join(chunks) == stream.getvalue()
cs_kib = cs / 1000
- print("Read %i KiB of %s data in %i KiB chunks from loose odb in %f s ( %f Read KiB / s)"
- % (size_kib, desc, cs_kib, elapsed_readchunks, size_kib / elapsed_readchunks), file=sys.stderr)
+ print(
+ "Read %i KiB of %s data in %i KiB chunks from loose odb in %f s ( %f Read KiB / s)"
+ % (
+ size_kib,
+ desc,
+ cs_kib,
+ elapsed_readchunks,
+ size_kib / elapsed_readchunks,
+ ),
+ file=sys.stderr,
+ )
# del db file so git has something to do
ostream = None
import gc
+
gc.collect()
os.remove(db_file)
# VS. CGIT
##########
# CGIT ! Can using the cgit programs be faster ?
- proc = rwrepo.git.hash_object('-w', '--stdin', as_process=True, istream=subprocess.PIPE)
+ proc = rwrepo.git.hash_object(
+ "-w", "--stdin", as_process=True, istream=subprocess.PIPE
+ )
# write file - pump everything in at once to be a fast as possible
- data = stream.getvalue() # cache it
+ data = stream.getvalue() # cache it
st = time()
proc.stdin.write(data)
proc.stdin.close()
gitsha = proc.stdout.read().strip()
proc.wait()
gelapsed_add = time() - st
- del(data)
- assert gitsha == bin_to_hex(binsha) # we do it the same way, right ?
+ del data
+ assert gitsha == bin_to_hex(binsha) # we do it the same way, right ?
# as its the same sha, we reuse our path
fsize_kib = osp.getsize(db_file) / 1000
@@ -114,19 +119,28 @@ class TestObjDBPerformance(TestBigRepoR):
print(msg, file=sys.stderr)
# compare ...
- print("Git-Python is %f %% faster than git when adding big %s files"
- % (100.0 - (elapsed_add / gelapsed_add) * 100, desc), file=sys.stderr)
+ print(
+ "Git-Python is %f %% faster than git when adding big %s files"
+ % (100.0 - (elapsed_add / gelapsed_add) * 100, desc),
+ file=sys.stderr,
+ )
# read all
st = time()
_hexsha, _typename, size, data = rwrepo.git.get_object_data(gitsha)
gelapsed_readall = time() - st
- print("Read %i KiB of %s data at once using git-cat-file in %f s ( %f Read KiB / s)"
- % (size_kib, desc, gelapsed_readall, size_kib / gelapsed_readall), file=sys.stderr)
+ print(
+ "Read %i KiB of %s data at once using git-cat-file in %f s ( %f Read KiB / s)"
+ % (size_kib, desc, gelapsed_readall, size_kib / gelapsed_readall),
+ file=sys.stderr,
+ )
# compare
- print("Git-Python is %f %% faster than git when reading big %sfiles"
- % (100.0 - (elapsed_readall / gelapsed_readall) * 100, desc), file=sys.stderr)
+ print(
+ "Git-Python is %f %% faster than git when reading big %sfiles"
+ % (100.0 - (elapsed_readall / gelapsed_readall) * 100, desc),
+ file=sys.stderr,
+ )
# read chunks
st = time()
@@ -138,10 +152,19 @@ class TestObjDBPerformance(TestBigRepoR):
# END read stream
gelapsed_readchunks = time() - st
msg = "Read %i KiB of %s data in %i KiB chunks from git-cat-file in %f s ( %f Read KiB / s)"
- msg %= (size_kib, desc, cs_kib, gelapsed_readchunks, size_kib / gelapsed_readchunks)
+ msg %= (
+ size_kib,
+ desc,
+ cs_kib,
+ gelapsed_readchunks,
+ size_kib / gelapsed_readchunks,
+ )
print(msg, file=sys.stderr)
# compare
- print("Git-Python is %f %% faster than git when reading big %s files in chunks"
- % (100.0 - (elapsed_readchunks / gelapsed_readchunks) * 100, desc), file=sys.stderr)
+ print(
+ "Git-Python is %f %% faster than git when reading big %s files in chunks"
+ % (100.0 - (elapsed_readchunks / gelapsed_readchunks) * 100, desc),
+ file=sys.stderr,
+ )
# END for each randomization factor
diff --git a/test/test_actor.py b/test/test_actor.py
index 32d16ea7..ce0c74fc 100644
--- a/test/test_actor.py
+++ b/test/test_actor.py
@@ -9,7 +9,6 @@ from git import Actor
class TestActor(TestBase):
-
def test_from_string_should_separate_name_and_email(self):
a = Actor._from_string("Michael Trier <mtrier@example.com>")
self.assertEqual("Michael Trier", a.name)
diff --git a/test/test_base.py b/test/test_base.py
index 68ce6816..a7c034e2 100644
--- a/test/test_base.py
+++ b/test/test_base.py
@@ -9,19 +9,10 @@ import sys
import tempfile
from unittest import SkipTest, skipIf
-from git.objects import (
- Blob,
- Tree,
- Commit,
- TagObject
-)
+from git.objects import Blob, Tree, Commit, TagObject
from git.compat import is_win
from git.objects.util import get_object_type_by_name
-from test.lib import (
- TestBase as _TestBase,
- with_rw_repo,
- with_rw_and_rw_remote_repo
-)
+from test.lib import TestBase as _TestBase, with_rw_repo, with_rw_and_rw_remote_repo
from git.util import hex_to_bin, HIDE_WINDOWS_FREEZE_ERRORS
import git.objects.base as base
@@ -29,15 +20,17 @@ import os.path as osp
class TestBase(_TestBase):
-
def tearDown(self):
import gc
+
gc.collect()
- type_tuples = (("blob", "8741fc1d09d61f02ffd8cded15ff603eff1ec070", "blob.py"),
- ("tree", "3a6a5e3eeed3723c09f1ef0399f81ed6b8d82e79", "directory"),
- ("commit", "4251bd59fb8e11e40c40548cba38180a9536118c", None),
- ("tag", "e56a60e8e9cd333cfba0140a77cd12b0d9398f10", None))
+ type_tuples = (
+ ("blob", "8741fc1d09d61f02ffd8cded15ff603eff1ec070", "blob.py"),
+ ("tree", "3a6a5e3eeed3723c09f1ef0399f81ed6b8d82e79", "directory"),
+ ("commit", "4251bd59fb8e11e40c40548cba38180a9536118c", None),
+ ("tag", "e56a60e8e9cd333cfba0140a77cd12b0d9398f10", None),
+ )
def test_base_object(self):
# test interface of base object classes
@@ -67,8 +60,8 @@ class TestBase(_TestBase):
if isinstance(item, base.IndexObject):
num_index_objs += 1
- if hasattr(item, 'path'): # never runs here
- assert not item.path.startswith("/") # must be relative
+ if hasattr(item, "path"): # never runs here
+ assert not item.path.startswith("/") # must be relative
assert isinstance(item.mode, int)
# END index object check
@@ -77,8 +70,8 @@ class TestBase(_TestBase):
data = data_stream.read()
assert data
- tmpfilename = tempfile.mktemp(suffix='test-stream')
- with open(tmpfilename, 'wb+') as tmpfile:
+ tmpfilename = tempfile.mktemp(suffix="test-stream")
+ with open(tmpfilename, "wb+") as tmpfile:
self.assertEqual(item, item.stream_data(tmpfile))
tmpfile.seek(0)
self.assertEqual(tmpfile.read(), data)
@@ -99,26 +92,28 @@ class TestBase(_TestBase):
def test_object_resolution(self):
# objects must be resolved to shas so they compare equal
- self.assertEqual(self.rorepo.head.reference.object, self.rorepo.active_branch.object)
+ self.assertEqual(
+ self.rorepo.head.reference.object, self.rorepo.active_branch.object
+ )
- @with_rw_repo('HEAD', bare=True)
+ @with_rw_repo("HEAD", bare=True)
def test_with_bare_rw_repo(self, bare_rw_repo):
assert bare_rw_repo.config_reader("repository").getboolean("core", "bare")
- assert osp.isfile(osp.join(bare_rw_repo.git_dir, 'HEAD'))
+ assert osp.isfile(osp.join(bare_rw_repo.git_dir, "HEAD"))
- @with_rw_repo('0.1.6')
+ @with_rw_repo("0.1.6")
def test_with_rw_repo(self, rw_repo):
assert not rw_repo.config_reader("repository").getboolean("core", "bare")
- assert osp.isdir(osp.join(rw_repo.working_tree_dir, 'lib'))
+ assert osp.isdir(osp.join(rw_repo.working_tree_dir, "lib"))
@skipIf(HIDE_WINDOWS_FREEZE_ERRORS, "FIXME: Freezes! sometimes...")
- @with_rw_and_rw_remote_repo('0.1.6')
+ @with_rw_and_rw_remote_repo("0.1.6")
def test_with_rw_remote_and_rw_repo(self, rw_repo, rw_remote_repo):
assert not rw_repo.config_reader("repository").getboolean("core", "bare")
assert rw_remote_repo.config_reader("repository").getboolean("core", "bare")
- assert osp.isdir(osp.join(rw_repo.working_tree_dir, 'lib'))
+ assert osp.isdir(osp.join(rw_repo.working_tree_dir, "lib"))
- @with_rw_repo('0.1.6')
+ @with_rw_repo("0.1.6")
def test_add_unicode(self, rw_repo):
filename = "שלום.txt"
@@ -131,7 +126,7 @@ class TestBase(_TestBase):
raise SkipTest("Environment doesn't support unicode filenames") from e
with open(file_path, "wb") as fp:
- fp.write(b'something')
+ fp.write(b"something")
if is_win:
# on windows, there is no way this works, see images on
@@ -144,4 +139,4 @@ class TestBase(_TestBase):
# on posix, we can just add unicode files without problems
rw_repo.git.add(rw_repo.working_dir)
# end
- rw_repo.index.commit('message')
+ rw_repo.index.commit("message")
diff --git a/test/test_blob.py b/test/test_blob.py
index c9c8c48a..ad5b46c1 100644
--- a/test/test_blob.py
+++ b/test/test_blob.py
@@ -9,14 +9,15 @@ from git import Blob
class TestBlob(TestBase):
-
def test_mime_type_should_return_mime_type_for_known_types(self):
- blob = Blob(self.rorepo, **{'binsha': Blob.NULL_BIN_SHA, 'path': 'foo.png'})
+ blob = Blob(self.rorepo, **{"binsha": Blob.NULL_BIN_SHA, "path": "foo.png"})
self.assertEqual("image/png", blob.mime_type)
def test_mime_type_should_return_text_plain_for_unknown_types(self):
- blob = Blob(self.rorepo, **{'binsha': Blob.NULL_BIN_SHA, 'path': 'something'})
+ blob = Blob(self.rorepo, **{"binsha": Blob.NULL_BIN_SHA, "path": "something"})
self.assertEqual("text/plain", blob.mime_type)
def test_nodict(self):
- self.assertRaises(AttributeError, setattr, self.rorepo.tree()['AUTHORS'], 'someattr', 2)
+ self.assertRaises(
+ AttributeError, setattr, self.rorepo.tree()["AUTHORS"], "someattr", 2
+ )
diff --git a/test/test_clone.py b/test/test_clone.py
index e9f6714d..6bd944f9 100644
--- a/test/test_clone.py
+++ b/test/test_clone.py
@@ -17,16 +17,23 @@ class TestClone(TestBase):
@with_rw_directory
def test_checkout_in_non_empty_dir(self, rw_dir):
non_empty_dir = Path(rw_dir)
- garbage_file = non_empty_dir / 'not-empty'
- garbage_file.write_text('Garbage!')
+ garbage_file = non_empty_dir / "not-empty"
+ garbage_file.write_text("Garbage!")
# Verify that cloning into the non-empty dir fails while complaining about
# the target directory not being empty/non-existent
try:
self.rorepo.clone(non_empty_dir)
except git.GitCommandError as exc:
- self.assertTrue(exc.stderr, "GitCommandError's 'stderr' is unexpectedly empty")
- expr = re.compile(r'(?is).*\bfatal:\s+destination\s+path\b.*\bexists\b.*\bnot\b.*\bempty\s+directory\b')
- self.assertTrue(expr.search(exc.stderr), '"%s" does not match "%s"' % (expr.pattern, exc.stderr))
+ self.assertTrue(
+ exc.stderr, "GitCommandError's 'stderr' is unexpectedly empty"
+ )
+ expr = re.compile(
+ r"(?is).*\bfatal:\s+destination\s+path\b.*\bexists\b.*\bnot\b.*\bempty\s+directory\b"
+ )
+ self.assertTrue(
+ expr.search(exc.stderr),
+ '"%s" does not match "%s"' % (expr.pattern, exc.stderr),
+ )
else:
self.fail("GitCommandError not raised")
diff --git a/test/test_commit.py b/test/test_commit.py
index 40cf7dd2..17a4fe4f 100644
--- a/test/test_commit.py
+++ b/test/test_commit.py
@@ -19,12 +19,7 @@ from git import (
from git import Repo
from git.objects.util import tzoffset, utc
from git.repo.fun import touch
-from test.lib import (
- TestBase,
- with_rw_repo,
- fixture_path,
- StringProcessAdapter
-)
+from test.lib import TestBase, with_rw_repo, fixture_path, StringProcessAdapter
from test.lib import with_rw_directory
from gitdb import IStream
@@ -32,13 +27,14 @@ import os.path as osp
class TestCommitSerialization(TestBase):
-
- def assert_commit_serialization(self, rwrepo, commit_id, print_performance_info=False):
+ def assert_commit_serialization(
+ self, rwrepo, commit_id, print_performance_info=False
+ ):
"""traverse all commits in the history of commit identified by commit_id and check
if the serialization works.
:param print_performance_info: if True, we will show how fast we are"""
- ns = 0 # num serializations
- nds = 0 # num deserializations
+ ns = 0 # num serializations
+ nds = 0 # num deserializations
st = time.time()
for cm in rwrepo.commit(commit_id).traverse():
@@ -53,12 +49,22 @@ class TestCommitSerialization(TestBase):
stream.seek(0)
istream = rwrepo.odb.store(IStream(Commit.type, streamlen, stream))
- self.assertEqual(istream.hexsha, cm.hexsha.encode('ascii'))
-
- nc = Commit(rwrepo, Commit.NULL_BIN_SHA, cm.tree,
- cm.author, cm.authored_date, cm.author_tz_offset,
- cm.committer, cm.committed_date, cm.committer_tz_offset,
- cm.message, cm.parents, cm.encoding)
+ self.assertEqual(istream.hexsha, cm.hexsha.encode("ascii"))
+
+ nc = Commit(
+ rwrepo,
+ Commit.NULL_BIN_SHA,
+ cm.tree,
+ cm.author,
+ cm.authored_date,
+ cm.author_tz_offset,
+ cm.committer,
+ cm.committed_date,
+ cm.committer_tz_offset,
+ cm.message,
+ cm.parents,
+ cm.encoding,
+ )
self.assertEqual(nc.parents, cm.parents)
stream = BytesIO()
@@ -79,55 +85,65 @@ class TestCommitSerialization(TestBase):
elapsed = time.time() - st
if print_performance_info:
- print("Serialized %i and deserialized %i commits in %f s ( (%f, %f) commits / s"
- % (ns, nds, elapsed, ns / elapsed, nds / elapsed), file=sys.stderr)
+ print(
+ "Serialized %i and deserialized %i commits in %f s ( (%f, %f) commits / s"
+ % (ns, nds, elapsed, ns / elapsed, nds / elapsed),
+ file=sys.stderr,
+ )
# END handle performance info
class TestCommit(TestCommitSerialization):
-
def test_bake(self):
- commit = self.rorepo.commit('2454ae89983a4496a445ce347d7a41c0bb0ea7ae')
+ commit = self.rorepo.commit("2454ae89983a4496a445ce347d7a41c0bb0ea7ae")
# commits have no dict
- self.assertRaises(AttributeError, setattr, commit, 'someattr', 1)
+ self.assertRaises(AttributeError, setattr, commit, "someattr", 1)
commit.author # bake
self.assertEqual("Sebastian Thiel", commit.author.name)
self.assertEqual("byronimo@gmail.com", commit.author.email)
self.assertEqual(commit.author, commit.committer)
- assert isinstance(commit.authored_date, int) and isinstance(commit.committed_date, int)
- assert isinstance(commit.author_tz_offset, int) and isinstance(commit.committer_tz_offset, int)
- self.assertEqual(commit.message, "Added missing information to docstrings of commit and stats module\n")
+ assert isinstance(commit.authored_date, int) and isinstance(
+ commit.committed_date, int
+ )
+ assert isinstance(commit.author_tz_offset, int) and isinstance(
+ commit.committer_tz_offset, int
+ )
+ self.assertEqual(
+ commit.message,
+ "Added missing information to docstrings of commit and stats module\n",
+ )
def test_replace_no_changes(self):
- old_commit = self.rorepo.commit('2454ae89983a4496a445ce347d7a41c0bb0ea7ae')
+ old_commit = self.rorepo.commit("2454ae89983a4496a445ce347d7a41c0bb0ea7ae")
new_commit = old_commit.replace()
for attr in old_commit.__slots__:
assert getattr(new_commit, attr) == getattr(old_commit, attr)
def test_replace_new_sha(self):
- commit = self.rorepo.commit('2454ae89983a4496a445ce347d7a41c0bb0ea7ae')
- new_commit = commit.replace(message='Added replace method')
+ commit = self.rorepo.commit("2454ae89983a4496a445ce347d7a41c0bb0ea7ae")
+ new_commit = commit.replace(message="Added replace method")
- assert new_commit.hexsha == 'fc84cbecac1bd4ba4deaac07c1044889edd536e6'
- assert new_commit.message == 'Added replace method'
+ assert new_commit.hexsha == "fc84cbecac1bd4ba4deaac07c1044889edd536e6"
+ assert new_commit.message == "Added replace method"
def test_replace_invalid_attribute(self):
- commit = self.rorepo.commit('2454ae89983a4496a445ce347d7a41c0bb0ea7ae')
+ commit = self.rorepo.commit("2454ae89983a4496a445ce347d7a41c0bb0ea7ae")
with self.assertRaises(ValueError):
- commit.replace(badattr='This will never work')
+ commit.replace(badattr="This will never work")
def test_stats(self):
- commit = self.rorepo.commit('33ebe7acec14b25c5f84f35a664803fcab2f7781')
+ commit = self.rorepo.commit("33ebe7acec14b25c5f84f35a664803fcab2f7781")
stats = commit.stats
def check_entries(d):
assert isinstance(d, dict)
for key in ("insertions", "deletions", "lines"):
assert key in d
+
# END assertion helper
assert stats.files
assert stats.total
@@ -179,19 +195,24 @@ class TestCommit(TestCommitSerialization):
# at some point, both iterations should stop
self.assertEqual(list(bfirst)[-1], first)
- stoptraverse = self.rorepo.commit("254d04aa3180eb8b8daf7b7ff25f010cd69b4e7d").traverse(ignore_self=0,
- as_edge=True)
+ stoptraverse = self.rorepo.commit(
+ "254d04aa3180eb8b8daf7b7ff25f010cd69b4e7d"
+ ).traverse(ignore_self=0, as_edge=True)
stoptraverse_list = list(stoptraverse)
for itemtup in stoptraverse_list:
- self.assertIsInstance(itemtup, (tuple)) and self.assertEqual(len(itemtup), 2) # as_edge=True -> tuple
+ self.assertIsInstance(itemtup, (tuple)) and self.assertEqual(
+ len(itemtup), 2
+ ) # as_edge=True -> tuple
src, item = itemtup
self.assertIsInstance(item, Commit)
if src:
self.assertIsInstance(src, Commit)
else:
- self.assertIsNone(src) # ignore_self=0 -> first is (None, Commit)
+ self.assertIsNone(src) # ignore_self=0 -> first is (None, Commit)
- stoptraverse = self.rorepo.commit("254d04aa3180eb8b8daf7b7ff25f010cd69b4e7d").traverse(as_edge=True)
+ stoptraverse = self.rorepo.commit(
+ "254d04aa3180eb8b8daf7b7ff25f010cd69b4e7d"
+ ).traverse(as_edge=True)
self.assertEqual(len(next(stoptraverse)), 2)
# ignore self
@@ -201,10 +222,14 @@ class TestCommit(TestCommitSerialization):
self.assertEqual(len(list(start.traverse(ignore_self=False, depth=0))), 1)
# prune
- self.assertEqual(next(start.traverse(branch_first=1, prune=lambda i, d: i == p0)), p1)
+ self.assertEqual(
+ next(start.traverse(branch_first=1, prune=lambda i, d: i == p0)), p1
+ )
# predicate
- self.assertEqual(next(start.traverse(branch_first=1, predicate=lambda i, d: i == p1)), p1)
+ self.assertEqual(
+ next(start.traverse(branch_first=1, predicate=lambda i, d: i == p1)), p1
+ )
# traversal should stop when the beginning is reached
self.assertRaises(StopIteration, next, first.traverse())
@@ -220,64 +245,78 @@ class TestCommit(TestCommitSerialization):
self.assertEqual(all_commits, list(self.rorepo.iter_commits()))
# this includes merge commits
- mcomit = self.rorepo.commit('d884adc80c80300b4cc05321494713904ef1df2d')
+ mcomit = self.rorepo.commit("d884adc80c80300b4cc05321494713904ef1df2d")
assert mcomit in all_commits
# we can limit the result to paths
- ltd_commits = list(self.rorepo.iter_commits(paths='CHANGES'))
+ ltd_commits = list(self.rorepo.iter_commits(paths="CHANGES"))
assert ltd_commits and len(ltd_commits) < len(all_commits)
# show commits of multiple paths, resulting in a union of commits
- less_ltd_commits = list(Commit.iter_items(self.rorepo, 'master', paths=('CHANGES', 'AUTHORS')))
+ less_ltd_commits = list(
+ Commit.iter_items(self.rorepo, "master", paths=("CHANGES", "AUTHORS"))
+ )
assert len(ltd_commits) < len(less_ltd_commits)
class Child(Commit):
def __init__(self, *args, **kwargs):
super(Child, self).__init__(*args, **kwargs)
- child_commits = list(Child.iter_items(self.rorepo, 'master', paths=('CHANGES', 'AUTHORS')))
+ child_commits = list(
+ Child.iter_items(self.rorepo, "master", paths=("CHANGES", "AUTHORS"))
+ )
assert type(child_commits[0]) == Child
def test_iter_items(self):
# pretty not allowed
- self.assertRaises(ValueError, Commit.iter_items, self.rorepo, 'master', pretty="raw")
+ self.assertRaises(
+ ValueError, Commit.iter_items, self.rorepo, "master", pretty="raw"
+ )
def test_rev_list_bisect_all(self):
"""
'git rev-list --bisect-all' returns additional information
in the commit header. This test ensures that we properly parse it.
"""
- revs = self.rorepo.git.rev_list('933d23bf95a5bd1624fbcdf328d904e1fa173474',
- first_parent=True,
- bisect_all=True)
+ revs = self.rorepo.git.rev_list(
+ "933d23bf95a5bd1624fbcdf328d904e1fa173474",
+ first_parent=True,
+ bisect_all=True,
+ )
- commits = Commit._iter_from_process_or_stream(self.rorepo, StringProcessAdapter(revs.encode('ascii')))
+ commits = Commit._iter_from_process_or_stream(
+ self.rorepo, StringProcessAdapter(revs.encode("ascii"))
+ )
expected_ids = (
- '7156cece3c49544abb6bf7a0c218eb36646fad6d',
- '1f66cfbbce58b4b552b041707a12d437cc5f400a',
- '33ebe7acec14b25c5f84f35a664803fcab2f7781',
- '933d23bf95a5bd1624fbcdf328d904e1fa173474'
+ "7156cece3c49544abb6bf7a0c218eb36646fad6d",
+ "1f66cfbbce58b4b552b041707a12d437cc5f400a",
+ "33ebe7acec14b25c5f84f35a664803fcab2f7781",
+ "933d23bf95a5bd1624fbcdf328d904e1fa173474",
)
for sha1, commit in zip(expected_ids, commits):
self.assertEqual(sha1, commit.hexsha)
@with_rw_directory
def test_ambiguous_arg_iteration(self, rw_dir):
- rw_repo = Repo.init(osp.join(rw_dir, 'test_ambiguous_arg'))
- path = osp.join(str(rw_repo.working_tree_dir), 'master')
+ rw_repo = Repo.init(osp.join(rw_dir, "test_ambiguous_arg"))
+ path = osp.join(str(rw_repo.working_tree_dir), "master")
touch(path)
rw_repo.index.add([path])
- rw_repo.index.commit('initial commit')
+ rw_repo.index.commit("initial commit")
list(rw_repo.iter_commits(rw_repo.head.ref)) # should fail unless bug is fixed
def test_count(self):
- self.assertEqual(self.rorepo.tag('refs/tags/0.1.5').commit.count(), 143)
+ self.assertEqual(self.rorepo.tag("refs/tags/0.1.5").commit.count(), 143)
def test_list(self):
# This doesn't work anymore, as we will either attempt getattr with bytes, or compare 20 byte string
# with actual 20 byte bytes. This usage makes no sense anyway
- assert isinstance(Commit.list_items(self.rorepo, '0.1.5', max_count=5)[
- '5117c9c8a4d3af19a9958677e45cda9269de1541'], Commit)
+ assert isinstance(
+ Commit.list_items(self.rorepo, "0.1.5", max_count=5)[
+ "5117c9c8a4d3af19a9958677e45cda9269de1541"
+ ],
+ Commit,
+ )
def test_str(self):
commit = Commit(self.rorepo, Commit.NULL_BIN_SHA)
@@ -296,7 +335,7 @@ class TestCommit(TestCommitSerialization):
def test_iter_parents(self):
# should return all but ourselves, even if skip is defined
- c = self.rorepo.commit('0.1.5')
+ c = self.rorepo.commit("0.1.5")
for skip in (0, 1):
piter = c.iter_parents(skip=skip)
first_parent = next(piter)
@@ -308,18 +347,18 @@ class TestCommit(TestCommitSerialization):
name_rev = self.rorepo.head.commit.name_rev
assert isinstance(name_rev, str)
- @with_rw_repo('HEAD', bare=True)
+ @with_rw_repo("HEAD", bare=True)
def test_serialization(self, rwrepo):
# create all commits of our repo
- self.assert_commit_serialization(rwrepo, '0.1.6')
+ self.assert_commit_serialization(rwrepo, "0.1.6")
def test_serialization_unicode_support(self):
- self.assertEqual(Commit.default_encoding.lower(), 'utf-8')
+ self.assertEqual(Commit.default_encoding.lower(), "utf-8")
# create a commit with unicode in the message, and the author's name
# Verify its serialization and deserialization
- cmt = self.rorepo.commit('0.1.6')
- assert isinstance(cmt.message, str) # it automatically decodes it as such
+ cmt = self.rorepo.commit("0.1.6")
+ assert isinstance(cmt.message, str) # it automatically decodes it as such
assert isinstance(cmt.author.name, str) # same here
cmt.message = "üäêèß"
@@ -344,15 +383,15 @@ class TestCommit(TestCommitSerialization):
def test_invalid_commit(self):
cmt = self.rorepo.commit()
- with open(fixture_path('commit_invalid_data'), 'rb') as fd:
+ with open(fixture_path("commit_invalid_data"), "rb") as fd:
cmt._deserialize(fd)
- self.assertEqual(cmt.author.name, 'E.Azer Ko�o�o�oculu', cmt.author.name)
- self.assertEqual(cmt.author.email, 'azer@kodfabrik.com', cmt.author.email)
+ self.assertEqual(cmt.author.name, "E.Azer Ko�o�o�oculu", cmt.author.name)
+ self.assertEqual(cmt.author.email, "azer@kodfabrik.com", cmt.author.email)
def test_gpgsig(self):
cmt = self.rorepo.commit()
- with open(fixture_path('commit_with_gpgsig'), 'rb') as fd:
+ with open(fixture_path("commit_with_gpgsig"), "rb") as fd:
cmt._deserialize(fd)
fixture_sig = """-----BEGIN PGP SIGNATURE-----
@@ -379,7 +418,11 @@ JzJMZDRLQLFvnzqZuCjE
cstream = BytesIO()
cmt._serialize(cstream)
- assert re.search(r"^gpgsig <test\n dummy\n sig>$", cstream.getvalue().decode('ascii'), re.MULTILINE)
+ assert re.search(
+ r"^gpgsig <test\n dummy\n sig>$",
+ cstream.getvalue().decode("ascii"),
+ re.MULTILINE,
+ )
self.assert_gpgsig_deserialization(cstream)
@@ -391,10 +434,12 @@ JzJMZDRLQLFvnzqZuCjE
cmt.gpgsig = None
cstream = BytesIO()
cmt._serialize(cstream)
- assert not re.search(r"^gpgsig ", cstream.getvalue().decode('ascii'), re.MULTILINE)
+ assert not re.search(
+ r"^gpgsig ", cstream.getvalue().decode("ascii"), re.MULTILINE
+ )
def assert_gpgsig_deserialization(self, cstream):
- assert 'gpgsig' in 'precondition: need gpgsig'
+ assert "gpgsig" in "precondition: need gpgsig"
class RepoMock:
def __init__(self, bytestr):
@@ -407,29 +452,40 @@ JzJMZDRLQLFvnzqZuCjE
self.bytestr = bytestr
def stream(self, *args):
- stream = Mock(spec_set=['read'], return_value=self.bytestr)
+ stream = Mock(spec_set=["read"], return_value=self.bytestr)
stream.read.return_value = self.bytestr
- return ('binsha', 'typename', 'size', stream)
+ return ("binsha", "typename", "size", stream)
return ODBMock(self.bytestr)
repo_mock = RepoMock(cstream.getvalue())
for field in Commit.__slots__:
- c = Commit(repo_mock, b'x' * 20)
+ c = Commit(repo_mock, b"x" * 20)
assert getattr(c, field) is not None
def test_datetimes(self):
- commit = self.rorepo.commit('4251bd5')
+ commit = self.rorepo.commit("4251bd5")
self.assertEqual(commit.authored_date, 1255018625)
self.assertEqual(commit.committed_date, 1255026171)
- self.assertEqual(commit.authored_datetime,
- datetime(2009, 10, 8, 18, 17, 5, tzinfo=tzoffset(-7200)), commit.authored_datetime) # noqa
- self.assertEqual(commit.authored_datetime,
- datetime(2009, 10, 8, 16, 17, 5, tzinfo=utc), commit.authored_datetime)
- self.assertEqual(commit.committed_datetime,
- datetime(2009, 10, 8, 20, 22, 51, tzinfo=tzoffset(-7200)))
- self.assertEqual(commit.committed_datetime,
- datetime(2009, 10, 8, 18, 22, 51, tzinfo=utc), commit.committed_datetime)
+ self.assertEqual(
+ commit.authored_datetime,
+ datetime(2009, 10, 8, 18, 17, 5, tzinfo=tzoffset(-7200)),
+ commit.authored_datetime,
+ ) # noqa
+ self.assertEqual(
+ commit.authored_datetime,
+ datetime(2009, 10, 8, 16, 17, 5, tzinfo=utc),
+ commit.authored_datetime,
+ )
+ self.assertEqual(
+ commit.committed_datetime,
+ datetime(2009, 10, 8, 20, 22, 51, tzinfo=tzoffset(-7200)),
+ )
+ self.assertEqual(
+ commit.committed_datetime,
+ datetime(2009, 10, 8, 18, 22, 51, tzinfo=utc),
+ commit.committed_datetime,
+ )
def test_trailers(self):
KEY_1 = "Hello"
@@ -440,12 +496,18 @@ JzJMZDRLQLFvnzqZuCjE
# Check if KEY 1 & 2 with Value 1 & 2 is extracted from multiple msg variations
msgs = []
msgs.append(f"Subject\n\n{KEY_1}: {VALUE_1}\n{KEY_2}: {VALUE_2}\n")
- msgs.append(f"Subject\n \nSome body of a function\n \n{KEY_1}: {VALUE_1}\n{KEY_2}: {VALUE_2}\n")
- msgs.append(f"Subject\n \nSome body of a function\n\nnon-key: non-value\n\n{KEY_1}: {VALUE_1}\n{KEY_2}: {VALUE_2}\n")
- msgs.append(f"Subject\n \nSome multiline\n body of a function\n\nnon-key: non-value\n\n{KEY_1}: {VALUE_1}\n{KEY_2} : {VALUE_2}\n")
+ msgs.append(
+ f"Subject\n \nSome body of a function\n \n{KEY_1}: {VALUE_1}\n{KEY_2}: {VALUE_2}\n"
+ )
+ msgs.append(
+ f"Subject\n \nSome body of a function\n\nnon-key: non-value\n\n{KEY_1}: {VALUE_1}\n{KEY_2}: {VALUE_2}\n"
+ )
+ msgs.append(
+ f"Subject\n \nSome multiline\n body of a function\n\nnon-key: non-value\n\n{KEY_1}: {VALUE_1}\n{KEY_2} : {VALUE_2}\n"
+ )
for msg in msgs:
- commit = self.rorepo.commit('master')
+ commit = self.rorepo.commit("master")
commit = copy.copy(commit)
commit.message = msg
assert KEY_1 in commit.trailers.keys()
@@ -457,21 +519,27 @@ JzJMZDRLQLFvnzqZuCjE
msgs = []
msgs.append(f"Subject\n")
msgs.append(f"Subject\n\nBody with some\nText\n")
- msgs.append(f"Subject\n\nBody with\nText\n\nContinuation but\n doesn't contain colon\n")
- msgs.append(f"Subject\n\nBody with\nText\n\nContinuation but\n only contains one :\n")
+ msgs.append(
+ f"Subject\n\nBody with\nText\n\nContinuation but\n doesn't contain colon\n"
+ )
+ msgs.append(
+ f"Subject\n\nBody with\nText\n\nContinuation but\n only contains one :\n"
+ )
msgs.append(f"Subject\n\nBody with\nText\n\nKey: Value\nLine without colon\n")
msgs.append(f"Subject\n\nBody with\nText\n\nLine without colon\nKey: Value\n")
for msg in msgs:
- commit = self.rorepo.commit('master')
+ commit = self.rorepo.commit("master")
commit = copy.copy(commit)
commit.message = msg
assert len(commit.trailers.keys()) == 0
# check that only the last key value paragraph is evaluated
- commit = self.rorepo.commit('master')
+ commit = self.rorepo.commit("master")
commit = copy.copy(commit)
- commit.message = f"Subject\n\nMultiline\nBody\n\n{KEY_1}: {VALUE_1}\n\n{KEY_2}: {VALUE_2}\n"
+ commit.message = (
+ f"Subject\n\nMultiline\nBody\n\n{KEY_1}: {VALUE_1}\n\n{KEY_2}: {VALUE_2}\n"
+ )
assert KEY_1 not in commit.trailers.keys()
assert KEY_2 in commit.trailers.keys()
assert commit.trailers[KEY_2] == VALUE_2
diff --git a/test/test_config.py b/test/test_config.py
index 50d9b010..45677b0d 100644
--- a/test/test_config.py
+++ b/test/test_config.py
@@ -9,9 +9,7 @@ import io
import os
from unittest import mock
-from git import (
- GitConfigParser
-)
+from git import GitConfigParser
from git.config import _OMD, cp
from test.lib import (
TestCase,
@@ -24,7 +22,7 @@ import os.path as osp
from git.util import rmfile
-_tc_lock_fpaths = osp.join(osp.dirname(__file__), 'fixtures/*.lock')
+_tc_lock_fpaths = osp.join(osp.dirname(__file__), "fixtures/*.lock")
def _rm_lock_files():
@@ -39,7 +37,9 @@ class TestBase(TestCase):
def tearDown(self):
for lfp in glob.glob(_tc_lock_fpaths):
if osp.isfile(lfp):
- raise AssertionError('Previous TC left hanging git-lock file: {}'.format(lfp))
+ raise AssertionError(
+ "Previous TC left hanging git-lock file: {}".format(lfp)
+ )
def _to_memcache(self, file_path):
with open(file_path, "rb") as fp:
@@ -52,13 +52,16 @@ class TestBase(TestCase):
for filename in ("git_config", "git_config_global"):
file_obj = self._to_memcache(fixture_path(filename))
with GitConfigParser(file_obj, read_only=False) as w_config:
- w_config.read() # enforce reading
+ w_config.read() # enforce reading
assert w_config._sections
- w_config.write() # enforce writing
+ w_config.write() # enforce writing
# we stripped lines when reading, so the results differ
assert file_obj.getvalue()
- self.assertEqual(file_obj.getvalue(), self._to_memcache(fixture_path(filename)).getvalue())
+ self.assertEqual(
+ file_obj.getvalue(),
+ self._to_memcache(fixture_path(filename)).getvalue(),
+ )
# creating an additional config writer must fail due to exclusive access
with self.assertRaises(IOError):
@@ -91,29 +94,31 @@ class TestBase(TestCase):
# END for each filename
def test_includes_order(self):
- with GitConfigParser(list(map(fixture_path, ("git_config", "git_config_global")))) as r_config:
- r_config.read() # enforce reading
+ with GitConfigParser(
+ list(map(fixture_path, ("git_config", "git_config_global")))
+ ) as r_config:
+ r_config.read() # enforce reading
# Simple inclusions, again checking them taking precedence
- assert r_config.get_value('sec', 'var0') == "value0_included"
+ assert r_config.get_value("sec", "var0") == "value0_included"
# This one should take the git_config_global value since included
# values must be considered as soon as they get them
- assert r_config.get_value('diff', 'tool') == "meld"
+ assert r_config.get_value("diff", "tool") == "meld"
try:
- assert r_config.get_value('sec', 'var1') == "value1_main"
+ assert r_config.get_value("sec", "var1") == "value1_main"
except AssertionError as e:
raise SkipTest(
- 'Known failure -- included values are not in effect right away'
+ "Known failure -- included values are not in effect right away"
) from e
@with_rw_directory
def test_lock_reentry(self, rw_dir):
- fpl = osp.join(rw_dir, 'l')
+ fpl = osp.join(rw_dir, "l")
gcp = GitConfigParser(fpl, read_only=False)
with gcp as cw:
- cw.set_value('include', 'some_value', 'a')
+ cw.set_value("include", "some_value", "a")
# entering again locks the file again...
with gcp as cw:
- cw.set_value('include', 'some_other_value', 'b')
+ cw.set_value("include", "some_other_value", "b")
# ...so creating an additional config writer must fail due to exclusive access
with self.assertRaises(IOError):
GitConfigParser(fpl, read_only=False)
@@ -136,10 +141,12 @@ class TestBase(TestCase):
ev += " end\n"
ev += " File.open(%(%A), %(w)) {|f| f.write(b)}\n"
ev += " exit 1 if b.include?(%(<)*%L)'"
- self.assertEqual(config.get('merge "railsschema"', 'driver'), ev)
- self.assertEqual(config.get('alias', 'lg'),
- "log --graph --pretty=format:'%Cred%h%Creset -%C(yellow)%d%Creset %s %Cgreen(%cr)%Creset'"
- " --abbrev-commit --date=relative")
+ self.assertEqual(config.get('merge "railsschema"', "driver"), ev)
+ self.assertEqual(
+ config.get("alias", "lg"),
+ "log --graph --pretty=format:'%Cred%h%Creset -%C(yellow)%d%Creset %s %Cgreen(%cr)%Creset'"
+ " --abbrev-commit --date=relative",
+ )
self.assertEqual(len(config.sections()), 23)
def test_base(self):
@@ -186,47 +193,51 @@ class TestBase(TestCase):
@with_rw_directory
def test_config_include(self, rw_dir):
def write_test_value(cw, value):
- cw.set_value(value, 'value', value)
+ cw.set_value(value, "value", value)
+
# end
def check_test_value(cr, value):
- assert cr.get_value(value, 'value') == value
+ assert cr.get_value(value, "value") == value
+
# end
# PREPARE CONFIG FILE A
- fpa = osp.join(rw_dir, 'a')
+ fpa = osp.join(rw_dir, "a")
with GitConfigParser(fpa, read_only=False) as cw:
- write_test_value(cw, 'a')
-
- fpb = osp.join(rw_dir, 'b')
- fpc = osp.join(rw_dir, 'c')
- cw.set_value('include', 'relative_path_b', 'b')
- cw.set_value('include', 'doesntexist', 'foobar')
- cw.set_value('include', 'relative_cycle_a_a', 'a')
- cw.set_value('include', 'absolute_cycle_a_a', fpa)
+ write_test_value(cw, "a")
+
+ fpb = osp.join(rw_dir, "b")
+ fpc = osp.join(rw_dir, "c")
+ cw.set_value("include", "relative_path_b", "b")
+ cw.set_value("include", "doesntexist", "foobar")
+ cw.set_value("include", "relative_cycle_a_a", "a")
+ cw.set_value("include", "absolute_cycle_a_a", fpa)
assert osp.exists(fpa)
# PREPARE CONFIG FILE B
with GitConfigParser(fpb, read_only=False) as cw:
- write_test_value(cw, 'b')
- cw.set_value('include', 'relative_cycle_b_a', 'a')
- cw.set_value('include', 'absolute_cycle_b_a', fpa)
- cw.set_value('include', 'relative_path_c', 'c')
- cw.set_value('include', 'absolute_path_c', fpc)
+ write_test_value(cw, "b")
+ cw.set_value("include", "relative_cycle_b_a", "a")
+ cw.set_value("include", "absolute_cycle_b_a", fpa)
+ cw.set_value("include", "relative_path_c", "c")
+ cw.set_value("include", "absolute_path_c", fpc)
# PREPARE CONFIG FILE C
with GitConfigParser(fpc, read_only=False) as cw:
- write_test_value(cw, 'c')
+ write_test_value(cw, "c")
with GitConfigParser(fpa, read_only=True) as cr:
- for tv in ('a', 'b', 'c'):
+ for tv in ("a", "b", "c"):
check_test_value(cr, tv)
# end for each test to verify
- assert len(cr.items('include')) == 8, "Expected all include sections to be merged"
+ assert (
+ len(cr.items("include")) == 8
+ ), "Expected all include sections to be merged"
# test writable config writers - assure write-back doesn't involve includes
with GitConfigParser(fpa, read_only=False, merge_includes=True) as cw:
- tv = 'x'
+ tv = "x"
write_test_value(cw, tv)
with GitConfigParser(fpa, read_only=True) as cr:
@@ -252,7 +263,7 @@ class TestBase(TestCase):
# Initiate config files.
path1 = osp.join(rw_dir, "config1")
path2 = osp.join(rw_dir, "config2")
- template = "[includeIf \"{}:{}\"]\n path={}\n"
+ template = '[includeIf "{}:{}"]\n path={}\n'
with open(path1, "w") as stream:
stream.write(template.format("gitdir", git_dir, path2))
@@ -319,7 +330,7 @@ class TestBase(TestCase):
# Initiate config files.
path1 = osp.join(rw_dir, "config1")
path2 = osp.join(rw_dir, "config2")
- template = "[includeIf \"onbranch:{}\"]\n path={}\n"
+ template = '[includeIf "onbranch:{}"]\n path={}\n'
# Ensure that config is included is branch is correct.
with open(path1, "w") as stream:
@@ -356,14 +367,14 @@ class TestBase(TestCase):
# Ensure that config is ignored when active branch cannot be found.
with open(path1, "w") as stream:
- stream.write("[includeIf \"onbranch:foo\"]\n path=/path\n")
+ stream.write('[includeIf "onbranch:foo"]\n path=/path\n')
with GitConfigParser(path1, repo=repo) as config:
assert not config._has_includes()
assert config._included_paths() == []
def test_rename(self):
- file_obj = self._to_memcache(fixture_path('git_config'))
+ file_obj = self._to_memcache(fixture_path("git_config"))
with GitConfigParser(file_obj, read_only=False, merge_includes=False) as cw:
with self.assertRaises(ValueError):
cw.rename_section("doesntexist", "foo")
@@ -371,127 +382,161 @@ class TestBase(TestCase):
cw.rename_section("core", "include")
nn = "bee"
- assert cw.rename_section('core', nn) is cw
- assert not cw.has_section('core')
+ assert cw.rename_section("core", nn) is cw
+ assert not cw.has_section("core")
assert len(cw.items(nn)) == 4
def test_complex_aliases(self):
- file_obj = self._to_memcache(fixture_path('.gitconfig'))
+ file_obj = self._to_memcache(fixture_path(".gitconfig"))
with GitConfigParser(file_obj, read_only=False) as w_config:
- self.assertEqual(w_config.get('alias', 'rbi'), '"!g() { git rebase -i origin/${1:-master} ; } ; g"')
- self.assertEqual(file_obj.getvalue(), self._to_memcache(fixture_path('.gitconfig')).getvalue())
+ self.assertEqual(
+ w_config.get("alias", "rbi"),
+ '"!g() { git rebase -i origin/${1:-master} ; } ; g"',
+ )
+ self.assertEqual(
+ file_obj.getvalue(),
+ self._to_memcache(fixture_path(".gitconfig")).getvalue(),
+ )
def test_empty_config_value(self):
- cr = GitConfigParser(fixture_path('git_config_with_empty_value'), read_only=True)
+ cr = GitConfigParser(
+ fixture_path("git_config_with_empty_value"), read_only=True
+ )
- assert cr.get_value('core', 'filemode'), "Should read keys with values"
+ assert cr.get_value("core", "filemode"), "Should read keys with values"
with self.assertRaises(cp.NoOptionError):
- cr.get_value('color', 'ui')
+ cr.get_value("color", "ui")
def test_multiple_values(self):
- file_obj = self._to_memcache(fixture_path('git_config_multiple'))
+ file_obj = self._to_memcache(fixture_path("git_config_multiple"))
with GitConfigParser(file_obj, read_only=False) as cw:
- self.assertEqual(cw.get('section0', 'option0'), 'value0')
- self.assertEqual(cw.get_values('section0', 'option0'), ['value0'])
- self.assertEqual(cw.items('section0'), [('option0', 'value0')])
+ self.assertEqual(cw.get("section0", "option0"), "value0")
+ self.assertEqual(cw.get_values("section0", "option0"), ["value0"])
+ self.assertEqual(cw.items("section0"), [("option0", "value0")])
# Where there are multiple values, "get" returns the last.
- self.assertEqual(cw.get('section1', 'option1'), 'value1b')
- self.assertEqual(cw.get_values('section1', 'option1'),
- ['value1a', 'value1b'])
- self.assertEqual(cw.items('section1'),
- [('option1', 'value1b'),
- ('other_option1', 'other_value1')])
- self.assertEqual(cw.items_all('section1'),
- [('option1', ['value1a', 'value1b']),
- ('other_option1', ['other_value1'])])
+ self.assertEqual(cw.get("section1", "option1"), "value1b")
+ self.assertEqual(
+ cw.get_values("section1", "option1"), ["value1a", "value1b"]
+ )
+ self.assertEqual(
+ cw.items("section1"),
+ [("option1", "value1b"), ("other_option1", "other_value1")],
+ )
+ self.assertEqual(
+ cw.items_all("section1"),
+ [
+ ("option1", ["value1a", "value1b"]),
+ ("other_option1", ["other_value1"]),
+ ],
+ )
with self.assertRaises(KeyError):
- cw.get_values('section1', 'missing')
+ cw.get_values("section1", "missing")
- self.assertEqual(cw.get_values('section1', 'missing', 1), [1])
- self.assertEqual(cw.get_values('section1', 'missing', 's'), ['s'])
+ self.assertEqual(cw.get_values("section1", "missing", 1), [1])
+ self.assertEqual(cw.get_values("section1", "missing", "s"), ["s"])
def test_multiple_values_rename(self):
- file_obj = self._to_memcache(fixture_path('git_config_multiple'))
+ file_obj = self._to_memcache(fixture_path("git_config_multiple"))
with GitConfigParser(file_obj, read_only=False) as cw:
- cw.rename_section('section1', 'section2')
+ cw.rename_section("section1", "section2")
cw.write()
file_obj.seek(0)
cr = GitConfigParser(file_obj, read_only=True)
- self.assertEqual(cr.get_value('section2', 'option1'), 'value1b')
- self.assertEqual(cr.get_values('section2', 'option1'),
- ['value1a', 'value1b'])
- self.assertEqual(cr.items('section2'),
- [('option1', 'value1b'),
- ('other_option1', 'other_value1')])
- self.assertEqual(cr.items_all('section2'),
- [('option1', ['value1a', 'value1b']),
- ('other_option1', ['other_value1'])])
+ self.assertEqual(cr.get_value("section2", "option1"), "value1b")
+ self.assertEqual(
+ cr.get_values("section2", "option1"), ["value1a", "value1b"]
+ )
+ self.assertEqual(
+ cr.items("section2"),
+ [("option1", "value1b"), ("other_option1", "other_value1")],
+ )
+ self.assertEqual(
+ cr.items_all("section2"),
+ [
+ ("option1", ["value1a", "value1b"]),
+ ("other_option1", ["other_value1"]),
+ ],
+ )
def test_multiple_to_single(self):
- file_obj = self._to_memcache(fixture_path('git_config_multiple'))
+ file_obj = self._to_memcache(fixture_path("git_config_multiple"))
with GitConfigParser(file_obj, read_only=False) as cw:
- cw.set_value('section1', 'option1', 'value1c')
+ cw.set_value("section1", "option1", "value1c")
cw.write()
file_obj.seek(0)
cr = GitConfigParser(file_obj, read_only=True)
- self.assertEqual(cr.get_value('section1', 'option1'), 'value1c')
- self.assertEqual(cr.get_values('section1', 'option1'), ['value1c'])
- self.assertEqual(cr.items('section1'),
- [('option1', 'value1c'),
- ('other_option1', 'other_value1')])
- self.assertEqual(cr.items_all('section1'),
- [('option1', ['value1c']),
- ('other_option1', ['other_value1'])])
+ self.assertEqual(cr.get_value("section1", "option1"), "value1c")
+ self.assertEqual(cr.get_values("section1", "option1"), ["value1c"])
+ self.assertEqual(
+ cr.items("section1"),
+ [("option1", "value1c"), ("other_option1", "other_value1")],
+ )
+ self.assertEqual(
+ cr.items_all("section1"),
+ [("option1", ["value1c"]), ("other_option1", ["other_value1"])],
+ )
def test_single_to_multiple(self):
- file_obj = self._to_memcache(fixture_path('git_config_multiple'))
+ file_obj = self._to_memcache(fixture_path("git_config_multiple"))
with GitConfigParser(file_obj, read_only=False) as cw:
- cw.add_value('section1', 'other_option1', 'other_value1a')
+ cw.add_value("section1", "other_option1", "other_value1a")
cw.write()
file_obj.seek(0)
cr = GitConfigParser(file_obj, read_only=True)
- self.assertEqual(cr.get_value('section1', 'option1'), 'value1b')
- self.assertEqual(cr.get_values('section1', 'option1'),
- ['value1a', 'value1b'])
- self.assertEqual(cr.get_value('section1', 'other_option1'),
- 'other_value1a')
- self.assertEqual(cr.get_values('section1', 'other_option1'),
- ['other_value1', 'other_value1a'])
- self.assertEqual(cr.items('section1'),
- [('option1', 'value1b'),
- ('other_option1', 'other_value1a')])
+ self.assertEqual(cr.get_value("section1", "option1"), "value1b")
+ self.assertEqual(
+ cr.get_values("section1", "option1"), ["value1a", "value1b"]
+ )
+ self.assertEqual(cr.get_value("section1", "other_option1"), "other_value1a")
+ self.assertEqual(
+ cr.get_values("section1", "other_option1"),
+ ["other_value1", "other_value1a"],
+ )
self.assertEqual(
- cr.items_all('section1'),
- [('option1', ['value1a', 'value1b']),
- ('other_option1', ['other_value1', 'other_value1a'])])
+ cr.items("section1"),
+ [("option1", "value1b"), ("other_option1", "other_value1a")],
+ )
+ self.assertEqual(
+ cr.items_all("section1"),
+ [
+ ("option1", ["value1a", "value1b"]),
+ ("other_option1", ["other_value1", "other_value1a"]),
+ ],
+ )
def test_add_to_multiple(self):
- file_obj = self._to_memcache(fixture_path('git_config_multiple'))
+ file_obj = self._to_memcache(fixture_path("git_config_multiple"))
with GitConfigParser(file_obj, read_only=False) as cw:
- cw.add_value('section1', 'option1', 'value1c')
+ cw.add_value("section1", "option1", "value1c")
cw.write()
file_obj.seek(0)
cr = GitConfigParser(file_obj, read_only=True)
- self.assertEqual(cr.get_value('section1', 'option1'), 'value1c')
- self.assertEqual(cr.get_values('section1', 'option1'),
- ['value1a', 'value1b', 'value1c'])
- self.assertEqual(cr.items('section1'),
- [('option1', 'value1c'),
- ('other_option1', 'other_value1')])
- self.assertEqual(cr.items_all('section1'),
- [('option1', ['value1a', 'value1b', 'value1c']),
- ('other_option1', ['other_value1'])])
+ self.assertEqual(cr.get_value("section1", "option1"), "value1c")
+ self.assertEqual(
+ cr.get_values("section1", "option1"), ["value1a", "value1b", "value1c"]
+ )
+ self.assertEqual(
+ cr.items("section1"),
+ [("option1", "value1c"), ("other_option1", "other_value1")],
+ )
+ self.assertEqual(
+ cr.items_all("section1"),
+ [
+ ("option1", ["value1a", "value1b", "value1c"]),
+ ("other_option1", ["other_value1"]),
+ ],
+ )
def test_setlast(self):
# Test directly, not covered by higher-level tests.
omd = _OMD()
- omd.setlast('key', 'value1')
- self.assertEqual(omd['key'], 'value1')
- self.assertEqual(omd.getall('key'), ['value1'])
- omd.setlast('key', 'value2')
- self.assertEqual(omd['key'], 'value2')
- self.assertEqual(omd.getall('key'), ['value2'])
+ omd.setlast("key", "value1")
+ self.assertEqual(omd["key"], "value1")
+ self.assertEqual(omd.getall("key"), ["value1"])
+ omd.setlast("key", "value2")
+ self.assertEqual(omd["key"], "value2")
+ self.assertEqual(omd.getall("key"), ["value2"])
diff --git a/test/test_db.py b/test/test_db.py
index f9090fdd..228c70e7 100644
--- a/test/test_db.py
+++ b/test/test_db.py
@@ -12,9 +12,8 @@ import os.path as osp
class TestDB(TestBase):
-
def test_base(self):
- gdb = GitCmdObjectDB(osp.join(self.rorepo.git_dir, 'objects'), self.rorepo.git)
+ gdb = GitCmdObjectDB(osp.join(self.rorepo.git_dir, "objects"), self.rorepo.git)
# partial to complete - works with everything
hexsha = bin_to_hex(gdb.partial_to_complete_sha_hex("0.1.6"))
diff --git a/test/test_diff.py b/test/test_diff.py
index 92e27f5d..10f5d6db 100644
--- a/test/test_diff.py
+++ b/test/test_diff.py
@@ -28,18 +28,18 @@ import os.path as osp
def to_raw(input):
- return input.replace(b'\t', b'\x00')
+ return input.replace(b"\t", b"\x00")
@ddt.ddt
class TestDiff(TestBase):
-
def setUp(self):
self.repo_dir = tempfile.mkdtemp()
self.submodule_dir = tempfile.mkdtemp()
def tearDown(self):
import gc
+
gc.collect()
shutil.rmtree(self.repo_dir)
shutil.rmtree(self.submodule_dir)
@@ -53,9 +53,9 @@ class TestDiff(TestBase):
assert isinstance(diff.b_mode, int)
if diff.a_blob:
- assert not diff.a_blob.path.endswith('\n')
+ assert not diff.a_blob.path.endswith("\n")
if diff.b_blob:
- assert not diff.b_blob.path.endswith('\n')
+ assert not diff.b_blob.path.endswith("\n")
# END for each diff
return diffs
@@ -63,38 +63,47 @@ class TestDiff(TestBase):
def test_diff_with_staged_file(self, rw_dir):
# SETUP INDEX WITH MULTIPLE STAGES
r = Repo.init(rw_dir)
- fp = osp.join(rw_dir, 'hello.txt')
- with open(fp, 'w') as fs:
+ fp = osp.join(rw_dir, "hello.txt")
+ with open(fp, "w") as fs:
fs.write("hello world")
r.git.add(Git.polish_url(fp))
r.git.commit(message="init")
- with open(fp, 'w') as fs:
+ with open(fp, "w") as fs:
fs.write("Hola Mundo")
r.git.add(Git.polish_url(fp))
- self.assertEqual(len(r.index.diff("HEAD", create_patch=True)), 1,
- "create_patch should generate patch of diff to HEAD")
+ self.assertEqual(
+ len(r.index.diff("HEAD", create_patch=True)),
+ 1,
+ "create_patch should generate patch of diff to HEAD",
+ )
r.git.commit(message="change on master")
- self.assertEqual(len(r.index.diff("HEAD", create_patch=True)), 0,
- "create_patch should generate no patch, already on HEAD")
-
- r.git.checkout('HEAD~1', b='topic')
- with open(fp, 'w') as fs:
+ self.assertEqual(
+ len(r.index.diff("HEAD", create_patch=True)),
+ 0,
+ "create_patch should generate no patch, already on HEAD",
+ )
+
+ r.git.checkout("HEAD~1", b="topic")
+ with open(fp, "w") as fs:
fs.write("Hallo Welt")
r.git.commit(all=True, message="change on topic branch")
# there must be a merge-conflict
with self.assertRaises(GitCommandError):
- r.git.cherry_pick('master')
+ r.git.cherry_pick("master")
# Now do the actual testing - this should just work
self.assertEqual(len(r.index.diff(None)), 2)
- self.assertEqual(len(r.index.diff(None, create_patch=True)), 0,
- "This should work, but doesn't right now ... it's OK")
+ self.assertEqual(
+ len(r.index.diff(None, create_patch=True)),
+ 0,
+ "This should work, but doesn't right now ... it's OK",
+ )
def test_list_from_string_new_mode(self):
- output = StringProcessAdapter(fixture('diff_new_mode'))
+ output = StringProcessAdapter(fixture("diff_new_mode"))
diffs = Diff._index_from_patch_format(self.rorepo, output)
self._assert_diff_format(diffs)
@@ -102,7 +111,7 @@ class TestDiff(TestBase):
self.assertEqual(8, len(diffs[0].diff.splitlines()))
def test_diff_with_rename(self):
- output = StringProcessAdapter(fixture('diff_rename'))
+ output = StringProcessAdapter(fixture("diff_rename"))
diffs = Diff._index_from_patch_format(self.rorepo, output)
self._assert_diff_format(diffs)
@@ -111,26 +120,26 @@ class TestDiff(TestBase):
diff = diffs[0]
self.assertTrue(diff.renamed_file)
self.assertTrue(diff.renamed)
- self.assertEqual(diff.rename_from, 'Jérôme')
- self.assertEqual(diff.rename_to, 'müller')
- self.assertEqual(diff.raw_rename_from, b'J\xc3\xa9r\xc3\xb4me')
- self.assertEqual(diff.raw_rename_to, b'm\xc3\xbcller')
+ self.assertEqual(diff.rename_from, "Jérôme")
+ self.assertEqual(diff.rename_to, "müller")
+ self.assertEqual(diff.raw_rename_from, b"J\xc3\xa9r\xc3\xb4me")
+ self.assertEqual(diff.raw_rename_to, b"m\xc3\xbcller")
assert isinstance(str(diff), str)
- output = StringProcessAdapter(to_raw(fixture('diff_rename_raw')))
+ output = StringProcessAdapter(to_raw(fixture("diff_rename_raw")))
diffs = Diff._index_from_raw_format(self.rorepo, output)
self.assertEqual(len(diffs), 1)
diff = diffs[0]
self.assertIsNotNone(diff.renamed_file)
self.assertIsNotNone(diff.renamed)
- self.assertEqual(diff.rename_from, 'this')
- self.assertEqual(diff.rename_to, 'that')
- self.assertEqual(diff.change_type, 'R')
+ self.assertEqual(diff.rename_from, "this")
+ self.assertEqual(diff.rename_to, "that")
+ self.assertEqual(diff.change_type, "R")
self.assertEqual(diff.score, 100)
- self.assertEqual(len(list(diffs.iter_change_type('R'))), 1)
+ self.assertEqual(len(list(diffs.iter_change_type("R"))), 1)
def test_diff_with_copied_file(self):
- output = StringProcessAdapter(fixture('diff_copied_mode'))
+ output = StringProcessAdapter(fixture("diff_copied_mode"))
diffs = Diff._index_from_patch_format(self.rorepo, output)
self._assert_diff_format(diffs)
@@ -138,146 +147,170 @@ class TestDiff(TestBase):
diff = diffs[0]
self.assertTrue(diff.copied_file)
- self.assertTrue(diff.a_path, 'test1.txt')
- self.assertTrue(diff.b_path, 'test2.txt')
+ self.assertTrue(diff.a_path, "test1.txt")
+ self.assertTrue(diff.b_path, "test2.txt")
assert isinstance(str(diff), str)
- output = StringProcessAdapter(to_raw(fixture('diff_copied_mode_raw')))
+ output = StringProcessAdapter(to_raw(fixture("diff_copied_mode_raw")))
diffs = Diff._index_from_raw_format(self.rorepo, output)
self.assertEqual(len(diffs), 1)
diff = diffs[0]
- self.assertEqual(diff.change_type, 'C')
+ self.assertEqual(diff.change_type, "C")
self.assertEqual(diff.score, 100)
- self.assertEqual(diff.a_path, 'test1.txt')
- self.assertEqual(diff.b_path, 'test2.txt')
- self.assertEqual(len(list(diffs.iter_change_type('C'))), 1)
+ self.assertEqual(diff.a_path, "test1.txt")
+ self.assertEqual(diff.b_path, "test2.txt")
+ self.assertEqual(len(list(diffs.iter_change_type("C"))), 1)
def test_diff_with_change_in_type(self):
- output = StringProcessAdapter(fixture('diff_change_in_type'))
+ output = StringProcessAdapter(fixture("diff_change_in_type"))
diffs = Diff._index_from_patch_format(self.rorepo, output)
self._assert_diff_format(diffs)
self.assertEqual(2, len(diffs))
diff = diffs[0]
self.assertIsNotNone(diff.deleted_file)
- self.assertEqual(diff.a_path, 'this')
- self.assertEqual(diff.b_path, 'this')
+ self.assertEqual(diff.a_path, "this")
+ self.assertEqual(diff.b_path, "this")
assert isinstance(str(diff), str)
diff = diffs[1]
self.assertEqual(diff.a_path, None)
- self.assertEqual(diff.b_path, 'this')
+ self.assertEqual(diff.b_path, "this")
self.assertIsNotNone(diff.new_file)
assert isinstance(str(diff), str)
- output = StringProcessAdapter(to_raw(fixture('diff_change_in_type_raw')))
+ output = StringProcessAdapter(to_raw(fixture("diff_change_in_type_raw")))
diffs = Diff._index_from_raw_format(self.rorepo, output)
self.assertEqual(len(diffs), 1)
diff = diffs[0]
self.assertEqual(diff.rename_from, None)
self.assertEqual(diff.rename_to, None)
- self.assertEqual(diff.change_type, 'T')
- self.assertEqual(len(list(diffs.iter_change_type('T'))), 1)
+ self.assertEqual(diff.change_type, "T")
+ self.assertEqual(len(list(diffs.iter_change_type("T"))), 1)
def test_diff_of_modified_files_not_added_to_the_index(self):
- output = StringProcessAdapter(to_raw(fixture('diff_abbrev-40_full-index_M_raw_no-color')))
+ output = StringProcessAdapter(
+ to_raw(fixture("diff_abbrev-40_full-index_M_raw_no-color"))
+ )
diffs = Diff._index_from_raw_format(self.rorepo, output)
- self.assertEqual(len(diffs), 1, 'one modification')
- self.assertEqual(len(list(diffs.iter_change_type('M'))), 1, 'one modification')
- self.assertEqual(diffs[0].change_type, 'M')
- self.assertIsNone(diffs[0].b_blob,)
+ self.assertEqual(len(diffs), 1, "one modification")
+ self.assertEqual(len(list(diffs.iter_change_type("M"))), 1, "one modification")
+ self.assertEqual(diffs[0].change_type, "M")
+ self.assertIsNone(
+ diffs[0].b_blob,
+ )
@ddt.data(
- (Diff._index_from_patch_format, 'diff_patch_binary'),
- (Diff._index_from_raw_format, 'diff_raw_binary')
+ (Diff._index_from_patch_format, "diff_patch_binary"),
+ (Diff._index_from_raw_format, "diff_raw_binary"),
)
def test_binary_diff(self, case):
method, file_name = case
res = method(None, StringProcessAdapter(fixture(file_name)))
self.assertEqual(len(res), 1)
- self.assertEqual(len(list(res.iter_change_type('M'))), 1)
+ self.assertEqual(len(list(res.iter_change_type("M"))), 1)
if res[0].diff:
- self.assertEqual(res[0].diff,
- b"Binary files a/rps and b/rps differ\n",
- "in patch mode, we get a diff text")
+ self.assertEqual(
+ res[0].diff,
+ b"Binary files a/rps and b/rps differ\n",
+ "in patch mode, we get a diff text",
+ )
self.assertIsNotNone(str(res[0]), "This call should just work")
def test_diff_index(self):
- output = StringProcessAdapter(fixture('diff_index_patch'))
+ output = StringProcessAdapter(fixture("diff_index_patch"))
res = Diff._index_from_patch_format(None, output)
self.assertEqual(len(res), 6)
for dr in res:
- self.assertTrue(dr.diff.startswith(b'@@'), dr)
- self.assertIsNotNone(str(dr), "Diff to string conversion should be possible")
+ self.assertTrue(dr.diff.startswith(b"@@"), dr)
+ self.assertIsNotNone(
+ str(dr), "Diff to string conversion should be possible"
+ )
# end for each diff
dr = res[3]
assert dr.diff.endswith(b"+Binary files a/rps and b/rps differ\n")
def test_diff_index_raw_format(self):
- output = StringProcessAdapter(fixture('diff_index_raw'))
+ output = StringProcessAdapter(fixture("diff_index_raw"))
res = Diff._index_from_raw_format(None, output)
self.assertIsNotNone(res[0].deleted_file)
- self.assertIsNone(res[0].b_path,)
+ self.assertIsNone(
+ res[0].b_path,
+ )
@unittest.skip("This currently fails and would need someone to improve diff parsing")
def test_diff_file_with_colon(self):
    """Exercise _handle_diff_line on a file name containing a colon (known-broken)."""
    raw_output = fixture("diff_file_with_colon")
    collected = []
    Diff._handle_diff_line(raw_output, None, collected)
def test_diff_initial_commit(self):
    """Diffing the initial commit against NULL_TREE exposes its newly added files."""
    root_commit = self.rorepo.commit("33ebe7acec14b25c5f84f35a664803fcab2f7781")

    # Without creating a patch...
    entry = root_commit.diff(NULL_TREE)[0]
    self.assertEqual(entry.b_path, "CHANGES")
    self.assertIsNotNone(entry.new_file)
    self.assertEqual(entry.diff, "")

    # ...and with creating a patch
    entry = root_commit.diff(NULL_TREE, create_patch=True)[0]
    self.assertIsNone(entry.a_path, repr(entry.a_path))
    self.assertEqual(entry.b_path, "CHANGES", repr(entry.b_path))
    self.assertIsNotNone(entry.new_file)
    self.assertEqual(entry.diff, fixture("diff_initial"))
def test_diff_unsafe_paths(self):
    """Quoted and escaped path names in patch output are decoded to their real names."""
    proc = StringProcessAdapter(fixture("diff_patch_unsafe_paths"))
    diffs = Diff._index_from_patch_format(None, proc)

    # The "Additions" - compare each parsed b_path against its expected decoding.
    expected_b_paths = [
        "path/ starting with a space",
        'path/"with-quotes"',
        "path/'with-single-quotes'",
        "path/ending in a space ",
        "path/with\ttab",
        "path/with\nnewline",
        "path/with spaces",
        "path/with-question-mark?",
        "path/¯\\_(ツ)_|¯",
        "path/💩.txt",
    ]
    for position, expected in enumerate(expected_b_paths):
        self.assertEqual(diffs[position].b_path, expected)
    self.assertEqual(diffs[9].b_rawpath, b"path/\xf0\x9f\x92\xa9.txt")
    self.assertEqual(diffs[10].b_path, "path/�-invalid-unicode-path.txt")
    self.assertEqual(diffs[10].b_rawpath, b"path/\x80-invalid-unicode-path.txt")

    # The "Moves"
    # NOTE: The path prefixes a/ and b/ here are legit! We're actually
    # verifying that it's not "a/a/" that shows up, see the fixture data.
    self.assertEqual(diffs[11].a_path, "a/with spaces")
    self.assertEqual(diffs[11].b_path, "b/with some spaces")
    self.assertEqual(diffs[12].a_path, "a/ending in a space ")
    self.assertEqual(diffs[12].b_path, "b/ending with space ")
    self.assertEqual(diffs[13].a_path, 'a/"with-quotes"')
    self.assertEqual(diffs[13].b_path, 'b/"with even more quotes"')
def test_diff_patch_format(self):
# test all of the 'old' format diffs for completeness - it should at least
# be able to deal with it
- fixtures = ("diff_2", "diff_2f", "diff_f", "diff_i", "diff_mode_only",
- "diff_new_mode", "diff_numstat", "diff_p", "diff_rename",
- "diff_tree_numstat_root", "diff_patch_unsafe_paths")
+ fixtures = (
+ "diff_2",
+ "diff_2f",
+ "diff_f",
+ "diff_i",
+ "diff_mode_only",
+ "diff_new_mode",
+ "diff_numstat",
+ "diff_p",
+ "diff_rename",
+ "diff_tree_numstat_root",
+ "diff_patch_unsafe_paths",
+ )
for fixture_name in fixtures:
diff_proc = StringProcessAdapter(fixture(fixture_name))
@@ -285,10 +318,12 @@ class TestDiff(TestBase):
# END for each fixture
def test_diff_with_spaces(self):
    """A file name containing spaces survives patch-format parsing intact."""
    proc = StringProcessAdapter(fixture("diff_file_with_spaces"))
    entry = Diff._index_from_patch_format(self.rorepo, proc)[0]
    self.assertIsNone(entry.a_path, repr(entry.a_path))
    self.assertEqual(entry.b_path, "file with spaces", repr(entry.b_path))
def test_diff_submodule(self):
"""Test that diff is able to correctly diff commits that cover submodule changes"""
@@ -303,13 +338,13 @@ class TestDiff(TestBase):
repo = Repo.init(self.repo_dir)
with open(self.repo_dir + "/test", "w") as foo_test:
foo_test.write("")
- repo.index.add(['test'])
+ repo.index.add(["test"])
Submodule.add(repo, "subtest", "sub", url="file://" + self.submodule_dir)
repo.index.commit("first commit")
- repo.create_tag('1')
+ repo.create_tag("1")
# Add a commit to the submodule
- submodule = repo.submodule('subtest')
+ submodule = repo.submodule("subtest")
with open(self.repo_dir + "/sub/subfile", "w") as foo_sub_subfile:
foo_sub_subfile.write("blub")
submodule.module().index.add(["subfile"])
@@ -319,9 +354,9 @@ class TestDiff(TestBase):
# Commit submodule updates in parent repo
repo.index.add([submodule])
repo.index.commit("submodule changed")
- repo.create_tag('2')
+ repo.create_tag("2")
- diff = repo.commit('1').diff(repo.commit('2'))[0]
+ diff = repo.commit("1").diff(repo.commit("2"))[0]
# If diff is unable to find the commit hashes (looks in wrong repo) the *_blob.size
# property will be a string containing exception text, an int indicates success
self.assertIsInstance(diff.a_blob.size, int)
@@ -330,7 +365,7 @@ class TestDiff(TestBase):
def test_diff_interface(self):
# test a few variations of the main diff routine
assertion_map = {}
- for i, commit in enumerate(self.rorepo.iter_commits('0.1.6', max_count=2)):
+ for i, commit in enumerate(self.rorepo.iter_commits("0.1.6", max_count=2)):
diff_item = commit
if i % 2 == 0:
diff_item = commit.tree
@@ -339,15 +374,19 @@ class TestDiff(TestBase):
for other in (None, NULL_TREE, commit.Index, commit.parents[0]):
for paths in (None, "CHANGES", ("CHANGES", "lib")):
for create_patch in range(2):
- diff_index = diff_item.diff(other=other, paths=paths, create_patch=create_patch)
+ diff_index = diff_item.diff(
+ other=other, paths=paths, create_patch=create_patch
+ )
assert isinstance(diff_index, DiffIndex)
if diff_index:
self._assert_diff_format(diff_index)
for ct in DiffIndex.change_type:
- key = 'ct_%s' % ct
+ key = "ct_%s" % ct
assertion_map.setdefault(key, 0)
- assertion_map[key] = assertion_map[key] + len(list(diff_index.iter_change_type(ct)))
+ assertion_map[key] = assertion_map[key] + len(
+ list(diff_index.iter_change_type(ct))
+ )
# END for each changetype
# check entries
@@ -359,7 +398,10 @@ class TestDiff(TestBase):
self.assertFalse(diff_index[0] != diff_index[0])
for dr in diff_index:
- self.assertIsNotNone(str(dr), "Diff to string conversion should be possible")
+ self.assertIsNotNone(
+ str(dr),
+ "Diff to string conversion should be possible",
+ )
# END diff index checking
# END for each patch option
# END for each path option
diff --git a/test/test_docs.py b/test/test_docs.py
index 08fc8439..b6a0ed31 100644
--- a/test/test_docs.py
+++ b/test/test_docs.py
@@ -13,9 +13,9 @@ import os.path
class Tutorials(TestBase):
-
def tearDown(self):
    """Run a garbage-collection pass so repository resources are released promptly."""
    import gc

    gc.collect()
# @skipIf(HIDE_WINDOWS_KNOWN_ERRORS, ## ACTUALLY skipped by `git.submodule.base#L869`.
@@ -34,71 +34,102 @@ class Tutorials(TestBase):
# ![1-test_init_repo_object]
# [2-test_init_repo_object]
- bare_repo = Repo.init(os.path.join(rw_dir, 'bare-repo'), bare=True)
+ bare_repo = Repo.init(os.path.join(rw_dir, "bare-repo"), bare=True)
assert bare_repo.bare
# ![2-test_init_repo_object]
# [3-test_init_repo_object]
- repo.config_reader() # get a config reader for read-only access
- with repo.config_writer(): # get a config writer to change configuration
- pass # call release() to be sure changes are written and locks are released
+ repo.config_reader() # get a config reader for read-only access
+ with repo.config_writer(): # get a config writer to change configuration
+ pass # call release() to be sure changes are written and locks are released
# ![3-test_init_repo_object]
# [4-test_init_repo_object]
assert not bare_repo.is_dirty() # check the dirty state
- repo.untracked_files # retrieve a list of untracked files
+ repo.untracked_files # retrieve a list of untracked files
# ['my_untracked_file']
# ![4-test_init_repo_object]
# [5-test_init_repo_object]
- cloned_repo = repo.clone(os.path.join(rw_dir, 'to/this/path'))
- assert cloned_repo.__class__ is Repo # clone an existing repository
- assert Repo.init(os.path.join(rw_dir, 'path/for/new/repo')).__class__ is Repo
+ cloned_repo = repo.clone(os.path.join(rw_dir, "to/this/path"))
+ assert cloned_repo.__class__ is Repo # clone an existing repository
+ assert Repo.init(os.path.join(rw_dir, "path/for/new/repo")).__class__ is Repo
# ![5-test_init_repo_object]
# [6-test_init_repo_object]
- with open(os.path.join(rw_dir, 'repo.tar'), 'wb') as fp:
+ with open(os.path.join(rw_dir, "repo.tar"), "wb") as fp:
repo.archive(fp)
# ![6-test_init_repo_object]
# repository paths
# [7-test_init_repo_object]
- assert os.path.isdir(cloned_repo.working_tree_dir) # directory with your work files
- assert cloned_repo.git_dir.startswith(cloned_repo.working_tree_dir) # directory containing the git repository
- assert bare_repo.working_tree_dir is None # bare repositories have no working tree
+ assert os.path.isdir(
+ cloned_repo.working_tree_dir
+ ) # directory with your work files
+ assert cloned_repo.git_dir.startswith(
+ cloned_repo.working_tree_dir
+ ) # directory containing the git repository
+ assert (
+ bare_repo.working_tree_dir is None
+ ) # bare repositories have no working tree
# ![7-test_init_repo_object]
# heads, tags and references
# heads are branches in git-speak
# [8-test_init_repo_object]
- self.assertEqual(repo.head.ref, repo.heads.master, # head is a sym-ref pointing to master
- "It's ok if TC not running from `master`.")
- self.assertEqual(repo.tags['0.3.5'], repo.tag('refs/tags/0.3.5')) # you can access tags in various ways too
- self.assertEqual(repo.refs.master, repo.heads['master']) # .refs provides all refs, ie heads ...
-
- if 'TRAVIS' not in os.environ:
- self.assertEqual(repo.refs['origin/master'], repo.remotes.origin.refs.master) # ... remotes ...
- self.assertEqual(repo.refs['0.3.5'], repo.tags['0.3.5']) # ... and tags
+ self.assertEqual(
+ repo.head.ref,
+ repo.heads.master, # head is a sym-ref pointing to master
+ "It's ok if TC not running from `master`.",
+ )
+ self.assertEqual(
+ repo.tags["0.3.5"], repo.tag("refs/tags/0.3.5")
+ ) # you can access tags in various ways too
+ self.assertEqual(
+ repo.refs.master, repo.heads["master"]
+ ) # .refs provides all refs, ie heads ...
+
+ if "TRAVIS" not in os.environ:
+ self.assertEqual(
+ repo.refs["origin/master"], repo.remotes.origin.refs.master
+ ) # ... remotes ...
+ self.assertEqual(repo.refs["0.3.5"], repo.tags["0.3.5"]) # ... and tags
# ![8-test_init_repo_object]
# create a new head/branch
# [9-test_init_repo_object]
- new_branch = cloned_repo.create_head('feature') # create a new branch ...
- assert cloned_repo.active_branch != new_branch # which wasn't checked out yet ...
- self.assertEqual(new_branch.commit, cloned_repo.active_branch.commit) # pointing to the checked-out commit
+ new_branch = cloned_repo.create_head("feature") # create a new branch ...
+ assert (
+ cloned_repo.active_branch != new_branch
+ ) # which wasn't checked out yet ...
+ self.assertEqual(
+ new_branch.commit, cloned_repo.active_branch.commit
+ ) # pointing to the checked-out commit
# It's easy to let a branch point to the previous commit, without affecting anything else
# Each reference provides access to the git object it points to, usually commits
- assert new_branch.set_commit('HEAD~1').commit == cloned_repo.active_branch.commit.parents[0]
+ assert (
+ new_branch.set_commit("HEAD~1").commit
+ == cloned_repo.active_branch.commit.parents[0]
+ )
# ![9-test_init_repo_object]
# create a new tag reference
# [10-test_init_repo_object]
- past = cloned_repo.create_tag('past', ref=new_branch,
- message="This is a tag-object pointing to %s" % new_branch.name)
- self.assertEqual(past.commit, new_branch.commit) # the tag points to the specified commit
- assert past.tag.message.startswith("This is") # and its object carries the message provided
-
- now = cloned_repo.create_tag('now') # This is a tag-reference. It may not carry meta-data
+ past = cloned_repo.create_tag(
+ "past",
+ ref=new_branch,
+ message="This is a tag-object pointing to %s" % new_branch.name,
+ )
+ self.assertEqual(
+ past.commit, new_branch.commit
+ ) # the tag points to the specified commit
+ assert past.tag.message.startswith(
+ "This is"
+ ) # and its object carries the message provided
+
+ now = cloned_repo.create_tag(
+ "now"
+ ) # This is a tag-reference. It may not carry meta-data
assert now.tag is None
# ![10-test_init_repo_object]
@@ -106,17 +137,26 @@ class Tutorials(TestBase):
# [11-test_init_repo_object]
assert now.commit.message != past.commit.message
# You can read objects directly through binary streams, no working tree required
- assert (now.commit.tree / 'VERSION').data_stream.read().decode('ascii').startswith('3')
+ assert (
+ (now.commit.tree / "VERSION")
+ .data_stream.read()
+ .decode("ascii")
+ .startswith("3")
+ )
# You can traverse trees as well to handle all contained files of a particular commit
file_count = 0
tree_count = 0
tree = past.commit.tree
for item in tree.traverse():
- file_count += item.type == 'blob'
- tree_count += item.type == 'tree'
- assert file_count and tree_count # we have accumulated all directories and files
- self.assertEqual(len(tree.blobs) + len(tree.trees), len(tree)) # a tree is iterable on its children
+ file_count += item.type == "blob"
+ tree_count += item.type == "tree"
+ assert (
+ file_count and tree_count
+ ) # we have accumulated all directories and files
+ self.assertEqual(
+ len(tree.blobs) + len(tree.trees), len(tree)
+ ) # a tree is iterable on its children
# ![11-test_init_repo_object]
# remotes allow handling push, pull and fetch operations
@@ -124,19 +164,28 @@ class Tutorials(TestBase):
from git import RemoteProgress
class MyProgressPrinter(RemoteProgress):
- def update(self, op_code, cur_count, max_count=None, message=''):
- print(op_code, cur_count, max_count, cur_count / (max_count or 100.0), message or "NO MESSAGE")
+ def update(self, op_code, cur_count, max_count=None, message=""):
+ print(
+ op_code,
+ cur_count,
+ max_count,
+ cur_count / (max_count or 100.0),
+ message or "NO MESSAGE",
+ )
+
# end
- self.assertEqual(len(cloned_repo.remotes), 1) # we have been cloned, so should be one remote
- self.assertEqual(len(bare_repo.remotes), 0) # this one was just initialized
- origin = bare_repo.create_remote('origin', url=cloned_repo.working_tree_dir)
+ self.assertEqual(
+ len(cloned_repo.remotes), 1
+ ) # we have been cloned, so should be one remote
+ self.assertEqual(len(bare_repo.remotes), 0) # this one was just initialized
+ origin = bare_repo.create_remote("origin", url=cloned_repo.working_tree_dir)
assert origin.exists()
for fetch_info in origin.fetch(progress=MyProgressPrinter()):
print("Updated %s to %s" % (fetch_info.ref, fetch_info.commit))
# create a local branch at the latest fetched master. We specify the name statically, but you have all
# information to do it programmatically as well.
- bare_master = bare_repo.create_head('master', origin.refs.master)
+ bare_master = bare_repo.create_head("master", origin.refs.master)
bare_repo.head.set_reference(bare_master)
assert not bare_repo.delete_remote(origin).exists()
# push and pull behave very similarly
@@ -144,28 +193,42 @@ class Tutorials(TestBase):
# index
# [13-test_init_repo_object]
- self.assertEqual(new_branch.checkout(), cloned_repo.active_branch) # checking out branch adjusts the wtree
- self.assertEqual(new_branch.commit, past.commit) # Now the past is checked out
-
- new_file_path = os.path.join(cloned_repo.working_tree_dir, 'my-new-file')
- open(new_file_path, 'wb').close() # create new file in working tree
- cloned_repo.index.add([new_file_path]) # add it to the index
+ self.assertEqual(
+ new_branch.checkout(), cloned_repo.active_branch
+ ) # checking out branch adjusts the wtree
+ self.assertEqual(new_branch.commit, past.commit) # Now the past is checked out
+
+ new_file_path = os.path.join(cloned_repo.working_tree_dir, "my-new-file")
+ open(new_file_path, "wb").close() # create new file in working tree
+ cloned_repo.index.add([new_file_path]) # add it to the index
# Commit the changes to deviate masters history
cloned_repo.index.commit("Added a new file in the past - for later merege")
# prepare a merge
- master = cloned_repo.heads.master # right-hand side is ahead of us, in the future
- merge_base = cloned_repo.merge_base(new_branch, master) # allows for a three-way merge
- cloned_repo.index.merge_tree(master, base=merge_base) # write the merge result into index
- cloned_repo.index.commit("Merged past and now into future ;)",
- parent_commits=(new_branch.commit, master.commit))
+ master = (
+ cloned_repo.heads.master
+ ) # right-hand side is ahead of us, in the future
+ merge_base = cloned_repo.merge_base(
+ new_branch, master
+ ) # allows for a three-way merge
+ cloned_repo.index.merge_tree(
+ master, base=merge_base
+ ) # write the merge result into index
+ cloned_repo.index.commit(
+ "Merged past and now into future ;)",
+ parent_commits=(new_branch.commit, master.commit),
+ )
# now new_branch is ahead of master, which probably should be checked out and reset softly.
# note that all these operations didn't touch the working tree, as we managed it ourselves.
# This definitely requires you to know what you are doing :) !
- assert os.path.basename(new_file_path) in new_branch.commit.tree # new file is now in tree
- master.commit = new_branch.commit # let master point to most recent commit
- cloned_repo.head.reference = master # we adjusted just the reference, not the working tree or index
+ assert (
+ os.path.basename(new_file_path) in new_branch.commit.tree
+ ) # new file is now in tree
+ master.commit = new_branch.commit # let master point to most recent commit
+ cloned_repo.head.reference = (
+ master # we adjusted just the reference, not the working tree or index
+ )
# ![13-test_init_repo_object]
# submodules
@@ -175,110 +238,135 @@ class Tutorials(TestBase):
# As our GitPython repository has submodules already that point to GitHub, make sure we don't
# interact with them
for sm in cloned_repo.submodules:
- assert not sm.remove().exists() # after removal, the sm doesn't exist anymore
- sm = cloned_repo.create_submodule('mysubrepo', 'path/to/subrepo', url=bare_repo.git_dir, branch='master')
+ assert (
+ not sm.remove().exists()
+ ) # after removal, the sm doesn't exist anymore
+ sm = cloned_repo.create_submodule(
+ "mysubrepo", "path/to/subrepo", url=bare_repo.git_dir, branch="master"
+ )
# .gitmodules was written and added to the index, which is now being committed
cloned_repo.index.commit("Added submodule")
- assert sm.exists() and sm.module_exists() # this submodule is defintely available
- sm.remove(module=True, configuration=False) # remove the working tree
- assert sm.exists() and not sm.module_exists() # the submodule itself is still available
+ assert (
+ sm.exists() and sm.module_exists()
+ ) # this submodule is definitely available
+ sm.remove(module=True, configuration=False) # remove the working tree
+ assert (
+ sm.exists() and not sm.module_exists()
+ ) # the submodule itself is still available
# update all submodules, non-recursively to save time, this method is very powerful, go have a look
cloned_repo.submodule_update(recursive=False)
- assert sm.module_exists() # The submodules working tree was checked out by update
+ assert (
+ sm.module_exists()
+ ) # The submodules working tree was checked out by update
# ![14-test_init_repo_object]
@with_rw_directory
def test_references_and_objects(self, rw_dir):
# [1-test_references_and_objects]
import git
- repo = git.Repo.clone_from(self._small_repo_url(), os.path.join(rw_dir, 'repo'), branch='master')
+
+ repo = git.Repo.clone_from(
+ self._small_repo_url(), os.path.join(rw_dir, "repo"), branch="master"
+ )
heads = repo.heads
- master = heads.master # lists can be accessed by name for convenience
- master.commit # the commit pointed to by head called master
- master.rename('new_name') # rename heads
- master.rename('master')
+ master = heads.master # lists can be accessed by name for convenience
+ master.commit # the commit pointed to by head called master
+ master.rename("new_name") # rename heads
+ master.rename("master")
# ![1-test_references_and_objects]
# [2-test_references_and_objects]
tags = repo.tags
tagref = tags[0]
- tagref.tag # tags may have tag objects carrying additional information
- tagref.commit # but they always point to commits
- repo.delete_tag(tagref) # delete or
- repo.create_tag("my_tag") # create tags using the repo for convenience
+ tagref.tag # tags may have tag objects carrying additional information
+ tagref.commit # but they always point to commits
+ repo.delete_tag(tagref) # delete or
+ repo.create_tag("my_tag") # create tags using the repo for convenience
# ![2-test_references_and_objects]
# [3-test_references_and_objects]
- head = repo.head # the head points to the active branch/ref
- master = head.reference # retrieve the reference the head points to
- master.commit # from here you use it as any other reference
+ head = repo.head # the head points to the active branch/ref
+ master = head.reference # retrieve the reference the head points to
+ master.commit # from here you use it as any other reference
# ![3-test_references_and_objects]
-#
+ #
# [4-test_references_and_objects]
log = master.log()
- log[0] # first (i.e. oldest) reflog entry
- log[-1] # last (i.e. most recent) reflog entry
+ log[0] # first (i.e. oldest) reflog entry
+ log[-1] # last (i.e. most recent) reflog entry
# ![4-test_references_and_objects]
# [5-test_references_and_objects]
- new_branch = repo.create_head('new') # create a new one
- new_branch.commit = 'HEAD~10' # set branch to another commit without changing index or working trees
- repo.delete_head(new_branch) # delete an existing head - only works if it is not checked out
+ new_branch = repo.create_head("new") # create a new one
+ new_branch.commit = "HEAD~10" # set branch to another commit without changing index or working trees
+ repo.delete_head(
+ new_branch
+ ) # delete an existing head - only works if it is not checked out
# ![5-test_references_and_objects]
# [6-test_references_and_objects]
- new_tag = repo.create_tag('my_new_tag', message='my message')
+ new_tag = repo.create_tag("my_new_tag", message="my message")
# You cannot change the commit a tag points to. Tags need to be re-created
- self.assertRaises(AttributeError, setattr, new_tag, 'commit', repo.commit('HEAD~1'))
+ self.assertRaises(
+ AttributeError, setattr, new_tag, "commit", repo.commit("HEAD~1")
+ )
repo.delete_tag(new_tag)
# ![6-test_references_and_objects]
# [7-test_references_and_objects]
- new_branch = repo.create_head('another-branch')
+ new_branch = repo.create_head("another-branch")
repo.head.reference = new_branch
# ![7-test_references_and_objects]
# [8-test_references_and_objects]
hc = repo.head.commit
hct = hc.tree
- hc != hct # @NoEffect
- hc != repo.tags[0] # @NoEffect
- hc == repo.head.reference.commit # @NoEffect
+ hc != hct # @NoEffect
+ hc != repo.tags[0] # @NoEffect
+ hc == repo.head.reference.commit # @NoEffect
# ![8-test_references_and_objects]
# [9-test_references_and_objects]
- self.assertEqual(hct.type, 'tree') # preset string type, being a class attribute
- assert hct.size > 0 # size in bytes
+ self.assertEqual(
+ hct.type, "tree"
+ ) # preset string type, being a class attribute
+ assert hct.size > 0 # size in bytes
assert len(hct.hexsha) == 40
assert len(hct.binsha) == 20
# ![9-test_references_and_objects]
# [10-test_references_and_objects]
- self.assertEqual(hct.path, '') # root tree has no path
- assert hct.trees[0].path != '' # the first contained item has one though
- self.assertEqual(hct.mode, 0o40000) # trees have the mode of a linux directory
- self.assertEqual(hct.blobs[0].mode, 0o100644) # blobs have specific mode, comparable to a standard linux fs
+ self.assertEqual(hct.path, "") # root tree has no path
+ assert hct.trees[0].path != "" # the first contained item has one though
+ self.assertEqual(hct.mode, 0o40000) # trees have the mode of a linux directory
+ self.assertEqual(
+ hct.blobs[0].mode, 0o100644
+ ) # blobs have specific mode, comparable to a standard linux fs
# ![10-test_references_and_objects]
# [11-test_references_and_objects]
- hct.blobs[0].data_stream.read() # stream object to read data from
- hct.blobs[0].stream_data(open(os.path.join(rw_dir, 'blob_data'), 'wb')) # write data to given stream
+ hct.blobs[0].data_stream.read() # stream object to read data from
+ hct.blobs[0].stream_data(
+ open(os.path.join(rw_dir, "blob_data"), "wb")
+ ) # write data to given stream
# ![11-test_references_and_objects]
# [12-test_references_and_objects]
- repo.commit('master')
- repo.commit('v0.8.1')
- repo.commit('HEAD~10')
+ repo.commit("master")
+ repo.commit("v0.8.1")
+ repo.commit("HEAD~10")
# ![12-test_references_and_objects]
# [13-test_references_and_objects]
- fifty_first_commits = list(repo.iter_commits('master', max_count=50))
+ fifty_first_commits = list(repo.iter_commits("master", max_count=50))
assert len(fifty_first_commits) == 50
# this will return commits 21-30 from the commit list as traversed backwards master
- ten_commits_past_twenty = list(repo.iter_commits('master', max_count=10, skip=20))
+ ten_commits_past_twenty = list(
+ repo.iter_commits("master", max_count=10, skip=20)
+ )
assert len(ten_commits_past_twenty) == 10
assert fifty_first_commits[20:30] == ten_commits_past_twenty
# ![13-test_references_and_objects]
@@ -287,22 +375,23 @@ class Tutorials(TestBase):
headcommit = repo.head.commit
assert len(headcommit.hexsha) == 40
assert len(headcommit.parents) > 0
- assert headcommit.tree.type == 'tree'
+ assert headcommit.tree.type == "tree"
assert len(headcommit.author.name) != 0
assert isinstance(headcommit.authored_date, int)
assert len(headcommit.committer.name) != 0
assert isinstance(headcommit.committed_date, int)
- assert headcommit.message != ''
+ assert headcommit.message != ""
# ![14-test_references_and_objects]
# [15-test_references_and_objects]
import time
+
time.asctime(time.gmtime(headcommit.committed_date))
time.strftime("%a, %d %b %Y %H:%M", time.gmtime(headcommit.committed_date))
# ![15-test_references_and_objects]
# [16-test_references_and_objects]
- assert headcommit.parents[0].parents[0].parents[0] == repo.commit('master^^^')
+ assert headcommit.parents[0].parents[0].parents[0] == repo.commit("master^^^")
# ![16-test_references_and_objects]
# [17-test_references_and_objects]
@@ -311,33 +400,41 @@ class Tutorials(TestBase):
# ![17-test_references_and_objects]
# [18-test_references_and_objects]
- assert len(tree.trees) > 0 # trees are subdirectories
- assert len(tree.blobs) > 0 # blobs are files
+ assert len(tree.trees) > 0 # trees are subdirectories
+ assert len(tree.blobs) > 0 # blobs are files
assert len(tree.blobs) + len(tree.trees) == len(tree)
# ![18-test_references_and_objects]
# [19-test_references_and_objects]
- self.assertEqual(tree['smmap'], tree / 'smmap') # access by index and by sub-path
- for entry in tree: # intuitive iteration of tree members
+ self.assertEqual(
+ tree["smmap"], tree / "smmap"
+ ) # access by index and by sub-path
+ for entry in tree: # intuitive iteration of tree members
print(entry)
- blob = tree.trees[1].blobs[0] # let's get a blob in a sub-tree
+ blob = tree.trees[1].blobs[0] # let's get a blob in a sub-tree
assert blob.name
assert len(blob.path) < len(blob.abspath)
- self.assertEqual(tree.trees[1].name + '/' + blob.name, blob.path) # this is how relative blob path generated
- self.assertEqual(tree[blob.path], blob) # you can use paths like 'dir/file' in tree
+ self.assertEqual(
+ tree.trees[1].name + "/" + blob.name, blob.path
+ ) # this is how relative blob path generated
+ self.assertEqual(
+ tree[blob.path], blob
+ ) # you can use paths like 'dir/file' in tree
# ![19-test_references_and_objects]
# [20-test_references_and_objects]
- assert tree / 'smmap' == tree['smmap']
+ assert tree / "smmap" == tree["smmap"]
assert tree / blob.path == tree[blob.path]
# ![20-test_references_and_objects]
# [21-test_references_and_objects]
# This example shows the various types of allowed ref-specs
assert repo.tree() == repo.head.commit.tree
- past = repo.commit('HEAD~5')
+ past = repo.commit("HEAD~5")
assert repo.tree(past) == repo.tree(past.hexsha)
- self.assertEqual(repo.tree('v0.8.1').type, 'tree') # yes, you can provide any refspec - works everywhere
+ self.assertEqual(
+ repo.tree("v0.8.1").type, "tree"
+ ) # yes, you can provide any refspec - works everywhere
# ![21-test_references_and_objects]
# [22-test_references_and_objects]
@@ -347,20 +444,27 @@ class Tutorials(TestBase):
# [23-test_references_and_objects]
index = repo.index
# The index contains all blobs in a flat list
- assert len(list(index.iter_blobs())) == len([o for o in repo.head.commit.tree.traverse() if o.type == 'blob'])
+ assert len(list(index.iter_blobs())) == len(
+ [o for o in repo.head.commit.tree.traverse() if o.type == "blob"]
+ )
# Access blob objects
for (_path, _stage), entry in index.entries.items():
pass
- new_file_path = os.path.join(repo.working_tree_dir, 'new-file-name')
- open(new_file_path, 'w').close()
- index.add([new_file_path]) # add a new file to the index
- index.remove(['LICENSE']) # remove an existing one
- assert os.path.isfile(os.path.join(repo.working_tree_dir, 'LICENSE')) # working tree is untouched
-
- self.assertEqual(index.commit("my commit message").type, 'commit') # commit changed index
- repo.active_branch.commit = repo.commit('HEAD~1') # forget last commit
+ new_file_path = os.path.join(repo.working_tree_dir, "new-file-name")
+ open(new_file_path, "w").close()
+ index.add([new_file_path]) # add a new file to the index
+ index.remove(["LICENSE"]) # remove an existing one
+ assert os.path.isfile(
+ os.path.join(repo.working_tree_dir, "LICENSE")
+ ) # working tree is untouched
+
+ self.assertEqual(
+ index.commit("my commit message").type, "commit"
+ ) # commit changed index
+ repo.active_branch.commit = repo.commit("HEAD~1") # forget last commit
from git import Actor
+
author = Actor("An author", "author@example.com")
committer = Actor("A committer", "committer@example.com")
# commit by commit message and author and committer
@@ -369,28 +473,37 @@ class Tutorials(TestBase):
# [24-test_references_and_objects]
from git import IndexFile
+
# loads a tree into a temporary index, which exists just in memory
- IndexFile.from_tree(repo, 'HEAD~1')
+ IndexFile.from_tree(repo, "HEAD~1")
# merge two trees three-way into memory
- merge_index = IndexFile.from_tree(repo, 'HEAD~10', 'HEAD', repo.merge_base('HEAD~10', 'HEAD'))
+ merge_index = IndexFile.from_tree(
+ repo, "HEAD~10", "HEAD", repo.merge_base("HEAD~10", "HEAD")
+ )
# and persist it
- merge_index.write(os.path.join(rw_dir, 'merged_index'))
+ merge_index.write(os.path.join(rw_dir, "merged_index"))
# ![24-test_references_and_objects]
# [25-test_references_and_objects]
- empty_repo = git.Repo.init(os.path.join(rw_dir, 'empty'))
- origin = empty_repo.create_remote('origin', repo.remotes.origin.url)
+ empty_repo = git.Repo.init(os.path.join(rw_dir, "empty"))
+ origin = empty_repo.create_remote("origin", repo.remotes.origin.url)
assert origin.exists()
- assert origin == empty_repo.remotes.origin == empty_repo.remotes['origin']
- origin.fetch() # assure we actually have data. fetch() returns useful information
+ assert origin == empty_repo.remotes.origin == empty_repo.remotes["origin"]
+ origin.fetch() # assure we actually have data. fetch() returns useful information
# Setup a local tracking branch of a remote branch
- empty_repo.create_head('master', origin.refs.master) # create local branch "master" from remote "master"
- empty_repo.heads.master.set_tracking_branch(origin.refs.master) # set local "master" to track remote "master
+ empty_repo.create_head(
+ "master", origin.refs.master
+ ) # create local branch "master" from remote "master"
+ empty_repo.heads.master.set_tracking_branch(
+ origin.refs.master
+ ) # set local "master" to track remote "master
empty_repo.heads.master.checkout() # checkout local "master" to working tree
# Three above commands in one:
- empty_repo.create_head('master', origin.refs.master).set_tracking_branch(origin.refs.master).checkout()
+ empty_repo.create_head("master", origin.refs.master).set_tracking_branch(
+ origin.refs.master
+ ).checkout()
# rename remotes
- origin.rename('new_origin')
+ origin.rename("new_origin")
# push and pull behaves similarly to `git push|pull`
origin.pull()
origin.push() # attempt push, ignore errors
@@ -409,32 +522,32 @@ class Tutorials(TestBase):
# [27-test_references_and_objects]
hcommit = repo.head.commit
- hcommit.diff() # diff tree against index
- hcommit.diff('HEAD~1') # diff tree against previous tree
- hcommit.diff(None) # diff tree against working tree
+ hcommit.diff() # diff tree against index
+ hcommit.diff("HEAD~1") # diff tree against previous tree
+ hcommit.diff(None) # diff tree against working tree
index = repo.index
- index.diff() # diff index against itself yielding empty diff
- index.diff(None) # diff index against working copy
- index.diff('HEAD') # diff index against current HEAD tree
+ index.diff() # diff index against itself yielding empty diff
+ index.diff(None) # diff index against working copy
+ index.diff("HEAD") # diff index against current HEAD tree
# ![27-test_references_and_objects]
# [28-test_references_and_objects]
# Traverse added Diff objects only
- for diff_added in hcommit.diff('HEAD~1').iter_change_type('A'):
+ for diff_added in hcommit.diff("HEAD~1").iter_change_type("A"):
print(diff_added)
# ![28-test_references_and_objects]
# [29-test_references_and_objects]
# Reset our working tree 10 commits into the past
- past_branch = repo.create_head('past_branch', 'HEAD~10')
+ past_branch = repo.create_head("past_branch", "HEAD~10")
repo.head.reference = past_branch
assert not repo.head.is_detached
# reset the index and working tree to match the pointed-to commit
repo.head.reset(index=True, working_tree=True)
# To detach your head, you have to point to a commit directly
- repo.head.reference = repo.commit('HEAD~5')
+ repo.head.reference = repo.commit("HEAD~5")
assert repo.head.is_detached
# now our head points 15 commits into the past, whereas the working tree
# and index are 10 commits in the past
@@ -448,10 +561,12 @@ class Tutorials(TestBase):
# [31-test_references_and_objects]
git = repo.git
- git.checkout('HEAD', b="my_new_branch") # create a new branch
- git.branch('another-new-one')
- git.branch('-D', 'another-new-one') # pass strings for full control over argument order
- git.for_each_ref() # '-' becomes '_' when calling it
+ git.checkout("HEAD", b="my_new_branch") # create a new branch
+ git.branch("another-new-one")
+ git.branch(
+ "-D", "another-new-one"
+ ) # pass strings for full control over argument order
+ git.for_each_ref() # '-' becomes '_' when calling it
# ![31-test_references_and_objects]
repo.git.clear_cache()
@@ -463,31 +578,37 @@ class Tutorials(TestBase):
assert len(sms) == 1
sm = sms[0]
- self.assertEqual(sm.name, 'gitdb') # git-python has gitdb as single submodule ...
- self.assertEqual(sm.children()[0].name, 'smmap') # ... which has smmap as single submodule
+ self.assertEqual(
+ sm.name, "gitdb"
+ ) # git-python has gitdb as single submodule ...
+ self.assertEqual(
+ sm.children()[0].name, "smmap"
+ ) # ... which has smmap as single submodule
# The module is the repository referenced by the submodule
- assert sm.module_exists() # the module is available, which doesn't have to be the case.
- assert sm.module().working_tree_dir.endswith('gitdb')
+ assert (
+ sm.module_exists()
+ ) # the module is available, which doesn't have to be the case.
+ assert sm.module().working_tree_dir.endswith("gitdb")
# the submodule's absolute path is the module's path
assert sm.abspath == sm.module().working_tree_dir
- self.assertEqual(len(sm.hexsha), 40) # Its sha defines the commit to checkout
- assert sm.exists() # yes, this submodule is valid and exists
+ self.assertEqual(len(sm.hexsha), 40) # Its sha defines the commit to checkout
+ assert sm.exists() # yes, this submodule is valid and exists
# read its configuration conveniently
- assert sm.config_reader().get_value('path') == sm.path
- self.assertEqual(len(sm.children()), 1) # query the submodule hierarchy
+ assert sm.config_reader().get_value("path") == sm.path
+ self.assertEqual(len(sm.children()), 1) # query the submodule hierarchy
# ![1-test_submodules]
@with_rw_directory
def test_add_file_and_commit(self, rw_dir):
import git
- repo_dir = os.path.join(rw_dir, 'my-new-repo')
- file_name = os.path.join(repo_dir, 'new-file')
+ repo_dir = os.path.join(rw_dir, "my-new-repo")
+ file_name = os.path.join(repo_dir, "new-file")
r = git.Repo.init(repo_dir)
# This function just creates an empty file ...
- open(file_name, 'wb').close()
+ open(file_name, "wb").close()
r.index.add([file_name])
r.index.commit("initial commit")
diff --git a/test/test_exc.py b/test/test_exc.py
index c77be782..6c3353fc 100644
--- a/test/test_exc.py
+++ b/test/test_exc.py
@@ -29,34 +29,43 @@ import itertools as itt
_cmd_argvs = (
- ('cmd', ),
- ('θνιψοδε', ),
- ('θνιψοδε', 'normal', 'argvs'),
- ('cmd', 'ελληνικα', 'args'),
- ('θνιψοδε', 'κι', 'αλλα', 'strange', 'args'),
- ('θνιψοδε', 'κι', 'αλλα', 'non-unicode', 'args'),
- ('git', 'clone', '-v', 'https://fakeuser:fakepassword1234@fakerepo.example.com/testrepo'),
+ ("cmd",),
+ ("θνιψοδε",),
+ ("θνιψοδε", "normal", "argvs"),
+ ("cmd", "ελληνικα", "args"),
+ ("θνιψοδε", "κι", "αλλα", "strange", "args"),
+ ("θνιψοδε", "κι", "αλλα", "non-unicode", "args"),
+ (
+ "git",
+ "clone",
+ "-v",
+ "https://fakeuser:fakepassword1234@fakerepo.example.com/testrepo",
+ ),
)
_causes_n_substrings = (
- (None, None), # noqa: E241 @IgnorePep8
- (7, "exit code(7)"), # noqa: E241 @IgnorePep8
- ('Some string', "'Some string'"), # noqa: E241 @IgnorePep8
- ('παλιο string', "'παλιο string'"), # noqa: E241 @IgnorePep8
- (Exception("An exc."), "Exception('An exc.')"), # noqa: E241 @IgnorePep8
- (Exception("Κακια exc."), "Exception('Κακια exc.')"), # noqa: E241 @IgnorePep8
- (object(), "<object object at "), # noqa: E241 @IgnorePep8
+ (None, None), # noqa: E241 @IgnorePep8
+ (7, "exit code(7)"), # noqa: E241 @IgnorePep8
+ ("Some string", "'Some string'"), # noqa: E241 @IgnorePep8
+ ("παλιο string", "'παλιο string'"), # noqa: E241 @IgnorePep8
+ (Exception("An exc."), "Exception('An exc.')"), # noqa: E241 @IgnorePep8
+ (Exception("Κακια exc."), "Exception('Κακια exc.')"), # noqa: E241 @IgnorePep8
+ (object(), "<object object at "), # noqa: E241 @IgnorePep8
)
-_streams_n_substrings = (None, 'steram', 'ομορφο stream', )
+_streams_n_substrings = (
+ None,
+ "steram",
+ "ομορφο stream",
+)
@ddt.ddt
class TExc(TestBase):
-
def test_ExceptionsHaveBaseClass(self):
from git.exc import GitError
+
self.assertIsInstance(GitError(), Exception)
-
+
exception_classes = [
InvalidGitRepositoryError,
WorkTreeRepositoryUnsupported,
@@ -73,7 +82,9 @@ class TExc(TestBase):
for ex_class in exception_classes:
self.assertTrue(issubclass(ex_class, GitError))
- @ddt.data(*list(itt.product(_cmd_argvs, _causes_n_substrings, _streams_n_substrings)))
+ @ddt.data(
+ *list(itt.product(_cmd_argvs, _causes_n_substrings, _streams_n_substrings))
+ )
def test_CommandError_unicode(self, case):
argv, (cause, subs), stream = case
cls = CommandError
@@ -81,7 +92,7 @@ class TExc(TestBase):
s = str(c)
self.assertIsNotNone(c._msg)
- self.assertIn(' cmdline: ', s)
+ self.assertIn(" cmdline: ", s)
for a in remove_password_if_present(argv):
self.assertIn(a, s)
@@ -112,17 +123,17 @@ class TExc(TestBase):
self.assertIn(" stdout:", s)
self.assertIn(stream, s)
- c = cls(argv, cause, stream, stream + 'no2')
+ c = cls(argv, cause, stream, stream + "no2")
s = str(c)
self.assertIn(" stderr:", s)
self.assertIn(stream, s)
self.assertIn(" stdout:", s)
- self.assertIn(stream + 'no2', s)
+ self.assertIn(stream + "no2", s)
@ddt.data(
- (['cmd1'], None),
- (['cmd1'], "some cause"),
- (['cmd1'], Exception()),
+ (["cmd1"], None),
+ (["cmd1"], "some cause"),
+ (["cmd1"], Exception()),
)
def test_GitCommandNotFound(self, init_args):
argv, cause = init_args
@@ -131,15 +142,15 @@ class TExc(TestBase):
self.assertIn(argv[0], s)
if cause:
- self.assertIn(' not found due to: ', s)
+ self.assertIn(" not found due to: ", s)
self.assertIn(str(cause), s)
else:
- self.assertIn(' not found!', s)
+ self.assertIn(" not found!", s)
@ddt.data(
- (['cmd1'], None),
- (['cmd1'], "some cause"),
- (['cmd1', 'https://fakeuser@fakerepo.example.com/testrepo'], Exception()),
+ (["cmd1"], None),
+ (["cmd1"], "some cause"),
+ (["cmd1", "https://fakeuser@fakerepo.example.com/testrepo"], Exception()),
)
def test_GitCommandError(self, init_args):
argv, cause = init_args
@@ -149,15 +160,15 @@ class TExc(TestBase):
for arg in remove_password_if_present(argv):
self.assertIn(arg, s)
if cause:
- self.assertIn(' failed due to: ', s)
+ self.assertIn(" failed due to: ", s)
self.assertIn(str(cause), s)
else:
- self.assertIn(' failed!', s)
+ self.assertIn(" failed!", s)
@ddt.data(
- (['cmd1'], None),
- (['cmd1'], "some cause"),
- (['cmd1'], Exception()),
+ (["cmd1"], None),
+ (["cmd1"], "some cause"),
+ (["cmd1"], Exception()),
)
def test_HookExecutionError(self, init_args):
argv, cause = init_args
@@ -166,7 +177,7 @@ class TExc(TestBase):
self.assertIn(argv[0], s)
if cause:
- self.assertTrue(s.startswith('Hook('), s)
+ self.assertTrue(s.startswith("Hook("), s)
self.assertIn(str(cause), s)
else:
- self.assertIn(' failed!', s)
+ self.assertIn(" failed!", s)
diff --git a/test/test_fun.py b/test/test_fun.py
index e3d07194..7c99a4a1 100644
--- a/test/test_fun.py
+++ b/test/test_fun.py
@@ -16,23 +16,19 @@ from git.objects.fun import (
tree_to_stream,
tree_entries_from_data,
)
-from git.repo.fun import (
- find_worktree_git_dir
-)
-from test.lib import (
- TestBase,
- with_rw_repo,
- with_rw_directory
-)
+from git.repo.fun import find_worktree_git_dir
+from test.lib import TestBase, with_rw_repo, with_rw_directory
from git.util import bin_to_hex, cygpath, join_path_native
from gitdb.base import IStream
from gitdb.typ import str_tree_type
class TestFun(TestBase):
-
def _assert_index_entries(self, entries, trees):
- index = IndexFile.from_tree(self.rorepo, *[self.rorepo.tree(bin_to_hex(t).decode('ascii')) for t in trees])
+ index = IndexFile.from_tree(
+ self.rorepo,
+ *[self.rorepo.tree(bin_to_hex(t).decode("ascii")) for t in trees]
+ )
assert entries
assert len(index.entries) == len(entries)
for entry in entries:
@@ -80,7 +76,7 @@ class TestFun(TestBase):
istream = odb.store(IStream(str_tree_type, len(sio.getvalue()), sio))
return istream.binsha
- @with_rw_repo('0.1.6')
+ @with_rw_repo("0.1.6")
def test_three_way_merge(self, rwrepo):
def mkfile(name, sha, executable=0):
return (sha, S_IFREG | 0o644 | executable * 0o111, name)
@@ -91,6 +87,7 @@ class TestFun(TestBase):
def assert_entries(entries, num_entries, has_conflict=False):
assert len(entries) == num_entries
assert has_conflict == (len([e for e in entries if e.stage != 0]) > 0)
+
mktree = self.mktree
shaa = b"\1" * 20
@@ -100,14 +97,14 @@ class TestFun(TestBase):
odb = rwrepo.odb
# base tree
- bfn = 'basefile'
+ bfn = "basefile"
fbase = mkfile(bfn, shaa)
tb = mktree(odb, [fbase])
# non-conflicting new files, same data
- fa = mkfile('1', shab)
+ fa = mkfile("1", shab)
th = mktree(odb, [fbase, fa])
- fb = mkfile('2', shac)
+ fb = mkfile("2", shac)
tm = mktree(odb, [fbase, fb])
# two new files, same base file
@@ -115,9 +112,9 @@ class TestFun(TestBase):
assert_entries(aggressive_tree_merge(odb, trees), 3)
# both delete same file, add own one
- fa = mkfile('1', shab)
+ fa = mkfile("1", shab)
th = mktree(odb, [fa])
- fb = mkfile('2', shac)
+ fb = mkfile("2", shac)
tm = mktree(odb, [fb])
# two new files
@@ -125,9 +122,9 @@ class TestFun(TestBase):
assert_entries(aggressive_tree_merge(odb, trees), 2)
# same file added in both, differently
- fa = mkfile('1', shab)
+ fa = mkfile("1", shab)
th = mktree(odb, [fa])
- fb = mkfile('1', shac)
+ fb = mkfile("1", shac)
tm = mktree(odb, [fb])
# expect conflict
@@ -135,9 +132,9 @@ class TestFun(TestBase):
assert_entries(aggressive_tree_merge(odb, trees), 2, True)
# same file added, different mode
- fa = mkfile('1', shab)
+ fa = mkfile("1", shab)
th = mktree(odb, [fa])
- fb = mkcommit('1', shab)
+ fb = mkcommit("1", shab)
tm = mktree(odb, [fb])
# expect conflict
@@ -145,9 +142,9 @@ class TestFun(TestBase):
assert_entries(aggressive_tree_merge(odb, trees), 2, True)
# same file added in both
- fa = mkfile('1', shab)
+ fa = mkfile("1", shab)
th = mktree(odb, [fa])
- fb = mkfile('1', shab)
+ fb = mkfile("1", shab)
tm = mktree(odb, [fb])
# expect conflict
@@ -194,7 +191,11 @@ class TestFun(TestBase):
if is_them:
trees = [tb, tb, th]
entries = aggressive_tree_merge(odb, trees)
- assert len(entries) == 1 and entries[0].binsha == shaa and entries[0].mode == fa[1]
+ assert (
+ len(entries) == 1
+ and entries[0].binsha == shaa
+ and entries[0].mode == fa[1]
+ )
# one side deletes, the other changes = conflict
fa = mkfile(bfn, shab)
@@ -209,8 +210,20 @@ class TestFun(TestBase):
def test_stat_mode_to_index_mode(self):
modes = (
- 0o600, 0o611, 0o640, 0o641, 0o644, 0o650, 0o651,
- 0o700, 0o711, 0o740, 0o744, 0o750, 0o751, 0o755,
+ 0o600,
+ 0o611,
+ 0o640,
+ 0o641,
+ 0o644,
+ 0o650,
+ 0o651,
+ 0o700,
+ 0o711,
+ 0o740,
+ 0o744,
+ 0o750,
+ 0o751,
+ 0o755,
)
for mode in modes:
expected_mode = S_IFREG | (mode & S_IXUSR and 0o755 or 0o644)
@@ -229,42 +242,46 @@ class TestFun(TestBase):
def test_tree_traversal(self):
# low level tree tarversal
odb = self.rorepo.odb
- H = self.rorepo.tree('29eb123beb1c55e5db4aa652d843adccbd09ae18') # head tree
- M = self.rorepo.tree('e14e3f143e7260de9581aee27e5a9b2645db72de') # merge tree
- B = self.rorepo.tree('f606937a7a21237c866efafcad33675e6539c103') # base tree
- B_old = self.rorepo.tree('1f66cfbbce58b4b552b041707a12d437cc5f400a') # old base tree
+ H = self.rorepo.tree("29eb123beb1c55e5db4aa652d843adccbd09ae18") # head tree
+ M = self.rorepo.tree("e14e3f143e7260de9581aee27e5a9b2645db72de") # merge tree
+ B = self.rorepo.tree("f606937a7a21237c866efafcad33675e6539c103") # base tree
+ B_old = self.rorepo.tree(
+ "1f66cfbbce58b4b552b041707a12d437cc5f400a"
+ ) # old base tree
# two very different trees
- entries = traverse_trees_recursive(odb, [B_old.binsha, H.binsha], '')
+ entries = traverse_trees_recursive(odb, [B_old.binsha, H.binsha], "")
self._assert_tree_entries(entries, 2)
- oentries = traverse_trees_recursive(odb, [H.binsha, B_old.binsha], '')
+ oentries = traverse_trees_recursive(odb, [H.binsha, B_old.binsha], "")
assert len(oentries) == len(entries)
self._assert_tree_entries(oentries, 2)
# single tree
- is_no_tree = lambda i, d: i.type != 'tree'
- entries = traverse_trees_recursive(odb, [B.binsha], '')
+ is_no_tree = lambda i, d: i.type != "tree"
+ entries = traverse_trees_recursive(odb, [B.binsha], "")
assert len(entries) == len(list(B.traverse(predicate=is_no_tree)))
self._assert_tree_entries(entries, 1)
# two trees
- entries = traverse_trees_recursive(odb, [B.binsha, H.binsha], '')
+ entries = traverse_trees_recursive(odb, [B.binsha, H.binsha], "")
self._assert_tree_entries(entries, 2)
# tree trees
- entries = traverse_trees_recursive(odb, [B.binsha, H.binsha, M.binsha], '')
+ entries = traverse_trees_recursive(odb, [B.binsha, H.binsha, M.binsha], "")
self._assert_tree_entries(entries, 3)
def test_tree_traversal_single(self):
max_count = 50
count = 0
odb = self.rorepo.odb
- for commit in self.rorepo.commit("29eb123beb1c55e5db4aa652d843adccbd09ae18").traverse():
+ for commit in self.rorepo.commit(
+ "29eb123beb1c55e5db4aa652d843adccbd09ae18"
+ ).traverse():
if count >= max_count:
break
count += 1
- entries = traverse_tree_recursive(odb, commit.tree.binsha, '')
+ entries = traverse_tree_recursive(odb, commit.tree.binsha, "")
assert entries
# END for each commit
@@ -275,12 +292,12 @@ class TestFun(TestBase):
if git.version_info[:3] < (2, 5, 1):
raise SkipTest("worktree feature unsupported")
- rw_master = self.rorepo.clone(join_path_native(rw_dir, 'master_repo'))
- branch = rw_master.create_head('aaaaaaaa')
- worktree_path = join_path_native(rw_dir, 'worktree_repo')
+ rw_master = self.rorepo.clone(join_path_native(rw_dir, "master_repo"))
+ branch = rw_master.create_head("aaaaaaaa")
+ worktree_path = join_path_native(rw_dir, "worktree_repo")
if Git.is_cygwin():
worktree_path = cygpath(worktree_path)
- rw_master.git.worktree('add', worktree_path, branch.name)
+ rw_master.git.worktree("add", worktree_path, branch.name)
dotgit = osp.join(worktree_path, ".git")
statbuf = stat(dotgit)
@@ -292,5 +309,5 @@ class TestFun(TestBase):
self.assertTrue(statbuf.st_mode & S_IFDIR)
def test_tree_entries_from_data_with_failing_name_decode_py3(self):
- r = tree_entries_from_data(b'100644 \x9f\0aaa')
- assert r == [(b'aaa', 33188, '\udc9f')], r
+ r = tree_entries_from_data(b"100644 \x9f\0aaa")
+ assert r == [(b"aaa", 33188, "\udc9f")], r
diff --git a/test/test_git.py b/test/test_git.py
index 10e21487..2a034e41 100644
--- a/test/test_git.py
+++ b/test/test_git.py
@@ -10,18 +10,8 @@ import sys
from tempfile import TemporaryFile
from unittest import mock
-from git import (
- Git,
- refresh,
- GitCommandError,
- GitCommandNotFound,
- Repo,
- cmd
-)
-from test.lib import (
- TestBase,
- fixture_path
-)
+from git import Git, refresh, GitCommandError, GitCommandNotFound, Repo, cmd
+from test.lib import TestBase, fixture_path
from test.lib import with_rw_directory
from git.util import finalize_process
@@ -31,7 +21,6 @@ from git.compat import is_win
class TestGit(TestBase):
-
@classmethod
def setUpClass(cls):
super(TestGit, cls).setUpClass()
@@ -39,56 +28,72 @@ class TestGit(TestBase):
def tearDown(self):
import gc
+
gc.collect()
- @mock.patch.object(Git, 'execute')
+ @mock.patch.object(Git, "execute")
def test_call_process_calls_execute(self, git):
- git.return_value = ''
+ git.return_value = ""
self.git.version()
self.assertTrue(git.called)
- self.assertEqual(git.call_args, ((['git', 'version'],), {}))
+ self.assertEqual(git.call_args, ((["git", "version"],), {}))
def test_call_unpack_args_unicode(self):
- args = Git._Git__unpack_args('Unicode€™')
- mangled_value = 'Unicode\u20ac\u2122'
+ args = Git._Git__unpack_args("Unicode€™")
+ mangled_value = "Unicode\u20ac\u2122"
self.assertEqual(args, [mangled_value])
def test_call_unpack_args(self):
- args = Git._Git__unpack_args(['git', 'log', '--', 'Unicode€™'])
- mangled_value = 'Unicode\u20ac\u2122'
- self.assertEqual(args, ['git', 'log', '--', mangled_value])
+ args = Git._Git__unpack_args(["git", "log", "--", "Unicode€™"])
+ mangled_value = "Unicode\u20ac\u2122"
+ self.assertEqual(args, ["git", "log", "--", mangled_value])
def test_it_raises_errors(self):
self.assertRaises(GitCommandError, self.git.this_does_not_exist)
def test_it_transforms_kwargs_into_git_command_arguments(self):
- self.assertEqual(["-s"], self.git.transform_kwargs(**{'s': True}))
- self.assertEqual(["-s", "5"], self.git.transform_kwargs(**{'s': 5}))
- self.assertEqual([], self.git.transform_kwargs(**{'s': None}))
+ self.assertEqual(["-s"], self.git.transform_kwargs(**{"s": True}))
+ self.assertEqual(["-s", "5"], self.git.transform_kwargs(**{"s": 5}))
+ self.assertEqual([], self.git.transform_kwargs(**{"s": None}))
- self.assertEqual(["--max-count"], self.git.transform_kwargs(**{'max_count': True}))
- self.assertEqual(["--max-count=5"], self.git.transform_kwargs(**{'max_count': 5}))
- self.assertEqual(["--max-count=0"], self.git.transform_kwargs(**{'max_count': 0}))
- self.assertEqual([], self.git.transform_kwargs(**{'max_count': None}))
+ self.assertEqual(
+ ["--max-count"], self.git.transform_kwargs(**{"max_count": True})
+ )
+ self.assertEqual(
+ ["--max-count=5"], self.git.transform_kwargs(**{"max_count": 5})
+ )
+ self.assertEqual(
+ ["--max-count=0"], self.git.transform_kwargs(**{"max_count": 0})
+ )
+ self.assertEqual([], self.git.transform_kwargs(**{"max_count": None}))
# Multiple args are supported by using lists/tuples
- self.assertEqual(["-L", "1-3", "-L", "12-18"], self.git.transform_kwargs(**{'L': ('1-3', '12-18')}))
- self.assertEqual(["-C", "-C"], self.git.transform_kwargs(**{'C': [True, True, None, False]}))
+ self.assertEqual(
+ ["-L", "1-3", "-L", "12-18"],
+ self.git.transform_kwargs(**{"L": ("1-3", "12-18")}),
+ )
+ self.assertEqual(
+ ["-C", "-C"], self.git.transform_kwargs(**{"C": [True, True, None, False]})
+ )
# order is undefined
- res = self.git.transform_kwargs(**{'s': True, 't': True})
- self.assertEqual({'-s', '-t'}, set(res))
+ res = self.git.transform_kwargs(**{"s": True, "t": True})
+ self.assertEqual({"-s", "-t"}, set(res))
def test_it_executes_git_to_shell_and_returns_result(self):
- self.assertRegex(self.git.execute(["git", "version"]), r'^git version [\d\.]{2}.*$')
+ self.assertRegex(
+ self.git.execute(["git", "version"]), r"^git version [\d\.]{2}.*$"
+ )
def test_it_accepts_stdin(self):
filename = fixture_path("cat_file_blob")
- with open(filename, 'r') as fh:
- self.assertEqual("70c379b63ffa0795fdbfbc128e5a2818397b7ef8",
- self.git.hash_object(istream=fh, stdin=True))
+ with open(filename, "r") as fh:
+ self.assertEqual(
+ "70c379b63ffa0795fdbfbc128e5a2818397b7ef8",
+ self.git.hash_object(istream=fh, stdin=True),
+ )
- @mock.patch.object(Git, 'execute')
+ @mock.patch.object(Git, "execute")
def test_it_ignores_false_kwargs(self, git):
# this_should_not_be_ignored=False implies it *should* be ignored
self.git.version(pass_this_kwarg=False)
@@ -96,22 +101,27 @@ class TestGit(TestBase):
def test_it_raises_proper_exception_with_output_stream(self):
tmp_file = TemporaryFile()
- self.assertRaises(GitCommandError, self.git.checkout, 'non-existent-branch', output_stream=tmp_file)
+ self.assertRaises(
+ GitCommandError,
+ self.git.checkout,
+ "non-existent-branch",
+ output_stream=tmp_file,
+ )
def test_it_accepts_environment_variables(self):
filename = fixture_path("ls_tree_empty")
- with open(filename, 'r') as fh:
+ with open(filename, "r") as fh:
tree = self.git.mktree(istream=fh)
env = {
- 'GIT_AUTHOR_NAME': 'Author Name',
- 'GIT_AUTHOR_EMAIL': 'author@example.com',
- 'GIT_AUTHOR_DATE': '1400000000+0000',
- 'GIT_COMMITTER_NAME': 'Committer Name',
- 'GIT_COMMITTER_EMAIL': 'committer@example.com',
- 'GIT_COMMITTER_DATE': '1500000000+0000',
+ "GIT_AUTHOR_NAME": "Author Name",
+ "GIT_AUTHOR_EMAIL": "author@example.com",
+ "GIT_AUTHOR_DATE": "1400000000+0000",
+ "GIT_COMMITTER_NAME": "Committer Name",
+ "GIT_COMMITTER_EMAIL": "committer@example.com",
+ "GIT_COMMITTER_DATE": "1500000000+0000",
}
- commit = self.git.commit_tree(tree, m='message', env=env)
- self.assertEqual(commit, '4cfd6b0314682d5a58f80be39850bad1640e9241')
+ commit = self.git.commit_tree(tree, m="message", env=env)
+ self.assertEqual(commit, "4cfd6b0314682d5a58f80be39850bad1640e9241")
def test_persistent_cat_file_command(self):
# read header only
@@ -124,9 +134,7 @@ class TestGit(TestBase):
obj_info = g.stdout.readline()
# read header + data
- g = self.git.cat_file(
- batch=True, istream=subprocess.PIPE, as_process=True
- )
+ g = self.git.cat_file(batch=True, istream=subprocess.PIPE, as_process=True)
g.stdin.write(b"b2339455342180c7cc1e9bba3e9f181f7baa5167\n")
g.stdin.flush()
obj_info_two = g.stdout.readline()
@@ -161,7 +169,8 @@ class TestGit(TestBase):
try:
# set it to something that doesn't exist, assure it raises
type(self.git).GIT_PYTHON_GIT_EXECUTABLE = osp.join(
- "some", "path", "which", "doesn't", "exist", "gitbinary")
+ "some", "path", "which", "doesn't", "exist", "gitbinary"
+ )
self.assertRaises(exc, self.git.version)
finally:
type(self.git).GIT_PYTHON_GIT_EXECUTABLE = prev_cmd
@@ -173,7 +182,7 @@ class TestGit(TestBase):
# test a good path refresh
which_cmd = "where" if is_win else "which"
- path = os.popen("{0} git".format(which_cmd)).read().strip().split('\n')[0]
+ path = os.popen("{0} git".format(which_cmd)).read().strip().split("\n")[0]
refresh(path)
def test_options_are_passed_to_git(self):
@@ -197,8 +206,10 @@ class TestGit(TestBase):
self.assertRaises(GitCommandError, self.git.NoOp)
def test_single_char_git_options_are_passed_to_git(self):
- input_value = 'TestValue'
- output_value = self.git(c='user.name=%s' % input_value).config('--get', 'user.name')
+ input_value = "TestValue"
+ output_value = self.git(c="user.name=%s" % input_value).config(
+ "--get", "user.name"
+ )
self.assertEqual(input_value, output_value)
def test_change_to_transform_kwargs_does_not_break_command_options(self):
@@ -206,11 +217,13 @@ class TestGit(TestBase):
def test_insert_after_kwarg_raises(self):
# This isn't a complete add command, which doesn't matter here
- self.assertRaises(ValueError, self.git.remote, 'add', insert_kwargs_after='foo')
+ self.assertRaises(ValueError, self.git.remote, "add", insert_kwargs_after="foo")
def test_env_vars_passed_to_git(self):
- editor = 'non_existent_editor'
- with mock.patch.dict('os.environ', {'GIT_EDITOR': editor}): # @UndefinedVariable
+ editor = "non_existent_editor"
+ with mock.patch.dict(
+ "os.environ", {"GIT_EDITOR": editor}
+ ): # @UndefinedVariable
self.assertEqual(self.git.var("GIT_EDITOR"), editor)
@with_rw_directory
@@ -219,35 +232,34 @@ class TestGit(TestBase):
self.assertEqual(self.git.environment(), {})
# make sure the context manager works and cleans up after itself
- with self.git.custom_environment(PWD='/tmp'):
- self.assertEqual(self.git.environment(), {'PWD': '/tmp'})
+ with self.git.custom_environment(PWD="/tmp"):
+ self.assertEqual(self.git.environment(), {"PWD": "/tmp"})
self.assertEqual(self.git.environment(), {})
- old_env = self.git.update_environment(VARKEY='VARVALUE')
+ old_env = self.git.update_environment(VARKEY="VARVALUE")
# The returned dict can be used to revert the change, hence why it has
# an entry with value 'None'.
- self.assertEqual(old_env, {'VARKEY': None})
- self.assertEqual(self.git.environment(), {'VARKEY': 'VARVALUE'})
+ self.assertEqual(old_env, {"VARKEY": None})
+ self.assertEqual(self.git.environment(), {"VARKEY": "VARVALUE"})
new_env = self.git.update_environment(**old_env)
- self.assertEqual(new_env, {'VARKEY': 'VARVALUE'})
+ self.assertEqual(new_env, {"VARKEY": "VARVALUE"})
self.assertEqual(self.git.environment(), {})
- path = osp.join(rw_dir, 'failing-script.sh')
- with open(path, 'wt') as stream:
- stream.write("#!/usr/bin/env sh\n"
- "echo FOO\n")
+ path = osp.join(rw_dir, "failing-script.sh")
+ with open(path, "wt") as stream:
+ stream.write("#!/usr/bin/env sh\n" "echo FOO\n")
os.chmod(path, 0o777)
- rw_repo = Repo.init(osp.join(rw_dir, 'repo'))
- remote = rw_repo.create_remote('ssh-origin', "ssh://git@server/foo")
+ rw_repo = Repo.init(osp.join(rw_dir, "repo"))
+ remote = rw_repo.create_remote("ssh-origin", "ssh://git@server/foo")
with rw_repo.git.custom_environment(GIT_SSH=path):
try:
remote.fetch()
except GitCommandError as err:
- self.assertIn('FOO', str(err))
+ self.assertIn("FOO", str(err))
def test_handle_process_output(self):
from git.cmd import handle_process_output
@@ -261,14 +273,19 @@ class TestGit(TestBase):
def counter_stderr(line):
count[2] += 1
- cmdline = [sys.executable, fixture_path('cat_file.py'), str(fixture_path('issue-301_stderr'))]
- proc = subprocess.Popen(cmdline,
- stdin=None,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- shell=False,
- creationflags=cmd.PROC_CREATIONFLAGS,
- )
+ cmdline = [
+ sys.executable,
+ fixture_path("cat_file.py"),
+ str(fixture_path("issue-301_stderr")),
+ ]
+ proc = subprocess.Popen(
+ cmdline,
+ stdin=None,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ shell=False,
+ creationflags=cmd.PROC_CREATIONFLAGS,
+ )
handle_process_output(proc, counter_stdout, counter_stderr, finalize_process)
diff --git a/test/test_index.py b/test/test_index.py
index 4a20a8f6..3f88f5c5 100644
--- a/test/test_index.py
+++ b/test/test_index.py
@@ -7,10 +7,7 @@
from io import BytesIO
import os
-from stat import (
- S_ISLNK,
- ST_MODE
-)
+from stat import S_ISLNK, ST_MODE
import tempfile
from unittest import skipIf
import shutil
@@ -27,22 +24,11 @@ from git import (
CheckoutError,
)
from git.compat import is_win
-from git.exc import (
- HookExecutionError,
- InvalidGitRepositoryError
-)
+from git.exc import HookExecutionError, InvalidGitRepositoryError
from git.index.fun import hook_path
-from git.index.typ import (
- BaseIndexEntry,
- IndexEntry
-)
+from git.index.typ import BaseIndexEntry, IndexEntry
from git.objects import Blob
-from test.lib import (
- TestBase,
- fixture_path,
- fixture,
- with_rw_repo
-)
+from test.lib import TestBase, fixture_path, fixture, with_rw_repo
from test.lib import with_rw_directory
from git.util import Actor, rmtree
from git.util import HIDE_WINDOWS_KNOWN_ERRORS, hex_to_bin
@@ -53,7 +39,7 @@ from git.cmd import Git
HOOKS_SHEBANG = "#!/usr/bin/env sh\n"
-is_win_without_bash = is_win and not shutil.which('bash.exe')
+is_win_without_bash = is_win and not shutil.which("bash.exe")
def _make_hook(git_dir, name, content, make_exec=True):
@@ -70,7 +56,6 @@ def _make_hook(git_dir, name, content, make_exec=True):
class TestIndex(TestBase):
-
def __init__(self, *args):
super(TestIndex, self).__init__(*args)
self._reset_progress()
@@ -116,8 +101,20 @@ class TestIndex(TestBase):
# test entry
entry = next(iter(index.entries.values()))
- for attr in ("path", "ctime", "mtime", "dev", "inode", "mode", "uid",
- "gid", "size", "binsha", "hexsha", "stage"):
+ for attr in (
+ "path",
+ "ctime",
+ "mtime",
+ "dev",
+ "inode",
+ "mode",
+ "uid",
+ "gid",
+ "size",
+ "binsha",
+ "hexsha",
+ "stage",
+ ):
getattr(entry, attr)
# END for each method
@@ -134,7 +131,7 @@ class TestIndex(TestBase):
# write the data - it must match the original
tmpfile = tempfile.mktemp()
index_merge.write(tmpfile)
- with open(tmpfile, 'rb') as fp:
+ with open(tmpfile, "rb") as fp:
self.assertEqual(fp.read(), fixture("index_merge"))
os.remove(tmpfile)
@@ -144,21 +141,25 @@ class TestIndex(TestBase):
tree = self.rorepo.commit(tree).tree
blist = []
- for blob in tree.traverse(predicate=lambda e, d: e.type == "blob", branch_first=False):
+ for blob in tree.traverse(
+ predicate=lambda e, d: e.type == "blob", branch_first=False
+ ):
assert (blob.path, 0) in index.entries
blist.append(blob)
# END for each blob in tree
if len(blist) != len(index.entries):
iset = {k[0] for k in index.entries.keys()}
bset = {b.path for b in blist}
- raise AssertionError("CMP Failed: Missing entries in index: %s, missing in tree: %s" %
- (bset - iset, iset - bset))
+ raise AssertionError(
+ "CMP Failed: Missing entries in index: %s, missing in tree: %s"
+ % (bset - iset, iset - bset)
+ )
# END assertion message
- @with_rw_repo('0.1.6')
+ @with_rw_repo("0.1.6")
def test_index_lock_handling(self, rw_repo):
def add_bad_blob():
- rw_repo.index.add([Blob(rw_repo, b'f' * 20, 'bad-permissions', 'foo')])
+ rw_repo.index.add([Blob(rw_repo, b"f" * 20, "bad-permissions", "foo")])
try:
## 1st fail on purpose adding into index.
@@ -174,7 +175,7 @@ class TestIndex(TestBase):
except Exception as ex:
assert "index.lock' could not be obtained" not in str(ex)
- @with_rw_repo('0.1.6')
+ @with_rw_repo("0.1.6")
def test_index_file_from_tree(self, rw_repo):
common_ancestor_sha = "5117c9c8a4d3af19a9958677e45cda9269de1541"
cur_sha = "4b43ca7ff72d5f535134241e7c797ddc9c7a3573"
@@ -191,7 +192,9 @@ class TestIndex(TestBase):
self._cmp_tree_index(cur_sha, two_way_index)
# merge three trees - here we have a merge conflict
- three_way_index = IndexFile.from_tree(rw_repo, common_ancestor_sha, cur_sha, other_sha)
+ three_way_index = IndexFile.from_tree(
+ rw_repo, common_ancestor_sha, cur_sha, other_sha
+ )
assert len([e for e in three_way_index.entries.values() if e.stage != 0])
# ITERATE BLOBS
@@ -202,7 +205,7 @@ class TestIndex(TestBase):
assert isinstance(merge_blobs[0][1], Blob)
# test BlobFilter
- prefix = 'lib/git'
+ prefix = "lib/git"
for _stage, blob in base_index.iter_blobs(BlobFilter([prefix])):
assert blob.path.startswith(prefix)
@@ -224,7 +227,7 @@ class TestIndex(TestBase):
# END for each blob
self.assertEqual(num_blobs, len(three_way_index.entries))
- @with_rw_repo('0.1.6')
+ @with_rw_repo("0.1.6")
def test_index_merge_tree(self, rw_repo):
# A bit out of place, but we need a different repo for this:
self.assertNotEqual(self.rorepo, rw_repo)
@@ -234,21 +237,25 @@ class TestIndex(TestBase):
# current index is at the (virtual) cur_commit
next_commit = "4c39f9da792792d4e73fc3a5effde66576ae128c"
parent_commit = rw_repo.head.commit.parents[0]
- manifest_key = IndexFile.entry_key('MANIFEST.in', 0)
+ manifest_key = IndexFile.entry_key("MANIFEST.in", 0)
manifest_entry = rw_repo.index.entries[manifest_key]
rw_repo.index.merge_tree(next_commit)
# only one change should be recorded
assert manifest_entry.binsha != rw_repo.index.entries[manifest_key].binsha
rw_repo.index.reset(rw_repo.head)
- self.assertEqual(rw_repo.index.entries[manifest_key].binsha, manifest_entry.binsha)
+ self.assertEqual(
+ rw_repo.index.entries[manifest_key].binsha, manifest_entry.binsha
+ )
# FAKE MERGE
#############
# Add a change with a NULL sha that should conflict with next_commit. We
# pretend there was a change, but we do not even bother adding a proper
# sha for it ( which makes things faster of course )
- manifest_fake_entry = BaseIndexEntry((manifest_entry[0], b"\0" * 20, 0, manifest_entry[3]))
+ manifest_fake_entry = BaseIndexEntry(
+ (manifest_entry[0], b"\0" * 20, 0, manifest_entry[3])
+ )
# try write flag
self._assert_entries(rw_repo.index.add([manifest_fake_entry], write=False))
# add actually resolves the null-hex-sha for us as a feature, but we can
@@ -267,7 +274,9 @@ class TestIndex(TestBase):
# a three way merge would result in a conflict and fails as the command will
# not overwrite any entries in our index and hence leave them unmerged. This is
# mainly a protection feature as the current index is not yet in a tree
- self.assertRaises(GitCommandError, index.merge_tree, next_commit, base=parent_commit)
+ self.assertRaises(
+ GitCommandError, index.merge_tree, next_commit, base=parent_commit
+ )
# the only way to get the merged entries is to safe the current index away into a tree,
# which is like a temporary commit for us. This fails as well as the NULL sha deos not
@@ -277,7 +286,9 @@ class TestIndex(TestBase):
# if missing objects are okay, this would work though ( they are always okay now )
# As we can't read back the tree with NULL_SHA, we rather set it to something else
- index.entries[manifest_key] = IndexEntry(manifest_entry[:1] + (hex_to_bin('f' * 40),) + manifest_entry[2:])
+ index.entries[manifest_key] = IndexEntry(
+ manifest_entry[:1] + (hex_to_bin("f" * 40),) + manifest_entry[2:]
+ )
tree = index.write_tree()
# now make a proper three way merge with unmerged entries
@@ -286,7 +297,7 @@ class TestIndex(TestBase):
self.assertEqual(len(unmerged_blobs), 1)
self.assertEqual(list(unmerged_blobs.keys())[0], manifest_key[0])
- @with_rw_repo('0.1.6')
+ @with_rw_repo("0.1.6")
def test_index_file_diffing(self, rw_repo):
# default Index instance points to our index
index = IndexFile(rw_repo)
@@ -302,14 +313,14 @@ class TestIndex(TestBase):
# resetting the head will leave the index in a different state, and the
# diff will yield a few changes
cur_head_commit = rw_repo.head.reference.commit
- rw_repo.head.reset('HEAD~6', index=True, working_tree=False)
+ rw_repo.head.reset("HEAD~6", index=True, working_tree=False)
# diff against same index is 0
diff = index.diff()
self.assertEqual(len(diff), 0)
# against HEAD as string, must be the same as it matches index
- diff = index.diff('HEAD')
+ diff = index.diff("HEAD")
self.assertEqual(len(diff), 0)
# against previous head, there must be a difference
@@ -318,9 +329,9 @@ class TestIndex(TestBase):
# we reverse the result
adiff = index.diff(str(cur_head_commit), R=True)
- odiff = index.diff(cur_head_commit, R=False) # now its not reversed anymore
+ odiff = index.diff(cur_head_commit, R=False) # now its not reversed anymore
assert adiff != odiff
- self.assertEqual(odiff, diff) # both unreversed diffs against HEAD
+ self.assertEqual(odiff, diff) # both unreversed diffs against HEAD
# against working copy - its still at cur_commit
wdiff = index.diff(None)
@@ -333,7 +344,7 @@ class TestIndex(TestBase):
# adjust the index to match an old revision
cur_branch = rw_repo.active_branch
cur_commit = cur_branch.commit
- rev_head_parent = 'HEAD~1'
+ rev_head_parent = "HEAD~1"
assert index.reset(rev_head_parent) is index
self.assertEqual(cur_branch, rw_repo.active_branch)
@@ -351,28 +362,28 @@ class TestIndex(TestBase):
assert not index.diff(None)
self.assertEqual(cur_branch, rw_repo.active_branch)
self.assertEqual(cur_commit, rw_repo.head.commit)
- with open(file_path, 'rb') as fp:
+ with open(file_path, "rb") as fp:
assert fp.read() != new_data
# test full checkout
test_file = osp.join(rw_repo.working_tree_dir, "CHANGES")
- with open(test_file, 'ab') as fd:
+ with open(test_file, "ab") as fd:
fd.write(b"some data")
rval = index.checkout(None, force=True, fprogress=self._fprogress)
- assert 'CHANGES' in list(rval)
+ assert "CHANGES" in list(rval)
self._assert_fprogress([None])
assert osp.isfile(test_file)
os.remove(test_file)
rval = index.checkout(None, force=False, fprogress=self._fprogress)
- assert 'CHANGES' in list(rval)
+ assert "CHANGES" in list(rval)
self._assert_fprogress([None])
assert osp.isfile(test_file)
# individual file
os.remove(test_file)
rval = index.checkout(test_file, fprogress=self._fprogress)
- self.assertEqual(list(rval)[0], 'CHANGES')
+ self.assertEqual(list(rval)[0], "CHANGES")
self._assert_fprogress([test_file])
assert osp.exists(test_file)
@@ -394,7 +405,7 @@ class TestIndex(TestBase):
self.assertEqual(len(e.failed_files), len(e.failed_reasons))
self.assertIsInstance(e.failed_reasons[0], str)
self.assertEqual(len(e.valid_files), 0)
- with open(test_file, 'rb') as fd:
+ with open(test_file, "rb") as fd:
s = fd.read()
self.assertTrue(s.endswith(append_data), s)
else:
@@ -402,11 +413,11 @@ class TestIndex(TestBase):
# if we force it it should work
index.checkout(test_file, force=True)
- assert not open(test_file, 'rb').read().endswith(append_data)
+ assert not open(test_file, "rb").read().endswith(append_data)
# checkout directory
rmtree(osp.join(rw_repo.working_tree_dir, "lib"))
- rval = index.checkout('lib')
+ rval = index.checkout("lib")
assert len(list(rval)) > 1
def _count_existing(self, repo, files):
@@ -419,15 +430,18 @@ class TestIndex(TestBase):
existing += osp.isfile(osp.join(basedir, f))
# END for each deleted file
return existing
+
# END num existing helper
- @skipIf(HIDE_WINDOWS_KNOWN_ERRORS and Git.is_cygwin(),
- """FIXME: File "C:\\projects\\gitpython\\git\\test\\test_index.py", line 642, in test_index_mutation
+ @skipIf(
+ HIDE_WINDOWS_KNOWN_ERRORS and Git.is_cygwin(),
+ """FIXME: File "C:\\projects\\gitpython\\git\\test\\test_index.py", line 642, in test_index_mutation
self.assertEqual(fd.read(), link_target)
AssertionError: '!<symlink>\xff\xfe/\x00e\x00t\x00c\x00/\x00t\x00h\x00a\x00t\x00\x00\x00'
!= '/etc/that'
- """)
- @with_rw_repo('0.1.6')
+ """,
+ )
+ @with_rw_repo("0.1.6")
def test_index_mutation(self, rw_repo):
index = rw_repo.index
num_entries = len(index.entries)
@@ -446,7 +460,7 @@ class TestIndex(TestBase):
count = 0
for entry in index.entries.values():
type_id = count % 4
- if type_id == 0: # path
+ if type_id == 0: # path
yield entry.path
elif type_id == 1: # blob
yield Blob(rw_repo, entry.binsha, entry.mode, entry.path)
@@ -458,10 +472,13 @@ class TestIndex(TestBase):
raise AssertionError("Invalid Type")
count += 1
# END for each entry
+
# END mixed iterator
deleted_files = index.remove(mixed_iterator(), working_tree=False)
assert deleted_files
- self.assertEqual(self._count_existing(rw_repo, deleted_files), len(deleted_files))
+ self.assertEqual(
+ self._count_existing(rw_repo, deleted_files), len(deleted_files)
+ )
self.assertEqual(len(index.entries), 0)
# reset the index to undo our changes
@@ -475,13 +492,17 @@ class TestIndex(TestBase):
# reset everything
index.reset(working_tree=True)
- self.assertEqual(self._count_existing(rw_repo, deleted_files), len(deleted_files))
+ self.assertEqual(
+ self._count_existing(rw_repo, deleted_files), len(deleted_files)
+ )
# invalid type
self.assertRaises(TypeError, index.remove, [1])
# absolute path
- deleted_files = index.remove([osp.join(rw_repo.working_tree_dir, "lib")], r=True)
+ deleted_files = index.remove(
+ [osp.join(rw_repo.working_tree_dir, "lib")], r=True
+ )
assert len(deleted_files) > 1
self.assertRaises(ValueError, index.remove, ["/doesnt/exists"])
@@ -506,7 +527,9 @@ class TestIndex(TestBase):
my_author = Actor("Frèderic Çaufl€", "author@example.com")
my_committer = Actor("Committing Frèderic Çaufl€", "committer@example.com")
- commit_actor = index.commit(commit_message, author=my_author, committer=my_committer)
+ commit_actor = index.commit(
+ commit_message, author=my_author, committer=my_committer
+ )
assert cur_commit != commit_actor
self.assertEqual(commit_actor.author.name, "Frèderic Çaufl€")
self.assertEqual(commit_actor.author.email, "author@example.com")
@@ -522,7 +545,11 @@ class TestIndex(TestBase):
cur_commit = cur_head.commit
commit_message = "commit with dates by Avinash Sajjanshetty"
- new_commit = index.commit(commit_message, author_date="2006-04-07T22:13:13", commit_date="2005-04-07T22:13:13")
+ new_commit = index.commit(
+ commit_message,
+ author_date="2006-04-07T22:13:13",
+ commit_date="2005-04-07T22:13:13",
+ )
assert cur_commit != new_commit
print(new_commit.authored_date, new_commit.committed_date)
self.assertEqual(new_commit.message, commit_message)
@@ -538,7 +565,9 @@ class TestIndex(TestBase):
# same index, multiple parents
commit_message = "Index with multiple parents\n commit with another line"
- commit_multi_parent = index.commit(commit_message, parent_commits=(commit_no_parents, new_commit))
+ commit_multi_parent = index.commit(
+ commit_message, parent_commits=(commit_no_parents, new_commit)
+ )
self.assertEqual(commit_multi_parent.message, commit_message)
self.assertEqual(len(commit_multi_parent.parents), 2)
self.assertEqual(commit_multi_parent.parents[0], commit_no_parents)
@@ -547,26 +576,32 @@ class TestIndex(TestBase):
# re-add all files in lib
# get the lib folder back on disk, but get an index without it
- index.reset(new_commit.parents[0], working_tree=True).reset(new_commit, working_tree=False)
+ index.reset(new_commit.parents[0], working_tree=True).reset(
+ new_commit, working_tree=False
+ )
lib_file_path = osp.join("lib", "git", "__init__.py")
assert (lib_file_path, 0) not in index.entries
assert osp.isfile(osp.join(rw_repo.working_tree_dir, lib_file_path))
# directory
- entries = index.add(['lib'], fprogress=self._fprogress_add)
+ entries = index.add(["lib"], fprogress=self._fprogress_add)
self._assert_entries(entries)
self._assert_fprogress(entries)
assert len(entries) > 1
# glob
- entries = index.reset(new_commit).add([osp.join('lib', 'git', '*.py')], fprogress=self._fprogress_add)
+ entries = index.reset(new_commit).add(
+ [osp.join("lib", "git", "*.py")], fprogress=self._fprogress_add
+ )
self._assert_entries(entries)
self._assert_fprogress(entries)
self.assertEqual(len(entries), 14)
# same file
entries = index.reset(new_commit).add(
- [osp.join(rw_repo.working_tree_dir, 'lib', 'git', 'head.py')] * 2, fprogress=self._fprogress_add)
+ [osp.join(rw_repo.working_tree_dir, "lib", "git", "head.py")] * 2,
+ fprogress=self._fprogress_add,
+ )
self._assert_entries(entries)
self.assertEqual(entries[0].mode & 0o644, 0o644)
# would fail, test is too primitive to handle this case
@@ -575,7 +610,9 @@ class TestIndex(TestBase):
self.assertEqual(len(entries), 2)
# missing path
- self.assertRaises(OSError, index.reset(new_commit).add, ['doesnt/exist/must/raise'])
+ self.assertRaises(
+ OSError, index.reset(new_commit).add, ["doesnt/exist/must/raise"]
+ )
# blob from older revision overrides current index revision
old_blob = new_commit.parents[0].tree.blobs[0]
@@ -588,14 +625,19 @@ class TestIndex(TestBase):
# mode 0 not allowed
null_hex_sha = Diff.NULL_HEX_SHA
null_bin_sha = b"\0" * 20
- self.assertRaises(ValueError, index.reset(
- new_commit).add, [BaseIndexEntry((0, null_bin_sha, 0, "doesntmatter"))])
+ self.assertRaises(
+ ValueError,
+ index.reset(new_commit).add,
+ [BaseIndexEntry((0, null_bin_sha, 0, "doesntmatter"))],
+ )
# add new file
new_file_relapath = "my_new_file"
self._make_file(new_file_relapath, "hello world", rw_repo)
entries = index.reset(new_commit).add(
- [BaseIndexEntry((0o10644, null_bin_sha, 0, new_file_relapath))], fprogress=self._fprogress_add)
+ [BaseIndexEntry((0o10644, null_bin_sha, 0, new_file_relapath))],
+ fprogress=self._fprogress_add,
+ )
self._assert_entries(entries)
self._assert_fprogress(entries)
self.assertEqual(len(entries), 1)
@@ -603,20 +645,27 @@ class TestIndex(TestBase):
# add symlink
if not is_win:
- for target in ('/etc/nonexisting', '/etc/passwd', '/etc'):
+ for target in ("/etc/nonexisting", "/etc/passwd", "/etc"):
basename = "my_real_symlink"
link_file = osp.join(rw_repo.working_tree_dir, basename)
os.symlink(target, link_file)
- entries = index.reset(new_commit).add([link_file], fprogress=self._fprogress_add)
+ entries = index.reset(new_commit).add(
+ [link_file], fprogress=self._fprogress_add
+ )
self._assert_entries(entries)
self._assert_fprogress(entries)
self.assertEqual(len(entries), 1)
self.assertTrue(S_ISLNK(entries[0].mode))
- self.assertTrue(S_ISLNK(index.entries[index.entry_key("my_real_symlink", 0)].mode))
+ self.assertTrue(
+ S_ISLNK(index.entries[index.entry_key("my_real_symlink", 0)].mode)
+ )
# we expect only the target to be written
- self.assertEqual(index.repo.odb.stream(entries[0].binsha).read().decode('ascii'), target)
+ self.assertEqual(
+ index.repo.odb.stream(entries[0].binsha).read().decode("ascii"),
+ target,
+ )
os.remove(link_file)
# end for each target
@@ -627,7 +676,9 @@ class TestIndex(TestBase):
link_target = "/etc/that"
fake_symlink_path = self._make_file(fake_symlink_relapath, link_target, rw_repo)
fake_entry = BaseIndexEntry((0o120000, null_bin_sha, 0, fake_symlink_relapath))
- entries = index.reset(new_commit).add([fake_entry], fprogress=self._fprogress_add)
+ entries = index.reset(new_commit).add(
+ [fake_entry], fprogress=self._fprogress_add
+ )
self._assert_entries(entries)
self._assert_fprogress(entries)
assert entries[0].hexsha != null_hex_sha
@@ -635,7 +686,9 @@ class TestIndex(TestBase):
self.assertTrue(S_ISLNK(entries[0].mode))
# assure this also works with an alternate method
- full_index_entry = IndexEntry.from_base(BaseIndexEntry((0o120000, entries[0].binsha, 0, entries[0].path)))
+ full_index_entry = IndexEntry.from_base(
+ BaseIndexEntry((0o120000, entries[0].binsha, 0, entries[0].path))
+ )
entry_key = index.entry_key(full_index_entry)
index.reset(new_commit)
@@ -649,7 +702,7 @@ class TestIndex(TestBase):
# a tree created from this should contain the symlink
tree = index.write_tree()
assert fake_symlink_relapath in tree
- index.write() # flush our changes for the checkout
+ index.write() # flush our changes for the checkout
# checkout the fakelink, should be a link then
assert not S_ISLNK(os.stat(fake_symlink_path)[ST_MODE])
@@ -660,7 +713,7 @@ class TestIndex(TestBase):
if is_win:
# simlinks should contain the link as text ( which is what a
# symlink actually is )
- with open(fake_symlink_path, 'rt') as fd:
+ with open(fake_symlink_path, "rt") as fd:
self.assertEqual(fd.read(), link_target)
else:
self.assertTrue(S_ISLNK(os.lstat(fake_symlink_path)[ST_MODE]))
@@ -670,18 +723,19 @@ class TestIndex(TestBase):
for source, dest in rval:
assert not osp.exists(source) and osp.exists(dest)
# END for each renamed item
+
# END move assertion utility
- self.assertRaises(ValueError, index.move, ['just_one_path'])
+ self.assertRaises(ValueError, index.move, ["just_one_path"])
# file onto existing file
- files = ['AUTHORS', 'LICENSE']
+ files = ["AUTHORS", "LICENSE"]
self.assertRaises(GitCommandError, index.move, files)
# again, with force
assert_mv_rval(index.move(files, f=True))
# files into directory - dry run
- paths = ['LICENSE', 'VERSION', 'doc']
+ paths = ["LICENSE", "VERSION", "doc"]
rval = index.move(paths, dry_run=True)
self.assertEqual(len(rval), 2)
assert osp.exists(paths[0])
@@ -691,7 +745,7 @@ class TestIndex(TestBase):
assert_mv_rval(rval)
# dir into dir
- rval = index.move(['doc', 'test'])
+ rval = index.move(["doc", "test"])
assert_mv_rval(rval)
# TEST PATH REWRITING
@@ -702,21 +756,23 @@ class TestIndex(TestBase):
rval = str(count[0])
count[0] += 1
return rval
+
# END rewriter
def make_paths():
# two existing ones, one new one
- yield 'CHANGES'
- yield 'ez_setup.py'
- yield index.entries[index.entry_key('README', 0)]
- yield index.entries[index.entry_key('.gitignore', 0)]
+ yield "CHANGES"
+ yield "ez_setup.py"
+ yield index.entries[index.entry_key("README", 0)]
+ yield index.entries[index.entry_key(".gitignore", 0)]
for fid in range(3):
- fname = 'newfile%i' % fid
- with open(fname, 'wb') as fd:
+ fname = "newfile%i" % fid
+ with open(fname, "wb") as fd:
fd.write(b"abcd")
yield Blob(rw_repo, Blob.NULL_BIN_SHA, 0o100644, fname)
# END for each new file
+
# END path producer
paths = list(make_paths())
self._assert_entries(index.add(paths, path_rewriter=rewriter))
@@ -762,7 +818,7 @@ class TestIndex(TestBase):
for absfile in absfiles:
assert osp.isfile(absfile)
- @with_rw_repo('HEAD')
+ @with_rw_repo("HEAD")
def test_compare_write_tree(self, rw_repo):
# write all trees and compare them
# its important to have a few submodules in there too
@@ -776,16 +832,16 @@ class TestIndex(TestBase):
orig_tree = commit.tree
self.assertEqual(index.write_tree(), orig_tree)
# END for each commit
-
- @with_rw_repo('HEAD', bare=False)
+
+ @with_rw_repo("HEAD", bare=False)
def test_index_single_addremove(self, rw_repo):
- fp = osp.join(rw_repo.working_dir, 'testfile.txt')
- with open(fp, 'w') as fs:
- fs.write('content of testfile')
+ fp = osp.join(rw_repo.working_dir, "testfile.txt")
+ with open(fp, "w") as fs:
+ fs.write("content of testfile")
self._assert_entries(rw_repo.index.add(fp))
deleted_files = rw_repo.index.remove(fp)
assert deleted_files
-
+
def test_index_new(self):
B = self.rorepo.tree("6d9b1f4f9fa8c9f030e3207e7deacc5d5f8bba4e")
H = self.rorepo.tree("25dca42bac17d511b7e2ebdd9d1d679e7626db5f")
@@ -796,7 +852,7 @@ class TestIndex(TestBase):
assert isinstance(index, IndexFile)
# END for each arg tuple
- @with_rw_repo('HEAD', bare=True)
+ @with_rw_repo("HEAD", bare=True)
def test_index_bare_add(self, rw_bare_repo):
# Something is wrong after cloning to a bare repo, reading the
# property rw_bare_repo.working_tree_dir will return '/tmp'
@@ -804,12 +860,11 @@ class TestIndex(TestBase):
# a quick hack to make this test fail when expected.
assert rw_bare_repo.working_tree_dir is None
assert rw_bare_repo.bare
- contents = b'This is a BytesIO file'
+ contents = b"This is a BytesIO file"
filesize = len(contents)
fileobj = BytesIO(contents)
- filename = 'my-imaginary-file'
- istream = rw_bare_repo.odb.store(
- IStream(Blob.type, filesize, fileobj))
+ filename = "my-imaginary-file"
+ istream = rw_bare_repo.odb.store(IStream(Blob.type, filesize, fileobj))
entry = BaseIndexEntry((0o100644, istream.binsha, 0, filename))
try:
rw_bare_repo.index.add([entry])
@@ -818,7 +873,7 @@ class TestIndex(TestBase):
# Adding using a path should still require a non-bare repository.
asserted = False
- path = osp.join('git', 'test', 'test_index.py')
+ path = osp.join("git", "test", "test_index.py")
try:
rw_bare_repo.index.add([path])
except InvalidGitRepositoryError:
@@ -828,24 +883,24 @@ class TestIndex(TestBase):
@with_rw_directory
def test_add_utf8P_path(self, rw_dir):
# NOTE: fp is not a Unicode object in python 2 (which is the source of the problem)
- fp = osp.join(rw_dir, 'ø.txt')
- with open(fp, 'wb') as fs:
- fs.write('content of ø'.encode('utf-8'))
+ fp = osp.join(rw_dir, "ø.txt")
+ with open(fp, "wb") as fs:
+ fs.write("content of ø".encode("utf-8"))
r = Repo.init(rw_dir)
r.index.add([fp])
- r.index.commit('Added orig and prestable')
+ r.index.commit("Added orig and prestable")
@with_rw_directory
def test_add_a_file_with_wildcard_chars(self, rw_dir):
# see issue #407
- fp = osp.join(rw_dir, '[.exe')
+ fp = osp.join(rw_dir, "[.exe")
with open(fp, "wb") as f:
- f.write(b'something')
+ f.write(b"something")
r = Repo.init(rw_dir)
r.index.add([fp])
- r.index.commit('Added [.exe')
+ r.index.commit("Added [.exe")
def test__to_relative_path_at_root(self):
root = osp.abspath(os.sep)
@@ -856,29 +911,23 @@ class TestIndex(TestBase):
working_tree_dir = root
repo = Mocked()
- path = os.path.join(root, 'file')
+ path = os.path.join(root, "file")
index = IndexFile(repo)
rel = index._to_relative_path(path)
self.assertEqual(rel, os.path.relpath(path, root))
- @with_rw_repo('HEAD', bare=True)
+ @with_rw_repo("HEAD", bare=True)
def test_pre_commit_hook_success(self, rw_repo):
index = rw_repo.index
- _make_hook(
- index.repo.git_dir,
- 'pre-commit',
- "exit 0"
- )
+ _make_hook(index.repo.git_dir, "pre-commit", "exit 0")
index.commit("This should not fail")
- @with_rw_repo('HEAD', bare=True)
+ @with_rw_repo("HEAD", bare=True)
def test_pre_commit_hook_fail(self, rw_repo):
index = rw_repo.index
hp = _make_hook(
- index.repo.git_dir,
- 'pre-commit',
- "echo stdout; echo stderr 1>&2; exit 1"
+ index.repo.git_dir, "pre-commit", "echo stdout; echo stderr 1>&2; exit 1"
)
try:
index.commit("This should fail")
@@ -886,8 +935,8 @@ class TestIndex(TestBase):
if is_win_without_bash:
self.assertIsInstance(err.status, OSError)
self.assertEqual(err.command, [hp])
- self.assertEqual(err.stdout, '')
- self.assertEqual(err.stderr, '')
+ self.assertEqual(err.stdout, "")
+ self.assertEqual(err.stderr, "")
assert str(err)
else:
self.assertEqual(err.status, 1)
@@ -899,26 +948,26 @@ class TestIndex(TestBase):
raise AssertionError("Should have caught a HookExecutionError")
@skipIf(HIDE_WINDOWS_KNOWN_ERRORS, "TODO: fix hooks execution on Windows: #703")
- @with_rw_repo('HEAD', bare=True)
+ @with_rw_repo("HEAD", bare=True)
def test_commit_msg_hook_success(self, rw_repo):
commit_message = "commit default head by Frèderic Çaufl€"
from_hook_message = "from commit-msg"
index = rw_repo.index
_make_hook(
index.repo.git_dir,
- 'commit-msg',
- 'printf " {}" >> "$1"'.format(from_hook_message)
+ "commit-msg",
+ 'printf " {}" >> "$1"'.format(from_hook_message),
)
new_commit = index.commit(commit_message)
- self.assertEqual(new_commit.message, "{} {}".format(commit_message, from_hook_message))
+ self.assertEqual(
+ new_commit.message, "{} {}".format(commit_message, from_hook_message)
+ )
- @with_rw_repo('HEAD', bare=True)
+ @with_rw_repo("HEAD", bare=True)
def test_commit_msg_hook_fail(self, rw_repo):
index = rw_repo.index
hp = _make_hook(
- index.repo.git_dir,
- 'commit-msg',
- "echo stdout; echo stderr 1>&2; exit 1"
+ index.repo.git_dir, "commit-msg", "echo stdout; echo stderr 1>&2; exit 1"
)
try:
index.commit("This should fail")
@@ -926,8 +975,8 @@ class TestIndex(TestBase):
if is_win_without_bash:
self.assertIsInstance(err.status, OSError)
self.assertEqual(err.command, [hp])
- self.assertEqual(err.stdout, '')
- self.assertEqual(err.stderr, '')
+ self.assertEqual(err.stdout, "")
+ self.assertEqual(err.stderr, "")
assert str(err)
else:
self.assertEqual(err.status, 1)
diff --git a/test/test_installation.py b/test/test_installation.py
index 6117be98..2607ff4e 100644
--- a/test/test_installation.py
+++ b/test/test_installation.py
@@ -11,9 +11,9 @@ from test.lib.helper import with_rw_directory
class TestInstallation(TestBase):
def setUp_venv(self, rw_dir):
self.venv = rw_dir
- subprocess.run(['virtualenv', self.venv], stdout=subprocess.PIPE)
- self.python = os.path.join(self.venv, 'bin/python3')
- self.pip = os.path.join(self.venv, 'bin/pip3')
+ subprocess.run(["virtualenv", self.venv], stdout=subprocess.PIPE)
+ self.python = os.path.join(self.venv, "bin/python3")
+ self.pip = os.path.join(self.venv, "bin/pip3")
self.sources = os.path.join(self.venv, "src")
self.cwd = os.path.dirname(os.path.dirname(__file__))
os.symlink(self.cwd, self.sources, target_is_directory=True)
@@ -21,17 +21,46 @@ class TestInstallation(TestBase):
@with_rw_directory
def test_installation(self, rw_dir):
self.setUp_venv(rw_dir)
- result = subprocess.run([self.pip, 'install', '-r', 'requirements.txt'],
- stdout=subprocess.PIPE, cwd=self.sources)
- self.assertEqual(0, result.returncode, msg=result.stderr or result.stdout or "Can't install requirements")
- result = subprocess.run([self.python, 'setup.py', 'install'], stdout=subprocess.PIPE, cwd=self.sources)
- self.assertEqual(0, result.returncode, msg=result.stderr or result.stdout or "Can't build - setup.py failed")
- result = subprocess.run([self.python, '-c', 'import git'], stdout=subprocess.PIPE, cwd=self.sources)
- self.assertEqual(0, result.returncode, msg=result.stderr or result.stdout or "Selftest failed")
- result = subprocess.run([self.python, '-c', 'import sys;import git; print(sys.path)'],
- stdout=subprocess.PIPE, cwd=self.sources)
- syspath = result.stdout.decode('utf-8').splitlines()[0]
+ result = subprocess.run(
+ [self.pip, "install", "-r", "requirements.txt"],
+ stdout=subprocess.PIPE,
+ cwd=self.sources,
+ )
+ self.assertEqual(
+ 0,
+ result.returncode,
+ msg=result.stderr or result.stdout or "Can't install requirements",
+ )
+ result = subprocess.run(
+ [self.python, "setup.py", "install"],
+ stdout=subprocess.PIPE,
+ cwd=self.sources,
+ )
+ self.assertEqual(
+ 0,
+ result.returncode,
+ msg=result.stderr or result.stdout or "Can't build - setup.py failed",
+ )
+ result = subprocess.run(
+ [self.python, "-c", "import git"], stdout=subprocess.PIPE, cwd=self.sources
+ )
+ self.assertEqual(
+ 0,
+ result.returncode,
+ msg=result.stderr or result.stdout or "Selftest failed",
+ )
+ result = subprocess.run(
+ [self.python, "-c", "import sys;import git; print(sys.path)"],
+ stdout=subprocess.PIPE,
+ cwd=self.sources,
+ )
+ syspath = result.stdout.decode("utf-8").splitlines()[0]
syspath = ast.literal_eval(syspath)
- self.assertEqual('', syspath[0],
- msg='Failed to follow the conventions for https://docs.python.org/3/library/sys.html#sys.path')
- self.assertTrue(syspath[1].endswith('gitdb'), msg='Failed to add gitdb to sys.path')
+ self.assertEqual(
+ "",
+ syspath[0],
+ msg="Failed to follow the conventions for https://docs.python.org/3/library/sys.html#sys.path",
+ )
+ self.assertTrue(
+ syspath[1].endswith("gitdb"), msg="Failed to add gitdb to sys.path"
+ )
diff --git a/test/test_reflog.py b/test/test_reflog.py
index a6c15950..c126d3dc 100644
--- a/test/test_reflog.py
+++ b/test/test_reflog.py
@@ -2,28 +2,23 @@ import os
import tempfile
from git.objects import IndexObject
-from git.refs import (
- RefLogEntry,
- RefLog
-)
-from test.lib import (
- TestBase,
- fixture_path
-)
+from git.refs import RefLogEntry, RefLog
+from test.lib import TestBase, fixture_path
from git.util import Actor, rmtree, hex_to_bin
import os.path as osp
class TestRefLog(TestBase):
-
def test_reflogentry(self):
nullhexsha = IndexObject.NULL_HEX_SHA
- hexsha = 'F' * 40
- actor = Actor('name', 'email')
+ hexsha = "F" * 40
+ actor = Actor("name", "email")
msg = "message"
- self.assertRaises(ValueError, RefLogEntry.new, nullhexsha, hexsha, 'noactor', 0, 0, "")
+ self.assertRaises(
+ ValueError, RefLogEntry.new, nullhexsha, hexsha, "noactor", 0, 0, ""
+ )
e = RefLogEntry.new(nullhexsha, hexsha, actor, 0, 1, msg)
assert e.oldhexsha == nullhexsha
@@ -37,8 +32,8 @@ class TestRefLog(TestBase):
assert repr(e).startswith(nullhexsha)
def test_base(self):
- rlp_head = fixture_path('reflog_HEAD')
- rlp_master = fixture_path('reflog_master')
+ rlp_head = fixture_path("reflog_HEAD")
+ rlp_master = fixture_path("reflog_master")
tdir = tempfile.mktemp(suffix="test_reflogs")
os.mkdir(tdir)
@@ -52,13 +47,13 @@ class TestRefLog(TestBase):
assert len(reflog)
# iter_entries works with path and with stream
- assert len(list(RefLog.iter_entries(open(rlp_master, 'rb'))))
+ assert len(list(RefLog.iter_entries(open(rlp_master, "rb"))))
assert len(list(RefLog.iter_entries(rlp_master)))
# raise on invalid revlog
# TODO: Try multiple corrupted ones !
- pp = 'reflog_invalid_'
- for suffix in ('oldsha', 'newsha', 'email', 'date', 'sep'):
+ pp = "reflog_invalid_"
+ for suffix in ("oldsha", "newsha", "email", "date", "sep"):
self.assertRaises(ValueError, RefLog.from_file, fixture_path(pp + suffix))
# END for each invalid file
@@ -66,7 +61,7 @@ class TestRefLog(TestBase):
self.assertRaises(ValueError, RefLog().write)
# test serialize and deserialize - results must match exactly
- binsha = hex_to_bin(('f' * 40).encode('ascii'))
+ binsha = hex_to_bin(("f" * 40).encode("ascii"))
msg = "my reflog message"
cr = self.rorepo.config_reader()
for rlp in (rlp_head, rlp_master):
@@ -83,9 +78,11 @@ class TestRefLog(TestBase):
assert open(tfile).read() == open(rlp).read()
# append an entry
- entry = RefLog.append_entry(cr, tfile, IndexObject.NULL_BIN_SHA, binsha, msg)
+ entry = RefLog.append_entry(
+ cr, tfile, IndexObject.NULL_BIN_SHA, binsha, msg
+ )
assert entry.oldhexsha == IndexObject.NULL_HEX_SHA
- assert entry.newhexsha == 'f' * 40
+ assert entry.newhexsha == "f" * 40
assert entry.message == msg
assert RefLog.from_file(tfile)[-1] == entry
diff --git a/test/test_refs.py b/test/test_refs.py
index ab760a6f..ee4ec86f 100644
--- a/test/test_refs.py
+++ b/test/test_refs.py
@@ -14,13 +14,10 @@ from git import (
Commit,
SymbolicReference,
GitCommandError,
- RefLog
+ RefLog,
)
from git.objects.tag import TagObject
-from test.lib import (
- TestBase,
- with_rw_repo
-)
+from test.lib import TestBase, with_rw_repo
from git.util import Actor
import git.refs as refs
@@ -28,11 +25,10 @@ import os.path as osp
class TestRefs(TestBase):
-
def test_from_path(self):
# should be able to create any reference directly
for ref_type in (Reference, Head, TagReference, RemoteReference):
- for name in ('rela_name', 'path/rela_name'):
+ for name in ("rela_name", "path/rela_name"):
full_path = ref_type.to_full_path(name)
instance = ref_type.from_path(self.rorepo, full_path)
assert isinstance(instance, ref_type)
@@ -54,7 +50,7 @@ class TestRefs(TestBase):
tag_object_refs.append(tag)
tagobj = tag.tag
# have no dict
- self.assertRaises(AttributeError, setattr, tagobj, 'someattr', 1)
+ self.assertRaises(AttributeError, setattr, tagobj, "someattr", 1)
assert isinstance(tagobj, TagObject)
assert tagobj.tag == tag.name
assert isinstance(tagobj.tagger, Actor)
@@ -63,18 +59,18 @@ class TestRefs(TestBase):
assert tagobj.message
assert tag.object == tagobj
# can't assign the object
- self.assertRaises(AttributeError, setattr, tag, 'object', tagobj)
+ self.assertRaises(AttributeError, setattr, tag, "object", tagobj)
# END if we have a tag object
# END for tag in repo-tags
assert tag_object_refs
- assert isinstance(self.rorepo.tags['0.1.5'], TagReference)
+ assert isinstance(self.rorepo.tags["0.1.5"], TagReference)
def test_tags_author(self):
tag = self.rorepo.tags[0]
tagobj = tag.tag
assert isinstance(tagobj.tagger, Actor)
tagger_name = tagobj.tagger.name
- assert tagger_name == 'Michael Trier'
+ assert tagger_name == "Michael Trier"
def test_tags(self):
# tag refs can point to tag objects or to commits
@@ -92,7 +88,7 @@ class TestRefs(TestBase):
assert len(s) == ref_count
assert len(s | s) == ref_count
- @with_rw_repo('HEAD', bare=False)
+ @with_rw_repo("HEAD", bare=False)
def test_heads(self, rwrepo):
for head in rwrepo.heads:
assert head.name
@@ -100,8 +96,8 @@ class TestRefs(TestBase):
assert "refs/heads" in head.path
prev_object = head.object
cur_object = head.object
- assert prev_object == cur_object # represent the same git object
- assert prev_object is not cur_object # but are different instances
+ assert prev_object == cur_object # represent the same git object
+ assert prev_object is not cur_object # but are different instances
with head.config_writer() as writer:
tv = "testopt"
@@ -120,17 +116,23 @@ class TestRefs(TestBase):
head.set_tracking_branch(None)
assert head.tracking_branch() is None
- special_name = 'feature#123'
- special_name_remote_ref = SymbolicReference.create(rwrepo, 'refs/remotes/origin/%s' % special_name)
- gp_tracking_branch = rwrepo.create_head('gp_tracking#123')
- special_name_remote_ref = rwrepo.remotes[0].refs[special_name] # get correct type
+ special_name = "feature#123"
+ special_name_remote_ref = SymbolicReference.create(
+ rwrepo, "refs/remotes/origin/%s" % special_name
+ )
+ gp_tracking_branch = rwrepo.create_head("gp_tracking#123")
+ special_name_remote_ref = rwrepo.remotes[0].refs[
+ special_name
+ ] # get correct type
gp_tracking_branch.set_tracking_branch(special_name_remote_ref)
TBranch = gp_tracking_branch.tracking_branch()
if TBranch is not None:
assert TBranch.path == special_name_remote_ref.path
- git_tracking_branch = rwrepo.create_head('git_tracking#123')
- rwrepo.git.branch('-u', special_name_remote_ref.name, git_tracking_branch.name)
+ git_tracking_branch = rwrepo.create_head("git_tracking#123")
+ rwrepo.git.branch(
+ "-u", special_name_remote_ref.name, git_tracking_branch.name
+ )
TBranch = gp_tracking_branch.tracking_branch()
if TBranch is not None:
assert TBranch.name == special_name_remote_ref.name
@@ -143,7 +145,7 @@ class TestRefs(TestBase):
pcommit = cur_head.commit.parents[0].parents[0]
hlog_len = len(head.log())
blog_len = len(cur_head.log())
- assert head.set_reference(pcommit, 'detached head') is head
+ assert head.set_reference(pcommit, "detached head") is head
# one new log-entry
thlog = head.log()
assert len(thlog) == hlog_len + 1
@@ -154,23 +156,25 @@ class TestRefs(TestBase):
assert len(cur_head.log()) == blog_len
# head changes once again, cur_head doesn't change
- head.set_reference(cur_head, 'reattach head')
+ head.set_reference(cur_head, "reattach head")
assert len(head.log()) == hlog_len + 2
assert len(cur_head.log()) == blog_len
# adjusting the head-ref also adjust the head, so both reflogs are
# altered
- cur_head.set_commit(pcommit, 'changing commit')
+ cur_head.set_commit(pcommit, "changing commit")
assert len(cur_head.log()) == blog_len + 1
assert len(head.log()) == hlog_len + 3
# with automatic dereferencing
- assert head.set_commit(cur_commit, 'change commit once again') is head
+ assert head.set_commit(cur_commit, "change commit once again") is head
assert len(head.log()) == hlog_len + 4
assert len(cur_head.log()) == blog_len + 2
# a new branch has just a single entry
- other_head = Head.create(rwrepo, 'mynewhead', pcommit, logmsg='new head created')
+ other_head = Head.create(
+ rwrepo, "mynewhead", pcommit, logmsg="new head created"
+ )
log = other_head.log()
assert len(log) == 1
assert log[0].oldhexsha == pcommit.NULL_HEX_SHA
@@ -183,21 +187,21 @@ class TestRefs(TestBase):
assert len(types_found) >= 3
def test_is_valid(self):
- assert not Reference(self.rorepo, 'refs/doesnt/exist').is_valid()
+ assert not Reference(self.rorepo, "refs/doesnt/exist").is_valid()
assert self.rorepo.head.is_valid()
assert self.rorepo.head.reference.is_valid()
- assert not SymbolicReference(self.rorepo, 'hellothere').is_valid()
+ assert not SymbolicReference(self.rorepo, "hellothere").is_valid()
def test_orig_head(self):
assert type(self.rorepo.head.orig_head()) == SymbolicReference
- @with_rw_repo('0.1.6')
+ @with_rw_repo("0.1.6")
def test_head_checkout_detached_head(self, rw_repo):
res = rw_repo.remotes.origin.refs.master.checkout()
assert isinstance(res, SymbolicReference)
- assert res.name == 'HEAD'
+ assert res.name == "HEAD"
- @with_rw_repo('0.1.6')
+ @with_rw_repo("0.1.6")
def test_head_reset(self, rw_repo):
cur_head = rw_repo.head
old_head_commit = cur_head.commit
@@ -205,7 +209,9 @@ class TestRefs(TestBase):
cur_head.reset(new_head_commit, index=True) # index only
assert cur_head.reference.commit == new_head_commit
- self.assertRaises(ValueError, cur_head.reset, new_head_commit, index=False, working_tree=True)
+ self.assertRaises(
+ ValueError, cur_head.reset, new_head_commit, index=False, working_tree=True
+ )
new_head_commit = new_head_commit.parents[0]
cur_head.reset(new_head_commit, index=True, working_tree=True) # index + wt
assert cur_head.reference.commit == new_head_commit
@@ -215,7 +221,13 @@ class TestRefs(TestBase):
cur_head.reset(cur_head, paths="test")
cur_head.reset(new_head_commit, paths="lib")
# hard resets with paths don't work, its all or nothing
- self.assertRaises(GitCommandError, cur_head.reset, new_head_commit, working_tree=True, paths="lib")
+ self.assertRaises(
+ GitCommandError,
+ cur_head.reset,
+ new_head_commit,
+ working_tree=True,
+ paths="lib",
+ )
# we can do a mixed reset, and then checkout from the index though
cur_head.reset(new_head_commit)
@@ -255,7 +267,7 @@ class TestRefs(TestBase):
self.assertRaises(ValueError, setattr, cur_head, "reference", "that")
# head handling
- commit = 'HEAD'
+ commit = "HEAD"
prev_head_commit = cur_head.commit
for count, new_name in enumerate(("my_new_head", "feature/feature1")):
actual_commit = commit + "^" * count
@@ -267,7 +279,9 @@ class TestRefs(TestBase):
Head.create(rw_repo, new_name, new_head.commit)
# its not fine with a different value
- self.assertRaises(OSError, Head.create, rw_repo, new_name, new_head.commit.parents[0])
+ self.assertRaises(
+ OSError, Head.create, rw_repo, new_name, new_head.commit.parents[0]
+ )
# force it
new_head = Head.create(rw_repo, new_name, actual_commit, force=True)
@@ -276,7 +290,9 @@ class TestRefs(TestBase):
assert new_head.rename("hello").name == "hello"
assert new_head.rename("hello/world").name == "hello/world"
- assert new_head.rename(old_name).name == old_name and new_head.path == old_path
+ assert (
+ new_head.rename(old_name).name == old_name and new_head.path == old_path
+ )
# rename with force
tmp_head = Head.create(rw_repo, "tmphead")
@@ -330,7 +346,7 @@ class TestRefs(TestBase):
remote_head_name = "HEAD"
if remote_head_name in refs:
RemoteReference.delete(rw_repo, refs[remote_head_name])
- del(refs[remote_head_name])
+ del refs[remote_head_name]
# END handle HEAD deletion
RemoteReference.delete(rw_repo, *refs)
@@ -358,13 +374,13 @@ class TestRefs(TestBase):
# setting a non-commit as commit fails, but succeeds as object
head_tree = head.commit.tree
- self.assertRaises(ValueError, setattr, head, 'commit', head_tree)
- assert head.commit == old_commit # and the ref did not change
+ self.assertRaises(ValueError, setattr, head, "commit", head_tree)
+ assert head.commit == old_commit # and the ref did not change
# we allow heds to point to any object
head.object = head_tree
assert head.object == head_tree
# cannot query tree as commit
- self.assertRaises(TypeError, getattr, head, 'commit')
+ self.assertRaises(TypeError, getattr, head, "commit")
# set the commit directly using the head. This would never detach the head
assert not cur_head.is_detached
@@ -396,25 +412,25 @@ class TestRefs(TestBase):
# checkout with force as we have a changed a file
# clear file
- open(new_head.commit.tree.blobs[-1].abspath, 'w').close()
+ open(new_head.commit.tree.blobs[-1].abspath, "w").close()
assert len(new_head.commit.diff(None))
# create a new branch that is likely to touch the file we changed
- far_away_head = rw_repo.create_head("far_head", 'HEAD~100')
+ far_away_head = rw_repo.create_head("far_head", "HEAD~100")
self.assertRaises(GitCommandError, far_away_head.checkout)
assert active_branch == active_branch.checkout(force=True)
assert rw_repo.head.reference != far_away_head
# test reference creation
- partial_ref = 'sub/ref'
- full_ref = 'refs/%s' % partial_ref
+ partial_ref = "sub/ref"
+ full_ref = "refs/%s" % partial_ref
ref = Reference.create(rw_repo, partial_ref)
assert ref.path == full_ref
assert ref.object == rw_repo.head.commit
- self.assertRaises(OSError, Reference.create, rw_repo, full_ref, 'HEAD~20')
+ self.assertRaises(OSError, Reference.create, rw_repo, full_ref, "HEAD~20")
# it works if it is at the same spot though and points to the same reference
- assert Reference.create(rw_repo, full_ref, 'HEAD').path == full_ref
+ assert Reference.create(rw_repo, full_ref, "HEAD").path == full_ref
Reference.delete(rw_repo, full_ref)
# recreate the reference using a full_ref
@@ -423,13 +439,13 @@ class TestRefs(TestBase):
assert ref.object == rw_repo.head.commit
# recreate using force
- ref = Reference.create(rw_repo, partial_ref, 'HEAD~1', force=True)
+ ref = Reference.create(rw_repo, partial_ref, "HEAD~1", force=True)
assert ref.path == full_ref
assert ref.object == rw_repo.head.commit.parents[0]
# rename it
orig_obj = ref.object
- for name in ('refs/absname', 'rela_name', 'feature/rela_name'):
+ for name in ("refs/absname", "rela_name", "feature/rela_name"):
ref_new_name = ref.rename(name)
assert isinstance(ref_new_name, Reference)
assert name in ref_new_name.path
@@ -438,7 +454,9 @@ class TestRefs(TestBase):
# END for each name type
# References that don't exist trigger an error if we want to access them
- self.assertRaises(ValueError, getattr, Reference(rw_repo, "refs/doesntexist"), 'commit')
+ self.assertRaises(
+ ValueError, getattr, Reference(rw_repo, "refs/doesntexist"), "commit"
+ )
# exists, fail unless we force
ex_ref_path = far_away_head.path
@@ -455,9 +473,18 @@ class TestRefs(TestBase):
assert symref.path == symref_path
assert symref.reference == cur_head.reference
- self.assertRaises(OSError, SymbolicReference.create, rw_repo, symref_path, cur_head.reference.commit)
+ self.assertRaises(
+ OSError,
+ SymbolicReference.create,
+ rw_repo,
+ symref_path,
+ cur_head.reference.commit,
+ )
# it works if the new ref points to the same reference
- assert SymbolicReference.create(rw_repo, symref.path, symref.reference).path == symref.path # @NoEffect
+ assert (
+ SymbolicReference.create(rw_repo, symref.path, symref.reference).path
+ == symref.path
+ ) # @NoEffect
SymbolicReference.delete(rw_repo, symref)
# would raise if the symref wouldn't have been deletedpbl
symref = SymbolicReference.create(rw_repo, symref_path, cur_head.reference)
@@ -475,7 +502,7 @@ class TestRefs(TestBase):
assert osp.isfile(symbol_ref_abspath)
assert symref.commit == new_head.commit
- for name in ('absname', 'folder/rela_name'):
+ for name in ("absname", "folder/rela_name"):
symref_new_name = symref.rename(name)
assert isinstance(symref_new_name, SymbolicReference)
assert name in symref_new_name.path
@@ -524,7 +551,7 @@ class TestRefs(TestBase):
rw_repo.head.reference = Head.create(rw_repo, "master")
# At least the head should still exist
- assert osp.isfile(osp.join(rw_repo.git_dir, 'HEAD'))
+ assert osp.isfile(osp.join(rw_repo.git_dir, "HEAD"))
refs = list(SymbolicReference.iter_items(rw_repo))
assert len(refs) == 1
@@ -545,7 +572,7 @@ class TestRefs(TestBase):
# if the assignment raises, the ref doesn't exist
Reference.delete(ref.repo, ref.path)
assert not ref.is_valid()
- self.assertRaises(ValueError, setattr, ref, 'commit', "nonsense")
+ self.assertRaises(ValueError, setattr, ref, "commit", "nonsense")
assert not ref.is_valid()
# I am sure I had my reason to make it a class method at first, but
@@ -559,14 +586,14 @@ class TestRefs(TestBase):
Reference.delete(ref.repo, ref.path)
assert not ref.is_valid()
- self.assertRaises(ValueError, setattr, ref, 'object', "nonsense")
+ self.assertRaises(ValueError, setattr, ref, "object", "nonsense")
assert not ref.is_valid()
# END for each path
def test_dereference_recursive(self):
# for now, just test the HEAD
- assert SymbolicReference.dereference_recursive(self.rorepo, 'HEAD')
+ assert SymbolicReference.dereference_recursive(self.rorepo, "HEAD")
def test_reflog(self):
assert isinstance(self.rorepo.heads.master.log(), RefLog)
diff --git a/test/test_remote.py b/test/test_remote.py
index 761a7a3e..53f71e3d 100644
--- a/test/test_remote.py
+++ b/test/test_remote.py
@@ -20,7 +20,7 @@ from git import (
RemoteReference,
TagReference,
Remote,
- GitCommandError
+ GitCommandError,
)
from git.cmd import Git
from test.lib import (
@@ -28,7 +28,7 @@ from test.lib import (
with_rw_repo,
with_rw_and_rw_remote_repo,
fixture,
- GIT_DAEMON_PORT
+ GIT_DAEMON_PORT,
)
from git.util import rmtree, HIDE_WINDOWS_FREEZE_ERRORS, IterableList
import os.path as osp
@@ -39,7 +39,7 @@ random.seed(0)
class TestRemoteProgress(RemoteProgress):
- __slots__ = ("_seen_lines", "_stages_per_op", '_num_progress_messages')
+ __slots__ = ("_seen_lines", "_stages_per_op", "_num_progress_messages")
def __init__(self):
super(TestRemoteProgress, self).__init__()
@@ -60,21 +60,27 @@ class TestRemoteProgress(RemoteProgress):
except ValueError:
pass
- def update(self, op_code, cur_count, max_count=None, message=''):
+ def update(self, op_code, cur_count, max_count=None, message=""):
# check each stage only comes once
op_id = op_code & self.OP_MASK
assert op_id in (self.COUNTING, self.COMPRESSING, self.WRITING)
if op_code & self.WRITING > 0:
if op_code & self.BEGIN > 0:
- assert not message, 'should not have message when remote begins writing'
+ assert not message, "should not have message when remote begins writing"
elif op_code & self.END > 0:
assert message
- assert not message.startswith(', '), "Sanitize progress messages: '%s'" % message
- assert not message.endswith(', '), "Sanitize progress messages: '%s'" % message
+ assert not message.startswith(", "), (
+ "Sanitize progress messages: '%s'" % message
+ )
+ assert not message.endswith(", "), (
+ "Sanitize progress messages: '%s'" % message
+ )
self._stages_per_op.setdefault(op_id, 0)
- self._stages_per_op[op_id] = self._stages_per_op[op_id] | (op_code & self.STAGE_MASK)
+ self._stages_per_op[op_id] = self._stages_per_op[op_id] | (
+ op_code & self.STAGE_MASK
+ )
if op_code & (self.WRITING | self.END) == (self.WRITING | self.END):
assert message
@@ -101,9 +107,9 @@ class TestRemoteProgress(RemoteProgress):
class TestRemote(TestBase):
-
def tearDown(self):
import gc
+
gc.collect()
def _print_fetchhead(self, repo):
@@ -140,7 +146,11 @@ class TestRemote(TestBase):
self.assertIsInstance(info.old_commit, Commit)
if info.flags & info.ERROR:
has_one = False
- for bitflag in (info.REJECTED, info.REMOTE_REJECTED, info.REMOTE_FAILURE):
+ for bitflag in (
+ info.REJECTED,
+ info.REMOTE_REJECTED,
+ info.REMOTE_FAILURE,
+ ):
has_one |= bool(info.flags & bitflag)
# END for each bitflag
self.assertTrue(has_one)
@@ -161,15 +171,22 @@ class TestRemote(TestBase):
results.raise_if_error()
def _do_test_fetch_info(self, repo):
- self.assertRaises(ValueError, FetchInfo._from_line, repo, "nonsense", '')
+ self.assertRaises(ValueError, FetchInfo._from_line, repo, "nonsense", "")
self.assertRaises(
- ValueError, FetchInfo._from_line, repo, "? [up to date] 0.1.7RC -> origin/0.1.7RC", '')
+ ValueError,
+ FetchInfo._from_line,
+ repo,
+ "? [up to date] 0.1.7RC -> origin/0.1.7RC",
+ "",
+ )
def _commit_random_file(self, repo):
# Create a file with a random name and random data and commit it to repo.
# Return the committed absolute file path
index = repo.index
- new_file = self._make_file(osp.basename(tempfile.mktemp()), str(random.random()), repo)
+ new_file = self._make_file(
+ osp.basename(tempfile.mktemp()), str(random.random()), repo
+ )
index.add([new_file])
index.commit("Committing %s" % new_file)
return new_file
@@ -180,11 +197,12 @@ class TestRemote(TestBase):
def fetch_and_test(remote, **kwargs):
progress = TestRemoteProgress()
- kwargs['progress'] = progress
+ kwargs["progress"] = progress
res = remote.fetch(**kwargs)
progress.make_assertion()
self._do_test_fetch_result(res, remote)
return res
+
# END fetch and check
def get_info(res, remote, name):
@@ -204,7 +222,7 @@ class TestRemote(TestBase):
remote_commit = rhead.commit
rhead.reset("HEAD~2", index=False)
res = fetch_and_test(remote)
- mkey = "%s/%s" % (remote, 'master')
+ mkey = "%s/%s" % (remote, "master")
master_info = res[mkey]
self.assertTrue(master_info.flags & FetchInfo.FORCED_UPDATE)
self.assertIsNotNone(master_info.note)
@@ -241,10 +259,10 @@ class TestRemote(TestBase):
# test single branch fetch with refspec including target remote
res = fetch_and_test(remote, refspec="master:refs/remotes/%s/master" % remote)
self.assertEqual(len(res), 1)
- self.assertTrue(get_info(res, remote, 'master'))
+ self.assertTrue(get_info(res, remote, "master"))
# ... with respec and no target
- res = fetch_and_test(remote, refspec='master')
+ res = fetch_and_test(remote, refspec="master")
self.assertEqual(len(res), 1)
# ... multiple refspecs ... works, but git command returns with error if one ref is wrong without
@@ -286,8 +304,12 @@ class TestRemote(TestBase):
# must clone with a local path for the repo implementation not to freak out
# as it wants local paths only ( which I can understand )
other_repo = remote_repo.clone(other_repo_dir, shared=False)
- remote_repo_url = osp.basename(remote_repo.git_dir) # git-daemon runs with appropriate `--base-path`.
- remote_repo_url = Git.polish_url("git://localhost:%s/%s" % (GIT_DAEMON_PORT, remote_repo_url))
+ remote_repo_url = osp.basename(
+ remote_repo.git_dir
+ ) # git-daemon runs with appropriate `--base-path`.
+ remote_repo_url = Git.polish_url(
+ "git://localhost:%s/%s" % (GIT_DAEMON_PORT, remote_repo_url)
+ )
# put origin to git-url
other_origin = other_repo.remotes.origin
@@ -321,7 +343,7 @@ class TestRemote(TestBase):
except AttributeError:
# if the author is on a non-master branch, the clones might not have
# a local master yet. We simply create it
- lhead.reference = rw_repo.create_head('master')
+ lhead.reference = rw_repo.create_head("master")
# END master handling
lhead.reset(remote.refs.master, working_tree=True)
@@ -345,7 +367,7 @@ class TestRemote(TestBase):
self._do_test_push_result(res, remote)
# force rejected pull
- res = remote.push('+%s' % lhead.reference)
+ res = remote.push("+%s" % lhead.reference)
self.assertEqual(res[0].flags & PushInfo.ERROR, 0)
self.assertTrue(res[0].flags & PushInfo.FORCED_UPDATE)
self._do_test_push_result(res, remote)
@@ -357,7 +379,9 @@ class TestRemote(TestBase):
progress = TestRemoteProgress()
to_be_updated = "my_tag.1.0RV"
new_tag = TagReference.create(rw_repo, to_be_updated) # @UnusedVariable
- other_tag = TagReference.create(rw_repo, "my_obj_tag.2.1aRV", logmsg="my message")
+ other_tag = TagReference.create(
+ rw_repo, "my_obj_tag.2.1aRV", logmsg="my message"
+ )
res = remote.push(progress=progress, tags=True)
self.assertTrue(res[-1].flags & PushInfo.NEW_TAG)
progress.make_assertion()
@@ -365,7 +389,9 @@ class TestRemote(TestBase):
# update push new tags
# Rejection is default
- new_tag = TagReference.create(rw_repo, to_be_updated, reference='HEAD~1', force=True)
+ new_tag = TagReference.create(
+ rw_repo, to_be_updated, reference="HEAD~1", force=True
+ )
res = remote.push(tags=True)
self._do_test_push_result(res, remote)
self.assertTrue(res[-1].flags & PushInfo.REJECTED)
@@ -411,7 +437,7 @@ class TestRemote(TestBase):
res = remote.push(all=True)
self._do_test_push_result(res, remote)
- remote.pull('master', kill_after_timeout=10.0)
+ remote.pull("master", kill_after_timeout=10.0)
# cleanup - delete created tags and branches as we are in an innerloop on
# the same repository
@@ -419,7 +445,7 @@ class TestRemote(TestBase):
remote.push(":%s" % other_tag.path, kill_after_timeout=10.0)
@skipIf(HIDE_WINDOWS_FREEZE_ERRORS, "FIXME: Freezes!")
- @with_rw_and_rw_remote_repo('0.1.6')
+ @with_rw_and_rw_remote_repo("0.1.6")
def test_base(self, rw_repo, remote_repo):
num_remotes = 0
remote_set = set()
@@ -477,8 +503,9 @@ class TestRemote(TestBase):
# Only for remotes - local cases are the same or less complicated
# as additional progress information will never be emitted
if remote.name == "daemon_origin":
- self._do_test_fetch(remote, rw_repo, remote_repo,
- kill_after_timeout=10.0)
+ self._do_test_fetch(
+ remote, rw_repo, remote_repo, kill_after_timeout=10.0
+ )
ran_fetch_test = True
# END fetch test
@@ -489,7 +516,7 @@ class TestRemote(TestBase):
self.assertTrue(num_remotes)
self.assertEqual(num_remotes, len(remote_set))
- origin = rw_repo.remote('origin')
+ origin = rw_repo.remote("origin")
assert origin == rw_repo.remotes.origin
# Verify we can handle prunes when fetching
@@ -502,15 +529,19 @@ class TestRemote(TestBase):
num_deleted = False
for branch in remote_repo.heads:
- if branch.name != 'master':
+ if branch.name != "master":
branch.delete(remote_repo, branch, force=True)
num_deleted += 1
# end
# end for each branch
self.assertGreater(num_deleted, 0)
- self.assertEqual(len(rw_repo.remotes.origin.fetch(prune=True)), 1, "deleted everything but master")
+ self.assertEqual(
+ len(rw_repo.remotes.origin.fetch(prune=True)),
+ 1,
+ "deleted everything but master",
+ )
- @with_rw_repo('HEAD', bare=True)
+ @with_rw_repo("HEAD", bare=True)
def test_creation_and_removal(self, bare_rw_repo):
new_name = "test_new_one"
arg_list = (new_name, "git@server:hello.git")
@@ -523,7 +554,9 @@ class TestRemote(TestBase):
self.assertRaises(GitCommandError, Remote.create, bare_rw_repo, *arg_list)
Remote.remove(bare_rw_repo, new_name)
- self.assertTrue(remote.exists()) # We still have a cache that doesn't know we were deleted by name
+ self.assertTrue(
+ remote.exists()
+ ) # We still have a cache that doesn't know we were deleted by name
remote._clear_cache()
assert not remote.exists() # Cache should be renewed now. This is an issue ...
@@ -534,86 +567,108 @@ class TestRemote(TestBase):
# END for each remote
# Issue #262 - the next call would fail if bug wasn't fixed
- bare_rw_repo.create_remote('bogus', '/bogus/path', mirror='push')
+ bare_rw_repo.create_remote("bogus", "/bogus/path", mirror="push")
def test_fetch_info(self):
# assure we can handle remote-tracking branches
- fetch_info_line_fmt = "c437ee5deb8d00cf02f03720693e4c802e99f390 not-for-merge %s '0.3' of "
+ fetch_info_line_fmt = (
+ "c437ee5deb8d00cf02f03720693e4c802e99f390 not-for-merge %s '0.3' of "
+ )
fetch_info_line_fmt += "git://github.com/gitpython-developers/GitPython"
remote_info_line_fmt = "* [new branch] nomatter -> %s"
- self.assertRaises(ValueError, FetchInfo._from_line, self.rorepo,
- remote_info_line_fmt % "refs/something/branch",
- "269c498e56feb93e408ed4558c8138d750de8893\t\t/Users/ben/test/foo\n")
-
- fi = FetchInfo._from_line(self.rorepo,
- remote_info_line_fmt % "local/master",
- fetch_info_line_fmt % 'remote-tracking branch')
+ self.assertRaises(
+ ValueError,
+ FetchInfo._from_line,
+ self.rorepo,
+ remote_info_line_fmt % "refs/something/branch",
+ "269c498e56feb93e408ed4558c8138d750de8893\t\t/Users/ben/test/foo\n",
+ )
+
+ fi = FetchInfo._from_line(
+ self.rorepo,
+ remote_info_line_fmt % "local/master",
+ fetch_info_line_fmt % "remote-tracking branch",
+ )
assert not fi.ref.is_valid()
self.assertEqual(fi.ref.name, "local/master")
# handles non-default refspecs: One can specify a different path in refs/remotes
# or a special path just in refs/something for instance
- fi = FetchInfo._from_line(self.rorepo,
- remote_info_line_fmt % "subdir/tagname",
- fetch_info_line_fmt % 'tag')
+ fi = FetchInfo._from_line(
+ self.rorepo,
+ remote_info_line_fmt % "subdir/tagname",
+ fetch_info_line_fmt % "tag",
+ )
self.assertIsInstance(fi.ref, TagReference)
- assert fi.ref.path.startswith('refs/tags'), fi.ref.path
+ assert fi.ref.path.startswith("refs/tags"), fi.ref.path
# it could be in a remote direcftory though
- fi = FetchInfo._from_line(self.rorepo,
- remote_info_line_fmt % "remotename/tags/tagname",
- fetch_info_line_fmt % 'tag')
+ fi = FetchInfo._from_line(
+ self.rorepo,
+ remote_info_line_fmt % "remotename/tags/tagname",
+ fetch_info_line_fmt % "tag",
+ )
self.assertIsInstance(fi.ref, TagReference)
- assert fi.ref.path.startswith('refs/remotes/'), fi.ref.path
+ assert fi.ref.path.startswith("refs/remotes/"), fi.ref.path
# it can also be anywhere !
tag_path = "refs/something/remotename/tags/tagname"
- fi = FetchInfo._from_line(self.rorepo,
- remote_info_line_fmt % tag_path,
- fetch_info_line_fmt % 'tag')
+ fi = FetchInfo._from_line(
+ self.rorepo, remote_info_line_fmt % tag_path, fetch_info_line_fmt % "tag"
+ )
self.assertIsInstance(fi.ref, TagReference)
self.assertEqual(fi.ref.path, tag_path)
# branches default to refs/remotes
- fi = FetchInfo._from_line(self.rorepo,
- remote_info_line_fmt % "remotename/branch",
- fetch_info_line_fmt % 'branch')
+ fi = FetchInfo._from_line(
+ self.rorepo,
+ remote_info_line_fmt % "remotename/branch",
+ fetch_info_line_fmt % "branch",
+ )
self.assertIsInstance(fi.ref, RemoteReference)
- self.assertEqual(fi.ref.remote_name, 'remotename')
+ self.assertEqual(fi.ref.remote_name, "remotename")
# but you can force it anywhere, in which case we only have a references
- fi = FetchInfo._from_line(self.rorepo,
- remote_info_line_fmt % "refs/something/branch",
- fetch_info_line_fmt % 'branch')
+ fi = FetchInfo._from_line(
+ self.rorepo,
+ remote_info_line_fmt % "refs/something/branch",
+ fetch_info_line_fmt % "branch",
+ )
assert type(fi.ref) is Reference, type(fi.ref)
self.assertEqual(fi.ref.path, "refs/something/branch")
def test_uncommon_branch_names(self):
- stderr_lines = fixture('uncommon_branch_prefix_stderr').decode('ascii').splitlines()
- fetch_lines = fixture('uncommon_branch_prefix_FETCH_HEAD').decode('ascii').splitlines()
+ stderr_lines = (
+ fixture("uncommon_branch_prefix_stderr").decode("ascii").splitlines()
+ )
+ fetch_lines = (
+ fixture("uncommon_branch_prefix_FETCH_HEAD").decode("ascii").splitlines()
+ )
# The contents of the files above must be fetched with a custom refspec:
# +refs/pull/*:refs/heads/pull/*
- res = [FetchInfo._from_line('ShouldntMatterRepo', stderr, fetch_line)
- for stderr, fetch_line in zip(stderr_lines, fetch_lines)]
+ res = [
+ FetchInfo._from_line("ShouldntMatterRepo", stderr, fetch_line)
+ for stderr, fetch_line in zip(stderr_lines, fetch_lines)
+ ]
self.assertGreater(len(res), 0)
- self.assertEqual(res[0].remote_ref_path, 'refs/pull/1/head')
- self.assertEqual(res[0].ref.path, 'refs/heads/pull/1/head')
+ self.assertEqual(res[0].remote_ref_path, "refs/pull/1/head")
+ self.assertEqual(res[0].ref.path, "refs/heads/pull/1/head")
self.assertIsInstance(res[0].ref, Head)
- @with_rw_repo('HEAD', bare=False)
+ @with_rw_repo("HEAD", bare=False)
def test_multiple_urls(self, rw_repo):
# test addresses
- test1 = 'https://github.com/gitpython-developers/GitPython'
- test2 = 'https://github.com/gitpython-developers/gitdb'
- test3 = 'https://github.com/gitpython-developers/smmap'
+ test1 = "https://github.com/gitpython-developers/GitPython"
+ test2 = "https://github.com/gitpython-developers/gitdb"
+ test3 = "https://github.com/gitpython-developers/smmap"
remote = rw_repo.remotes[0]
# Testing setting a single URL
@@ -639,7 +694,7 @@ class TestRemote(TestBase):
self.assertRaises(GitCommandError, remote.set_url, test2, add=True, delete=True)
# Testing on another remote, with the add/delete URL
- remote = rw_repo.create_remote('another', url=test1)
+ remote = rw_repo.create_remote("another", url=test1)
remote.add_url(test2)
self.assertEqual(list(remote.urls), [test1, test2])
remote.add_url(test3)
@@ -653,19 +708,23 @@ class TestRemote(TestBase):
self.assertRaises(GitCommandError, remote.delete_url, test3)
def test_fetch_error(self):
- rem = self.rorepo.remote('origin')
- with self.assertRaisesRegex(GitCommandError, "[Cc]ouldn't find remote ref __BAD_REF__"):
- rem.fetch('__BAD_REF__')
+ rem = self.rorepo.remote("origin")
+ with self.assertRaisesRegex(
+ GitCommandError, "[Cc]ouldn't find remote ref __BAD_REF__"
+ ):
+ rem.fetch("__BAD_REF__")
- @with_rw_repo('0.1.6', bare=False)
+ @with_rw_repo("0.1.6", bare=False)
def test_push_error(self, repo):
- rem = repo.remote('origin')
- with self.assertRaisesRegex(GitCommandError, "src refspec __BAD_REF__ does not match any"):
- rem.push('__BAD_REF__')
+ rem = repo.remote("origin")
+ with self.assertRaisesRegex(
+ GitCommandError, "src refspec __BAD_REF__ does not match any"
+ ):
+ rem.push("__BAD_REF__")
class TestTimeouts(TestBase):
- @with_rw_repo('HEAD', bare=False)
+ @with_rw_repo("HEAD", bare=False)
def test_timeout_funcs(self, repo):
# Force error code to prevent a race condition if the python thread is
# slow
@@ -675,8 +734,7 @@ class TestTimeouts(TestBase):
f = getattr(repo.remotes.origin, function)
assert f is not None # Make sure these functions exist
_ = f() # Make sure the function runs
- with pytest.raises(GitCommandError,
- match="kill_after_timeout=0 s"):
+ with pytest.raises(GitCommandError, match="kill_after_timeout=0 s"):
f(kill_after_timeout=0)
Git.AutoInterrupt._status_code_if_terminate = default
diff --git a/test/test_repo.py b/test/test_repo.py
index c5b2680d..7cffbbd8 100644
--- a/test/test_repo.py
+++ b/test/test_repo.py
@@ -30,17 +30,13 @@ from git import (
GitCmdObjectDB,
Remote,
BadName,
- GitCommandError
+ GitCommandError,
)
from git.exc import (
BadObject,
)
from git.repo.fun import touch
-from test.lib import (
- TestBase,
- with_rw_repo,
- fixture
-)
+from test.lib import TestBase, with_rw_repo, fixture
from git.util import HIDE_WINDOWS_KNOWN_ERRORS, cygpath
from test.lib import with_rw_directory
from git.util import join_path_native, rmtree, rmfile, bin_to_hex
@@ -58,7 +54,7 @@ def flatten(lol):
return list(iter_flatten(lol))
-_tc_lock_fpaths = osp.join(osp.dirname(__file__), '../../.git/*.lock')
+_tc_lock_fpaths = osp.join(osp.dirname(__file__), "../../.git/*.lock")
def _rm_lock_files():
@@ -67,15 +63,17 @@ def _rm_lock_files():
class TestRepo(TestBase):
-
def setUp(self):
_rm_lock_files()
def tearDown(self):
for lfp in glob.glob(_tc_lock_fpaths):
if osp.isfile(lfp):
- raise AssertionError('Previous TC left hanging git-lock file: {}'.format(lfp))
+ raise AssertionError(
+ "Previous TC left hanging git-lock file: {}".format(lfp)
+ )
import gc
+
gc.collect()
def test_new_should_raise_on_invalid_repo_location(self):
@@ -84,15 +82,15 @@ class TestRepo(TestBase):
def test_new_should_raise_on_non_existent_path(self):
self.assertRaises(NoSuchPathError, Repo, "repos/foobar")
- @with_rw_repo('0.3.2.1')
+ @with_rw_repo("0.3.2.1")
def test_repo_creation_from_different_paths(self, rw_repo):
r_from_gitdir = Repo(rw_repo.git_dir)
self.assertEqual(r_from_gitdir.git_dir, rw_repo.git_dir)
- assert r_from_gitdir.git_dir.endswith('.git')
- assert not rw_repo.git.working_dir.endswith('.git')
+ assert r_from_gitdir.git_dir.endswith(".git")
+ assert not rw_repo.git.working_dir.endswith(".git")
self.assertEqual(r_from_gitdir.git.working_dir, rw_repo.git.working_dir)
- @with_rw_repo('0.3.2.1')
+ @with_rw_repo("0.3.2.1")
def test_repo_creation_pathlib(self, rw_repo):
r_from_gitdir = Repo(pathlib.Path(rw_repo.git_dir))
self.assertEqual(r_from_gitdir.git_dir, rw_repo.git_dir)
@@ -113,33 +111,35 @@ class TestRepo(TestBase):
# END for each head
self.assertIsInstance(self.rorepo.heads.master, Head)
- self.assertIsInstance(self.rorepo.heads['master'], Head)
+ self.assertIsInstance(self.rorepo.heads["master"], Head)
def test_tree_from_revision(self):
- tree = self.rorepo.tree('0.1.6')
+ tree = self.rorepo.tree("0.1.6")
self.assertEqual(len(tree.hexsha), 40)
self.assertEqual(tree.type, "tree")
self.assertEqual(self.rorepo.tree(tree), tree)
# try from invalid revision that does not exist
- self.assertRaises(BadName, self.rorepo.tree, 'hello world')
+ self.assertRaises(BadName, self.rorepo.tree, "hello world")
def test_pickleable(self):
pickle.loads(pickle.dumps(self.rorepo))
def test_commit_from_revision(self):
- commit = self.rorepo.commit('0.1.4')
- self.assertEqual(commit.type, 'commit')
+ commit = self.rorepo.commit("0.1.4")
+ self.assertEqual(commit.type, "commit")
self.assertEqual(self.rorepo.commit(commit), commit)
def test_commits(self):
mc = 10
- commits = list(self.rorepo.iter_commits('0.1.6', max_count=mc))
+ commits = list(self.rorepo.iter_commits("0.1.6", max_count=mc))
self.assertEqual(len(commits), mc)
c = commits[0]
- self.assertEqual('9a4b1d4d11eee3c5362a4152216376e634bd14cf', c.hexsha)
- self.assertEqual(["c76852d0bff115720af3f27acdb084c59361e5f6"], [p.hexsha for p in c.parents])
+ self.assertEqual("9a4b1d4d11eee3c5362a4152216376e634bd14cf", c.hexsha)
+ self.assertEqual(
+ ["c76852d0bff115720af3f27acdb084c59361e5f6"], [p.hexsha for p in c.parents]
+ )
self.assertEqual("ce41fc29549042f1aa09cc03174896cf23f112e3", c.tree.hexsha)
self.assertEqual("Michael Trier", c.author.name)
self.assertEqual("mtrier@gmail.com", c.author.email)
@@ -157,7 +157,7 @@ class TestRepo(TestBase):
def test_trees(self):
mc = 30
num_trees = 0
- for tree in self.rorepo.iter_trees('0.1.5', max_count=mc):
+ for tree in self.rorepo.iter_trees("0.1.5", max_count=mc):
num_trees += 1
self.assertIsInstance(tree, Tree)
# END for each tree
@@ -176,7 +176,7 @@ class TestRepo(TestBase):
assert not repo.head.is_valid()
# we can change the head to some other ref
- head_ref = Head.from_path(repo, Head.to_full_path('some_head'))
+ head_ref = Head.from_path(repo, Head.to_full_path("some_head"))
assert not head_ref.is_valid()
repo.head.ref = head_ref
@@ -195,7 +195,9 @@ class TestRepo(TestBase):
original_repo = Repo.init(osp.join(rw_dir, "repo"))
environment = {"entry1": "value", "another_entry": "10"}
- cloned = Repo.clone_from(original_repo.git_dir, osp.join(rw_dir, "clone"), env=environment)
+ cloned = Repo.clone_from(
+ original_repo.git_dir, osp.join(rw_dir, "clone"), env=environment
+ )
self.assertEqual(environment, cloned.git.environment())
@@ -215,20 +217,32 @@ class TestRepo(TestBase):
def test_clone_from_pathlib_withConfig(self, rw_dir):
original_repo = Repo.init(osp.join(rw_dir, "repo"))
- cloned = Repo.clone_from(original_repo.git_dir, pathlib.Path(rw_dir) / "clone_pathlib_withConfig",
- multi_options=["--recurse-submodules=repo",
- "--config core.filemode=false",
- "--config submodule.repo.update=checkout",
- "--config filter.lfs.clean='git-lfs clean -- %f'"])
-
- self.assertEqual(cloned.config_reader().get_value('submodule', 'active'), 'repo')
- self.assertEqual(cloned.config_reader().get_value('core', 'filemode'), False)
- self.assertEqual(cloned.config_reader().get_value('submodule "repo"', 'update'), 'checkout')
- self.assertEqual(cloned.config_reader().get_value('filter "lfs"', 'clean'), 'git-lfs clean -- %f')
+ cloned = Repo.clone_from(
+ original_repo.git_dir,
+ pathlib.Path(rw_dir) / "clone_pathlib_withConfig",
+ multi_options=[
+ "--recurse-submodules=repo",
+ "--config core.filemode=false",
+ "--config submodule.repo.update=checkout",
+ "--config filter.lfs.clean='git-lfs clean -- %f'",
+ ],
+ )
+
+ self.assertEqual(
+ cloned.config_reader().get_value("submodule", "active"), "repo"
+ )
+ self.assertEqual(cloned.config_reader().get_value("core", "filemode"), False)
+ self.assertEqual(
+ cloned.config_reader().get_value('submodule "repo"', "update"), "checkout"
+ )
+ self.assertEqual(
+ cloned.config_reader().get_value('filter "lfs"', "clean"),
+ "git-lfs clean -- %f",
+ )
def test_clone_from_with_path_contains_unicode(self):
with tempfile.TemporaryDirectory() as tmpdir:
- unicode_dir_name = '\u0394'
+ unicode_dir_name = "\u0394"
path_with_unicode = os.path.join(tmpdir, unicode_dir_name)
os.makedirs(path_with_unicode)
@@ -238,7 +252,7 @@ class TestRepo(TestBase):
to_path=path_with_unicode,
)
except UnicodeEncodeError:
- self.fail('Raised UnicodeEncodeError')
+ self.fail("Raised UnicodeEncodeError")
@with_rw_directory
def test_leaking_password_in_clone_logs(self, rw_dir):
@@ -246,16 +260,21 @@ class TestRepo(TestBase):
try:
Repo.clone_from(
url="https://fakeuser:{}@fakerepo.example.com/testrepo".format(
- password),
- to_path=rw_dir)
+ password
+ ),
+ to_path=rw_dir,
+ )
except GitCommandError as err:
- assert password not in str(err), "The error message '%s' should not contain the password" % err
+ assert password not in str(err), (
+ "The error message '%s' should not contain the password" % err
+ )
# Working example from a blank private project
Repo.clone_from(
url="https://gitlab+deploy-token-392045:mLWhVus7bjLsy8xj8q2V@gitlab.com/mercierm/test_git_python",
- to_path=rw_dir)
+ to_path=rw_dir,
+ )
- @with_rw_repo('HEAD')
+ @with_rw_repo("HEAD")
def test_max_chunk_size(self, repo):
class TestOutputStream(TestBase):
def __init__(self, max_chunk_size):
@@ -265,10 +284,20 @@ class TestRepo(TestBase):
self.assertTrue(len(b) <= self.max_chunk_size)
for chunk_size in [16, 128, 1024]:
- repo.git.status(output_stream=TestOutputStream(chunk_size), max_chunk_size=chunk_size)
-
- repo.git.log(n=100, output_stream=TestOutputStream(io.DEFAULT_BUFFER_SIZE), max_chunk_size=None)
- repo.git.log(n=100, output_stream=TestOutputStream(io.DEFAULT_BUFFER_SIZE), max_chunk_size=-10)
+ repo.git.status(
+ output_stream=TestOutputStream(chunk_size), max_chunk_size=chunk_size
+ )
+
+ repo.git.log(
+ n=100,
+ output_stream=TestOutputStream(io.DEFAULT_BUFFER_SIZE),
+ max_chunk_size=None,
+ )
+ repo.git.log(
+ n=100,
+ output_stream=TestOutputStream(io.DEFAULT_BUFFER_SIZE),
+ max_chunk_size=-10,
+ )
repo.git.log(n=100, output_stream=TestOutputStream(io.DEFAULT_BUFFER_SIZE))
def test_init(self):
@@ -352,7 +381,7 @@ class TestRepo(TestBase):
self.rorepo.alternates = cur_alternates
def test_repr(self):
- assert repr(self.rorepo).startswith('<git.repo.base.Repo ')
+ assert repr(self.rorepo).startswith("<git.repo.base.Repo ")
def test_is_dirty_with_bare_repository(self):
orig_value = self.rorepo._bare
@@ -365,7 +394,9 @@ class TestRepo(TestBase):
for index in (0, 1):
for working_tree in (0, 1):
for untracked_files in (0, 1):
- assert self.rorepo.is_dirty(index, working_tree, untracked_files) in (True, False)
+ assert self.rorepo.is_dirty(
+ index, working_tree, untracked_files
+ ) in (True, False)
# END untracked files
# END working tree
# END index
@@ -379,7 +410,9 @@ class TestRepo(TestBase):
for index in (0, 1):
for working_tree in (0, 1):
for untracked_files in (0, 1):
- assert self.rorepo.is_dirty(index, working_tree, untracked_files, path=':!foo') in (True, False)
+ assert self.rorepo.is_dirty(
+ index, working_tree, untracked_files, path=":!foo"
+ ) in (True, False)
# END untracked files
# END working tree
# END index
@@ -388,7 +421,7 @@ class TestRepo(TestBase):
assert self.rorepo.is_dirty() is False
self.rorepo._bare = orig_val
- @with_rw_repo('HEAD')
+ @with_rw_repo("HEAD")
def test_is_dirty_with_path(self, rwrepo):
assert rwrepo.is_dirty(path="git") is False
@@ -407,17 +440,19 @@ class TestRepo(TestBase):
assert rwrepo.is_dirty(untracked_files=True, path="doc") is True
def test_head(self):
- self.assertEqual(self.rorepo.head.reference.object, self.rorepo.active_branch.object)
+ self.assertEqual(
+ self.rorepo.head.reference.object, self.rorepo.active_branch.object
+ )
def test_index(self):
index = self.rorepo.index
self.assertIsInstance(index, IndexFile)
def test_tag(self):
- assert self.rorepo.tag('refs/tags/0.1.5').commit
+ assert self.rorepo.tag("refs/tags/0.1.5").commit
def test_tag_to_full_tag_path(self):
- tags = ['0.1.5', 'tags/0.1.5', 'refs/tags/0.1.5']
+ tags = ["0.1.5", "tags/0.1.5", "refs/tags/0.1.5"]
value_errors = []
for tag in tags:
try:
@@ -427,16 +462,16 @@ class TestRepo(TestBase):
self.assertEqual(value_errors, [])
def test_archive(self):
- tmpfile = tempfile.mktemp(suffix='archive-test')
- with open(tmpfile, 'wb') as stream:
- self.rorepo.archive(stream, '0.1.6', path='doc')
+ tmpfile = tempfile.mktemp(suffix="archive-test")
+ with open(tmpfile, "wb") as stream:
+ self.rorepo.archive(stream, "0.1.6", path="doc")
assert stream.tell()
os.remove(tmpfile)
- @mock.patch.object(Git, '_call_process')
+ @mock.patch.object(Git, "_call_process")
def test_should_display_blame_information(self, git):
- git.return_value = fixture('blame')
- b = self.rorepo.blame('master', 'lib/git.py')
+ git.return_value = fixture("blame")
+ b = self.rorepo.blame("master", "lib/git.py")
self.assertEqual(13, len(b))
self.assertEqual(2, len(b[0]))
# self.assertEqual(25, reduce(lambda acc, x: acc + len(x[-1]), b))
@@ -444,85 +479,127 @@ class TestRepo(TestBase):
c = b[0][0]
self.assertTrue(git.called)
- self.assertEqual('634396b2f541a9f2d58b00be1a07f0c358b999b3', c.hexsha)
- self.assertEqual('Tom Preston-Werner', c.author.name)
- self.assertEqual('tom@mojombo.com', c.author.email)
+ self.assertEqual("634396b2f541a9f2d58b00be1a07f0c358b999b3", c.hexsha)
+ self.assertEqual("Tom Preston-Werner", c.author.name)
+ self.assertEqual("tom@mojombo.com", c.author.email)
self.assertEqual(1191997100, c.authored_date)
- self.assertEqual('Tom Preston-Werner', c.committer.name)
- self.assertEqual('tom@mojombo.com', c.committer.email)
+ self.assertEqual("Tom Preston-Werner", c.committer.name)
+ self.assertEqual("tom@mojombo.com", c.committer.email)
self.assertEqual(1191997100, c.committed_date)
- self.assertRaisesRegex(ValueError, "634396b2f541a9f2d58b00be1a07f0c358b999b3 missing", lambda: c.message)
+ self.assertRaisesRegex(
+ ValueError,
+ "634396b2f541a9f2d58b00be1a07f0c358b999b3 missing",
+ lambda: c.message,
+ )
# test the 'lines per commit' entries
tlist = b[0][1]
self.assertTrue(tlist)
self.assertTrue(isinstance(tlist[0], str))
- self.assertTrue(len(tlist) < sum(len(t) for t in tlist)) # test for single-char bug
+ self.assertTrue(
+ len(tlist) < sum(len(t) for t in tlist)
+ ) # test for single-char bug
# BINARY BLAME
- git.return_value = fixture('blame_binary')
- blames = self.rorepo.blame('master', 'rps')
+ git.return_value = fixture("blame_binary")
+ blames = self.rorepo.blame("master", "rps")
self.assertEqual(len(blames), 2)
def test_blame_real(self):
c = 0
- nml = 0 # amount of multi-lines per blame
+ nml = 0 # amount of multi-lines per blame
for item in self.rorepo.head.commit.tree.traverse(
- predicate=lambda i, d: i.type == 'blob' and i.path.endswith('.py')):
+ predicate=lambda i, d: i.type == "blob" and i.path.endswith(".py")
+ ):
c += 1
for b in self.rorepo.blame(self.rorepo.head, item.path):
nml += int(len(b[1]) > 1)
# END for each item to traverse
assert c, "Should have executed at least one blame command"
- assert nml, "There should at least be one blame commit that contains multiple lines"
+ assert (
+ nml
+ ), "There should at least be one blame commit that contains multiple lines"
- @mock.patch.object(Git, '_call_process')
+ @mock.patch.object(Git, "_call_process")
def test_blame_incremental(self, git):
# loop over two fixtures, create a test fixture for 2.11.1+ syntax
- for git_fixture in ('blame_incremental', 'blame_incremental_2.11.1_plus'):
+ for git_fixture in ("blame_incremental", "blame_incremental_2.11.1_plus"):
git.return_value = fixture(git_fixture)
- blame_output = self.rorepo.blame_incremental('9debf6b0aafb6f7781ea9d1383c86939a1aacde3', 'AUTHORS')
+ blame_output = self.rorepo.blame_incremental(
+ "9debf6b0aafb6f7781ea9d1383c86939a1aacde3", "AUTHORS"
+ )
blame_output = list(blame_output)
self.assertEqual(len(blame_output), 5)
# Check all outputted line numbers
ranges = flatten([entry.linenos for entry in blame_output])
- self.assertEqual(ranges, flatten([range(2, 3), range(14, 15), range(1, 2), range(3, 14), range(15, 17)]))
+ self.assertEqual(
+ ranges,
+ flatten(
+ [
+ range(2, 3),
+ range(14, 15),
+ range(1, 2),
+ range(3, 14),
+ range(15, 17),
+ ]
+ ),
+ )
commits = [entry.commit.hexsha[:7] for entry in blame_output]
- self.assertEqual(commits, ['82b8902', '82b8902', 'c76852d', 'c76852d', 'c76852d'])
+ self.assertEqual(
+ commits, ["82b8902", "82b8902", "c76852d", "c76852d", "c76852d"]
+ )
# Original filenames
- self.assertSequenceEqual([entry.orig_path for entry in blame_output], ['AUTHORS'] * len(blame_output))
+ self.assertSequenceEqual(
+ [entry.orig_path for entry in blame_output],
+ ["AUTHORS"] * len(blame_output),
+ )
# Original line numbers
orig_ranges = flatten([entry.orig_linenos for entry in blame_output])
- self.assertEqual(orig_ranges, flatten([range(2, 3), range(14, 15), range(1, 2), range(2, 13), range(13, 15)])) # noqa E501
-
- @mock.patch.object(Git, '_call_process')
+ self.assertEqual(
+ orig_ranges,
+ flatten(
+ [
+ range(2, 3),
+ range(14, 15),
+ range(1, 2),
+ range(2, 13),
+ range(13, 15),
+ ]
+ ),
+ ) # noqa E501
+
+ @mock.patch.object(Git, "_call_process")
def test_blame_complex_revision(self, git):
- git.return_value = fixture('blame_complex_revision')
+ git.return_value = fixture("blame_complex_revision")
res = self.rorepo.blame("HEAD~10..HEAD", "README.md")
self.assertEqual(len(res), 1)
self.assertEqual(len(res[0][1]), 83, "Unexpected amount of parsed blame lines")
- @skipIf(HIDE_WINDOWS_KNOWN_ERRORS and Git.is_cygwin(),
- """FIXME: File "C:\\projects\\gitpython\\git\\cmd.py", line 671, in execute
+ @skipIf(
+ HIDE_WINDOWS_KNOWN_ERRORS and Git.is_cygwin(),
+ """FIXME: File "C:\\projects\\gitpython\\git\\cmd.py", line 671, in execute
raise GitCommandError(command, status, stderr_value, stdout_value)
GitCommandError: Cmd('git') failed due to: exit code(128)
cmdline: git add 1__��ava verb��ten 1_test _myfile 1_test_other_file
1_��ava-----verb��ten
stderr: 'fatal: pathspec '"1__çava verböten"' did not match any files'
- """)
- @with_rw_repo('HEAD', bare=False)
+ """,
+ )
+ @with_rw_repo("HEAD", bare=False)
def test_untracked_files(self, rwrepo):
for run, repo_add in enumerate((rwrepo.index.add, rwrepo.git.add)):
base = rwrepo.working_tree_dir
- files = (join_path_native(base, "%i_test _myfile" % run),
- join_path_native(base, "%i_test_other_file" % run),
- join_path_native(base, "%i__çava verböten" % run),
- join_path_native(base, "%i_çava-----verböten" % run))
+ files = (
+ join_path_native(base, "%i_test _myfile" % run),
+ join_path_native(base, "%i_test_other_file" % run),
+ join_path_native(base, "%i__çava verböten" % run),
+ join_path_native(base, "%i_çava-----verböten" % run),
+ )
num_recently_untracked = 0
for fpath in files:
@@ -538,13 +615,15 @@ class TestRepo(TestBase):
self.assertEqual(len(files), num_test_untracked)
repo_add(untracked_files)
- self.assertEqual(len(rwrepo.untracked_files), (num_recently_untracked - len(files)))
+ self.assertEqual(
+ len(rwrepo.untracked_files), (num_recently_untracked - len(files))
+ )
# end for each run
def test_config_reader(self):
- reader = self.rorepo.config_reader() # all config files
+ reader = self.rorepo.config_reader() # all config files
assert reader.read_only
- reader = self.rorepo.config_reader("repository") # single config file
+ reader = self.rorepo.config_reader("repository") # single config file
assert reader.read_only
def test_config_writer(self):
@@ -586,17 +665,17 @@ class TestRepo(TestBase):
@with_rw_directory
def test_tilde_and_env_vars_in_repo_path(self, rw_dir):
- ph = os.environ.get('HOME')
+ ph = os.environ.get("HOME")
try:
- os.environ['HOME'] = rw_dir
- Repo.init(osp.join('~', 'test.git'), bare=True)
+ os.environ["HOME"] = rw_dir
+ Repo.init(osp.join("~", "test.git"), bare=True)
- os.environ['FOO'] = rw_dir
- Repo.init(osp.join('$FOO', 'test.git'), bare=True)
+ os.environ["FOO"] = rw_dir
+ Repo.init(osp.join("$FOO", "test.git"), bare=True)
finally:
if ph:
- os.environ['HOME'] = ph
- del os.environ['FOO']
+ os.environ["HOME"] = ph
+ del os.environ["FOO"]
# end assure HOME gets reset to what it was
def test_git_cmd(self):
@@ -623,7 +702,7 @@ class TestRepo(TestBase):
s = mkfull()
lines = s.readlines()
self.assertEqual(len(lines), 3)
- self.assertTrue(lines[-1].endswith(b'\n'), lines[-1])
+ self.assertTrue(lines[-1].endswith(b"\n"), lines[-1])
self.assertEqual(s._stream.tell(), len(d)) # must have scrubbed to the end
# realines line limit
@@ -643,7 +722,7 @@ class TestRepo(TestBase):
self.assertEqual(s.readline(), l1)
self.assertEqual(s.readline(), l2)
self.assertEqual(s.readline(), l3)
- self.assertEqual(s.readline(), b'')
+ self.assertEqual(s.readline(), b"")
self.assertEqual(s._stream.tell(), len(d))
# readline limit
@@ -654,13 +733,13 @@ class TestRepo(TestBase):
# readline on tiny section
s = mktiny()
self.assertEqual(s.readline(), l1p)
- self.assertEqual(s.readline(), b'')
+ self.assertEqual(s.readline(), b"")
self.assertEqual(s._stream.tell(), ts + 1)
# read no limit
s = mkfull()
self.assertEqual(s.read(), d[:-1])
- self.assertEqual(s.read(), b'')
+ self.assertEqual(s.read(), b"")
self.assertEqual(s._stream.tell(), len(d))
# read limit
@@ -679,24 +758,24 @@ class TestRepo(TestBase):
def _assert_rev_parse_types(self, name, rev_obj):
rev_parse = self.rorepo.rev_parse
- if rev_obj.type == 'tag':
+ if rev_obj.type == "tag":
rev_obj = rev_obj.object
# tree and blob type
- obj = rev_parse(name + '^{tree}')
+ obj = rev_parse(name + "^{tree}")
self.assertEqual(obj, rev_obj.tree)
- obj = rev_parse(name + ':CHANGES')
- self.assertEqual(obj.type, 'blob')
- self.assertEqual(obj.path, 'CHANGES')
- self.assertEqual(rev_obj.tree['CHANGES'], obj)
+ obj = rev_parse(name + ":CHANGES")
+ self.assertEqual(obj.type, "blob")
+ self.assertEqual(obj.path, "CHANGES")
+ self.assertEqual(rev_obj.tree["CHANGES"], obj)
def _assert_rev_parse(self, name):
"""tries multiple different rev-parse syntaxes with the given name
:return: parsed object"""
rev_parse = self.rorepo.rev_parse
orig_obj = rev_parse(name)
- if orig_obj.type == 'tag':
+ if orig_obj.type == "tag":
obj = orig_obj.object
else:
obj = orig_obj
@@ -737,17 +816,19 @@ class TestRepo(TestBase):
return orig_obj
- @with_rw_repo('HEAD', bare=False)
+ @with_rw_repo("HEAD", bare=False)
def test_rw_rev_parse(self, rwrepo):
# verify it does not confuse branches with hexsha ids
- ahead = rwrepo.create_head('aaaaaaaa')
- assert(rwrepo.rev_parse(str(ahead)) == ahead.commit)
+ ahead = rwrepo.create_head("aaaaaaaa")
+ assert rwrepo.rev_parse(str(ahead)) == ahead.commit
def test_rev_parse(self):
rev_parse = self.rorepo.rev_parse
# try special case: This one failed at some point, make sure its fixed
- self.assertEqual(rev_parse("33ebe").hexsha, "33ebe7acec14b25c5f84f35a664803fcab2f7781")
+ self.assertEqual(
+ rev_parse("33ebe").hexsha, "33ebe7acec14b25c5f84f35a664803fcab2f7781"
+ )
# start from reference
num_resolved = 0
@@ -755,7 +836,7 @@ class TestRepo(TestBase):
for ref_no, ref in enumerate(Reference.iter_items(self.rorepo)):
path_tokens = ref.path.split("/")
for pt in range(len(path_tokens)):
- path_section = '/'.join(path_tokens[-(pt + 1):])
+ path_section = "/".join(path_tokens[-(pt + 1) :])
try:
obj = self._assert_rev_parse(path_section)
self.assertEqual(obj.type, ref.object.type)
@@ -771,17 +852,17 @@ class TestRepo(TestBase):
assert num_resolved
# it works with tags !
- tag = self._assert_rev_parse('0.1.4')
- self.assertEqual(tag.type, 'tag')
+ tag = self._assert_rev_parse("0.1.4")
+ self.assertEqual(tag.type, "tag")
# try full sha directly ( including type conversion )
self.assertEqual(tag.object, rev_parse(tag.object.hexsha))
self._assert_rev_parse_types(tag.object.hexsha, tag.object)
# multiple tree types result in the same tree: HEAD^{tree}^{tree}:CHANGES
- rev = '0.1.4^{tree}^{tree}'
+ rev = "0.1.4^{tree}^{tree}"
self.assertEqual(rev_parse(rev), tag.object.tree)
- self.assertEqual(rev_parse(rev + ':CHANGES'), tag.object.tree['CHANGES'])
+ self.assertEqual(rev_parse(rev + ":CHANGES"), tag.object.tree["CHANGES"])
# try to get parents from first revision - it should fail as no such revision
# exists
@@ -802,15 +883,18 @@ class TestRepo(TestBase):
# needs a tag which points to a blob
# ref^0 returns commit being pointed to, same with ref~0, and ^{}
- tag = rev_parse('0.1.4')
- for token in (('~0', '^0', '^{}')):
- self.assertEqual(tag.object, rev_parse('0.1.4%s' % token))
+ tag = rev_parse("0.1.4")
+ for token in ("~0", "^0", "^{}"):
+ self.assertEqual(tag.object, rev_parse("0.1.4%s" % token))
# END handle multiple tokens
# try partial parsing
max_items = 40
for i, binsha in enumerate(self.rorepo.odb.sha_iter()):
- self.assertEqual(rev_parse(bin_to_hex(binsha)[:8 - (i % 2)].decode('ascii')).binsha, binsha)
+ self.assertEqual(
+ rev_parse(bin_to_hex(binsha)[: 8 - (i % 2)].decode("ascii")).binsha,
+ binsha,
+ )
if i > max_items:
# this is rather slow currently, as rev_parse returns an object
# which requires accessing packs, it has some additional overhead
@@ -818,10 +902,10 @@ class TestRepo(TestBase):
# END for each binsha in repo
# missing closing brace commit^{tree
- self.assertRaises(ValueError, rev_parse, '0.1.4^{tree')
+ self.assertRaises(ValueError, rev_parse, "0.1.4^{tree")
# missing starting brace
- self.assertRaises(ValueError, rev_parse, '0.1.4^tree}')
+ self.assertRaises(ValueError, rev_parse, "0.1.4^tree}")
# REVLOG
#######
@@ -831,23 +915,23 @@ class TestRepo(TestBase):
self.assertRaises(BadObject, rev_parse, "%s@{0}" % head.commit.hexsha)
# uses HEAD.ref by default
- self.assertEqual(rev_parse('@{0}'), head.commit)
+ self.assertEqual(rev_parse("@{0}"), head.commit)
if not head.is_detached:
- refspec = '%s@{0}' % head.ref.name
+ refspec = "%s@{0}" % head.ref.name
self.assertEqual(rev_parse(refspec), head.ref.commit)
# all additional specs work as well
self.assertEqual(rev_parse(refspec + "^{tree}"), head.commit.tree)
- self.assertEqual(rev_parse(refspec + ":CHANGES").type, 'blob')
+ self.assertEqual(rev_parse(refspec + ":CHANGES").type, "blob")
# END operate on non-detached head
# position doesn't exist
- self.assertRaises(IndexError, rev_parse, '@{10000}')
+ self.assertRaises(IndexError, rev_parse, "@{10000}")
# currently, nothing more is supported
self.assertRaises(NotImplementedError, rev_parse, "@{1 week ago}")
# the last position
- assert rev_parse('@{1}') != head.commit
+ assert rev_parse("@{1}") != head.commit
def test_repo_odbtype(self):
target_type = GitCmdObjectDB
@@ -860,7 +944,7 @@ class TestRepo(TestBase):
self.assertIsInstance(self.rorepo.submodule("gitdb"), Submodule)
self.assertRaises(ValueError, self.rorepo.submodule, "doesn't exist")
- @with_rw_repo('HEAD', bare=False)
+ @with_rw_repo("HEAD", bare=False)
def test_submodule_update(self, rwrepo):
# fails in bare mode
rwrepo._bare = True
@@ -869,27 +953,31 @@ class TestRepo(TestBase):
# test create submodule
sm = rwrepo.submodules[0]
- sm = rwrepo.create_submodule("my_new_sub", "some_path", join_path_native(self.rorepo.working_tree_dir, sm.path))
+ sm = rwrepo.create_submodule(
+ "my_new_sub",
+ "some_path",
+ join_path_native(self.rorepo.working_tree_dir, sm.path),
+ )
self.assertIsInstance(sm, Submodule)
# note: the rest of this functionality is tested in test_submodule
- @with_rw_repo('HEAD')
+ @with_rw_repo("HEAD")
def test_git_file(self, rwrepo):
# Move the .git directory to another location and create the .git file.
- real_path_abs = osp.abspath(join_path_native(rwrepo.working_tree_dir, '.real'))
+ real_path_abs = osp.abspath(join_path_native(rwrepo.working_tree_dir, ".real"))
os.rename(rwrepo.git_dir, real_path_abs)
- git_file_path = join_path_native(rwrepo.working_tree_dir, '.git')
- with open(git_file_path, 'wb') as fp:
- fp.write(fixture('git_file'))
+ git_file_path = join_path_native(rwrepo.working_tree_dir, ".git")
+ with open(git_file_path, "wb") as fp:
+ fp.write(fixture("git_file"))
# Create a repo and make sure it's pointing to the relocated .git directory.
git_file_repo = Repo(rwrepo.working_tree_dir)
self.assertEqual(osp.abspath(git_file_repo.git_dir), real_path_abs)
# Test using an absolute gitdir path in the .git file.
- with open(git_file_path, 'wb') as fp:
- fp.write(('gitdir: %s\n' % real_path_abs).encode('ascii'))
+ with open(git_file_path, "wb") as fp:
+ fp.write(("gitdir: %s\n" % real_path_abs).encode("ascii"))
git_file_repo = Repo(rwrepo.working_tree_dir)
self.assertEqual(osp.abspath(git_file_repo.git_dir), real_path_abs)
@@ -906,13 +994,13 @@ class TestRepo(TestBase):
for _ in range(64):
for repo_type in (GitCmdObjectDB, GitDB):
repo = Repo(self.rorepo.working_tree_dir, odbt=repo_type)
- last_commit(repo, 'master', 'test/test_base.py')
+ last_commit(repo, "master", "test/test_base.py")
# end for each repository type
# end for each iteration
def test_remote_method(self):
- self.assertRaises(ValueError, self.rorepo.remote, 'foo-blue')
- self.assertIsInstance(self.rorepo.remote(name='origin'), Remote)
+ self.assertRaises(ValueError, self.rorepo.remote, "foo-blue")
+ self.assertIsInstance(self.rorepo.remote(name="origin"), Remote)
@with_rw_directory
def test_empty_repo(self, rw_dir):
@@ -920,13 +1008,13 @@ class TestRepo(TestBase):
r = Repo.init(rw_dir, mkdir=False)
# It's ok not to be able to iterate a commit, as there is none
self.assertRaises(ValueError, r.iter_commits)
- self.assertEqual(r.active_branch.name, 'master')
+ self.assertEqual(r.active_branch.name, "master")
assert not r.active_branch.is_valid(), "Branch is yet to be born"
# actually, when trying to create a new branch without a commit, git itself fails
# We should, however, not fail ungracefully
- self.assertRaises(BadName, r.create_head, 'foo')
- self.assertRaises(BadName, r.create_head, 'master')
+ self.assertRaises(BadName, r.create_head, "foo")
+ self.assertRaises(BadName, r.create_head, "master")
# It's expected to not be able to access a tree
self.assertRaises(ValueError, r.tree)
@@ -936,43 +1024,43 @@ class TestRepo(TestBase):
r.index.commit("initial commit\nBAD MESSAGE 1\n")
# Now a branch should be creatable
- nb = r.create_head('foo')
+ nb = r.create_head("foo")
assert nb.is_valid()
- with open(new_file_path, 'w') as f:
- f.write('Line 1\n')
+ with open(new_file_path, "w") as f:
+ f.write("Line 1\n")
r.index.add([new_file_path])
r.index.commit("add line 1\nBAD MESSAGE 2\n")
- with open('%s/.git/logs/refs/heads/master' % (rw_dir,), 'r') as f:
+ with open("%s/.git/logs/refs/heads/master" % (rw_dir,), "r") as f:
contents = f.read()
- assert 'BAD MESSAGE' not in contents, 'log is corrupt'
+ assert "BAD MESSAGE" not in contents, "log is corrupt"
def test_merge_base(self):
repo = self.rorepo
- c1 = 'f6aa8d1'
- c2 = repo.commit('d46e3fe')
- c3 = '763ef75'
+ c1 = "f6aa8d1"
+ c2 = repo.commit("d46e3fe")
+ c3 = "763ef75"
self.assertRaises(ValueError, repo.merge_base)
- self.assertRaises(ValueError, repo.merge_base, 'foo')
+ self.assertRaises(ValueError, repo.merge_base, "foo")
# two commit merge-base
res = repo.merge_base(c1, c2)
self.assertIsInstance(res, list)
self.assertEqual(len(res), 1)
self.assertIsInstance(res[0], Commit)
- self.assertTrue(res[0].hexsha.startswith('3936084'))
+ self.assertTrue(res[0].hexsha.startswith("3936084"))
- for kw in ('a', 'all'):
+ for kw in ("a", "all"):
res = repo.merge_base(c1, c2, c3, **{kw: True})
self.assertIsInstance(res, list)
self.assertEqual(len(res), 1)
# end for each keyword signalling all merge-bases to be returned
# Test for no merge base - can't do as we have
- self.assertRaises(GitCommandError, repo.merge_base, c1, 'ffffff')
+ self.assertRaises(GitCommandError, repo.merge_base, c1, "ffffff")
def test_is_ancestor(self):
git = self.rorepo.git
@@ -980,23 +1068,23 @@ class TestRepo(TestBase):
raise SkipTest("git merge-base --is-ancestor feature unsupported")
repo = self.rorepo
- c1 = 'f6aa8d1'
- c2 = '763ef75'
+ c1 = "f6aa8d1"
+ c2 = "763ef75"
self.assertTrue(repo.is_ancestor(c1, c1))
self.assertTrue(repo.is_ancestor("master", "master"))
self.assertTrue(repo.is_ancestor(c1, c2))
self.assertTrue(repo.is_ancestor(c1, "master"))
self.assertFalse(repo.is_ancestor(c2, c1))
self.assertFalse(repo.is_ancestor("master", c1))
- for i, j in itertools.permutations([c1, 'ffffff', ''], r=2):
+ for i, j in itertools.permutations([c1, "ffffff", ""], r=2):
self.assertRaises(GitCommandError, repo.is_ancestor, i, j)
def test_is_valid_object(self):
repo = self.rorepo
- commit_sha = 'f6aa8d1'
- blob_sha = '1fbe3e4375'
- tree_sha = '960b40fe36'
- tag_sha = '42c2f60c43'
+ commit_sha = "f6aa8d1"
+ blob_sha = "1fbe3e4375"
+ tree_sha = "960b40fe36"
+ tag_sha = "42c2f60c43"
# Check for valid objects
self.assertTrue(repo.is_valid_object(commit_sha))
@@ -1005,19 +1093,21 @@ class TestRepo(TestBase):
self.assertTrue(repo.is_valid_object(tag_sha))
# Check for valid objects of specific type
- self.assertTrue(repo.is_valid_object(commit_sha, 'commit'))
- self.assertTrue(repo.is_valid_object(blob_sha, 'blob'))
- self.assertTrue(repo.is_valid_object(tree_sha, 'tree'))
- self.assertTrue(repo.is_valid_object(tag_sha, 'tag'))
+ self.assertTrue(repo.is_valid_object(commit_sha, "commit"))
+ self.assertTrue(repo.is_valid_object(blob_sha, "blob"))
+ self.assertTrue(repo.is_valid_object(tree_sha, "tree"))
+ self.assertTrue(repo.is_valid_object(tag_sha, "tag"))
# Check for invalid objects
- self.assertFalse(repo.is_valid_object(b'1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a', 'blob'))
+ self.assertFalse(
+ repo.is_valid_object(b"1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a1a", "blob")
+ )
# Check for invalid objects of specific type
- self.assertFalse(repo.is_valid_object(commit_sha, 'blob'))
- self.assertFalse(repo.is_valid_object(blob_sha, 'commit'))
- self.assertFalse(repo.is_valid_object(tree_sha, 'commit'))
- self.assertFalse(repo.is_valid_object(tag_sha, 'commit'))
+ self.assertFalse(repo.is_valid_object(commit_sha, "blob"))
+ self.assertFalse(repo.is_valid_object(blob_sha, "commit"))
+ self.assertFalse(repo.is_valid_object(tree_sha, "commit"))
+ self.assertFalse(repo.is_valid_object(tag_sha, "commit"))
@with_rw_directory
def test_git_work_tree_dotgit(self, rw_dir):
@@ -1027,12 +1117,12 @@ class TestRepo(TestBase):
if git.version_info[:3] < (2, 5, 1):
raise SkipTest("worktree feature unsupported")
- rw_master = self.rorepo.clone(join_path_native(rw_dir, 'master_repo'))
- branch = rw_master.create_head('aaaaaaaa')
- worktree_path = join_path_native(rw_dir, 'worktree_repo')
+ rw_master = self.rorepo.clone(join_path_native(rw_dir, "master_repo"))
+ branch = rw_master.create_head("aaaaaaaa")
+ worktree_path = join_path_native(rw_dir, "worktree_repo")
if Git.is_cygwin():
worktree_path = cygpath(worktree_path)
- rw_master.git.worktree('add', worktree_path, branch.name)
+ rw_master.git.worktree("add", worktree_path, branch.name)
# this ensures that we can read the repo's gitdir correctly
repo = Repo(worktree_path)
@@ -1048,7 +1138,7 @@ class TestRepo(TestBase):
origin = repo.remotes.origin
self.assertIsInstance(origin, Remote)
- self.assertIsInstance(repo.heads['aaaaaaaa'], Head)
+ self.assertIsInstance(repo.heads["aaaaaaaa"], Head)
@with_rw_directory
def test_git_work_tree_env(self, rw_dir):
@@ -1057,18 +1147,18 @@ class TestRepo(TestBase):
# move .git directory to a subdirectory
# set GIT_DIR and GIT_WORK_TREE appropriately
# check that repo.working_tree_dir == rw_dir
- self.rorepo.clone(join_path_native(rw_dir, 'master_repo'))
+ self.rorepo.clone(join_path_native(rw_dir, "master_repo"))
- repo_dir = join_path_native(rw_dir, 'master_repo')
- old_git_dir = join_path_native(repo_dir, '.git')
- new_subdir = join_path_native(repo_dir, 'gitdir')
- new_git_dir = join_path_native(new_subdir, 'git')
+ repo_dir = join_path_native(rw_dir, "master_repo")
+ old_git_dir = join_path_native(repo_dir, ".git")
+ new_subdir = join_path_native(repo_dir, "gitdir")
+ new_git_dir = join_path_native(new_subdir, "git")
os.mkdir(new_subdir)
os.rename(old_git_dir, new_git_dir)
oldenv = os.environ.copy()
- os.environ['GIT_DIR'] = new_git_dir
- os.environ['GIT_WORK_TREE'] = repo_dir
+ os.environ["GIT_DIR"] = new_git_dir
+ os.environ["GIT_WORK_TREE"] = repo_dir
try:
r = Repo()
@@ -1080,15 +1170,18 @@ class TestRepo(TestBase):
@with_rw_directory
def test_rebasing(self, rw_dir):
r = Repo.init(rw_dir)
- fp = osp.join(rw_dir, 'hello.txt')
- r.git.commit("--allow-empty", message="init",)
- with open(fp, 'w') as fs:
+ fp = osp.join(rw_dir, "hello.txt")
+ r.git.commit(
+ "--allow-empty",
+ message="init",
+ )
+ with open(fp, "w") as fs:
fs.write("hello world")
r.git.add(Git.polish_url(fp))
r.git.commit(message="English")
self.assertEqual(r.currently_rebasing_on(), None)
r.git.checkout("HEAD^1")
- with open(fp, 'w') as fs:
+ with open(fp, "w") as fs:
fs.write("Hola Mundo")
r.git.add(Git.polish_url(fp))
r.git.commit(message="Spanish")
@@ -1102,9 +1195,11 @@ class TestRepo(TestBase):
@with_rw_directory
def test_do_not_strip_newline_in_stdout(self, rw_dir):
r = Repo.init(rw_dir)
- fp = osp.join(rw_dir, 'hello.txt')
- with open(fp, 'w') as fs:
+ fp = osp.join(rw_dir, "hello.txt")
+ with open(fp, "w") as fs:
fs.write("hello\n")
r.git.add(Git.polish_url(fp))
r.git.commit(message="init")
- self.assertEqual(r.git.show("HEAD:hello.txt", strip_newline_in_stdout=False), 'hello\n')
+ self.assertEqual(
+ r.git.show("HEAD:hello.txt", strip_newline_in_stdout=False), "hello\n"
+ )
diff --git a/test/test_stats.py b/test/test_stats.py
index 2759698a..1f689655 100644
--- a/test/test_stats.py
+++ b/test/test_stats.py
@@ -4,27 +4,23 @@
# This module is part of GitPython and is released under
# the BSD License: http://www.opensource.org/licenses/bsd-license.php
-from test.lib import (
- TestBase,
- fixture
-)
+from test.lib import TestBase, fixture
from git import Stats
from git.compat import defenc
class TestStats(TestBase):
-
def test_list_from_string(self):
- output = fixture('diff_numstat').decode(defenc)
+ output = fixture("diff_numstat").decode(defenc)
stats = Stats._list_from_string(self.rorepo, output)
- self.assertEqual(2, stats.total['files'])
- self.assertEqual(52, stats.total['lines'])
- self.assertEqual(29, stats.total['insertions'])
- self.assertEqual(23, stats.total['deletions'])
+ self.assertEqual(2, stats.total["files"])
+ self.assertEqual(52, stats.total["lines"])
+ self.assertEqual(29, stats.total["insertions"])
+ self.assertEqual(23, stats.total["deletions"])
- self.assertEqual(29, stats.files["a.txt"]['insertions'])
- self.assertEqual(18, stats.files["a.txt"]['deletions'])
+ self.assertEqual(29, stats.files["a.txt"]["insertions"])
+ self.assertEqual(18, stats.files["a.txt"]["deletions"])
- self.assertEqual(0, stats.files["b.txt"]['insertions'])
- self.assertEqual(5, stats.files["b.txt"]['deletions'])
+ self.assertEqual(0, stats.files["b.txt"]["insertions"])
+ self.assertEqual(5, stats.files["b.txt"]["deletions"])
diff --git a/test/test_submodule.py b/test/test_submodule.py
index a79123dc..fc96391d 100644
--- a/test/test_submodule.py
+++ b/test/test_submodule.py
@@ -9,20 +9,11 @@ import git
from git.cmd import Git
from git.compat import is_win
from git.config import GitConfigParser, cp
-from git.exc import (
- InvalidGitRepositoryError,
- RepositoryDirtyError
-)
+from git.exc import InvalidGitRepositoryError, RepositoryDirtyError
from git.objects.submodule.base import Submodule
from git.objects.submodule.root import RootModule, RootUpdateProgress
-from git.repo.fun import (
- find_submodule_git_dir,
- touch
-)
-from test.lib import (
- TestBase,
- with_rw_repo
-)
+from git.repo.fun import find_submodule_git_dir, touch
+from test.lib import TestBase, with_rw_repo
from test.lib import with_rw_directory
from git.util import HIDE_WINDOWS_KNOWN_ERRORS
from git.util import to_native_path_linux, join_path_native
@@ -32,7 +23,7 @@ import os.path as osp
class TestRootProgress(RootUpdateProgress):
"""Just prints messages, for now without checking the correctness of the states"""
- def update(self, op, cur_count, max_count, message=''):
+ def update(self, op, cur_count, max_count, message=""):
print(op, cur_count, max_count, message)
@@ -40,9 +31,9 @@ prog = TestRootProgress()
class TestSubmodule(TestBase):
-
def tearDown(self):
import gc
+
gc.collect()
k_subm_current = "c15a6e1923a14bc760851913858a3942a4193cdb"
@@ -54,7 +45,7 @@ class TestSubmodule(TestBase):
# manual instantiation
smm = Submodule(rwrepo, "\0" * 20)
# name needs to be set in advance
- self.assertRaises(AttributeError, getattr, smm, 'name')
+ self.assertRaises(AttributeError, getattr, smm, "name")
# iterate - 1 submodule
sms = Submodule.list_items(rwrepo, self.k_subm_current)
@@ -64,11 +55,13 @@ class TestSubmodule(TestBase):
# at a different time, there is None
assert len(Submodule.list_items(rwrepo, self.k_no_subm_tag)) == 0
- assert sm.path == 'git/ext/gitdb'
- assert sm.path != sm.name # in our case, we have ids there, which don't equal the path
- assert sm.url.endswith('github.com/gitpython-developers/gitdb.git')
- assert sm.branch_path == 'refs/heads/master' # the default ...
- assert sm.branch_name == 'master'
+ assert sm.path == "git/ext/gitdb"
+ assert (
+ sm.path != sm.name
+ ) # in our case, we have ids there, which don't equal the path
+ assert sm.url.endswith("github.com/gitpython-developers/gitdb.git")
+ assert sm.branch_path == "refs/heads/master" # the default ...
+ assert sm.branch_name == "master"
assert sm.parent_commit == rwrepo.head.commit
# size is always 0
assert sm.size == 0
@@ -76,7 +69,7 @@ class TestSubmodule(TestBase):
self.assertRaises(InvalidGitRepositoryError, sm.module)
# which is why we can't get the branch either - it points into the module() repository
- self.assertRaises(InvalidGitRepositoryError, getattr, sm, 'branch')
+ self.assertRaises(InvalidGitRepositoryError, getattr, sm, "branch")
# branch_path works, as its just a string
assert isinstance(sm.branch_path, str)
@@ -84,16 +77,16 @@ class TestSubmodule(TestBase):
# some commits earlier we still have a submodule, but its at a different commit
smold = next(Submodule.iter_items(rwrepo, self.k_subm_changed))
assert smold.binsha != sm.binsha
- assert smold != sm # the name changed
+ assert smold != sm # the name changed
# force it to reread its information
- del(smold._url)
+ del smold._url
smold.url == sm.url # @NoEffect
# test config_reader/writer methods
sm.config_reader()
- new_smclone_path = None # keep custom paths for later
- new_csmclone_path = None #
+ new_smclone_path = None # keep custom paths for later
+ new_csmclone_path = None #
if rwrepo.bare:
with self.assertRaises(InvalidGitRepositoryError):
with sm.config_writer() as cw:
@@ -101,10 +94,12 @@ class TestSubmodule(TestBase):
else:
with sm.config_writer() as writer:
# for faster checkout, set the url to the local path
- new_smclone_path = Git.polish_url(osp.join(self.rorepo.working_tree_dir, sm.path))
- writer.set_value('url', new_smclone_path)
+ new_smclone_path = Git.polish_url(
+ osp.join(self.rorepo.working_tree_dir, sm.path)
+ )
+ writer.set_value("url", new_smclone_path)
writer.release()
- assert sm.config_reader().get_value('url') == new_smclone_path
+ assert sm.config_reader().get_value("url") == new_smclone_path
assert sm.url == new_smclone_path
# END handle bare repo
smold.config_reader()
@@ -134,7 +129,9 @@ class TestSubmodule(TestBase):
if rwrepo.bare:
self.assertRaises(InvalidGitRepositoryError, sm.module)
self.assertRaises(InvalidGitRepositoryError, sm.remove)
- self.assertRaises(InvalidGitRepositoryError, sm.add, rwrepo, 'here', 'there')
+ self.assertRaises(
+ InvalidGitRepositoryError, sm.add, rwrepo, "here", "there"
+ )
else:
# its not checked out in our case
self.assertRaises(InvalidGitRepositoryError, sm.module)
@@ -152,13 +149,15 @@ class TestSubmodule(TestBase):
assert sma.path == sm.path
# no url and no module at path fails
- self.assertRaises(ValueError, Submodule.add, rwrepo, "newsubm", "pathtorepo", url=None)
+ self.assertRaises(
+ ValueError, Submodule.add, rwrepo, "newsubm", "pathtorepo", url=None
+ )
# CONTINUE UPDATE
#################
# lets update it - its a recursive one too
- newdir = osp.join(sm.abspath, 'dir')
+ newdir = osp.join(sm.abspath, "dir")
os.makedirs(newdir)
# update fails if the path already exists non-empty
@@ -170,7 +169,7 @@ class TestSubmodule(TestBase):
assert not sm.module_exists()
assert sm.update() is sm
- sm_repopath = sm.path # cache for later
+ sm_repopath = sm.path # cache for later
assert sm.module_exists()
assert isinstance(sm.module(), git.Repo)
assert sm.module().working_tree_dir == sm.abspath
@@ -179,7 +178,14 @@ class TestSubmodule(TestBase):
#####################
# url must match the one in the existing repository ( if submodule name suggests a new one )
# or we raise
- self.assertRaises(ValueError, Submodule.add, rwrepo, "newsubm", sm.path, "git://someurl/repo.git")
+ self.assertRaises(
+ ValueError,
+ Submodule.add,
+ rwrepo,
+ "newsubm",
+ sm.path,
+ "git://someurl/repo.git",
+ )
# CONTINUE UPDATE
#################
@@ -197,15 +203,17 @@ class TestSubmodule(TestBase):
sm.update(recursive=False)
assert len(list(rwrepo.iter_submodules())) == 2
- assert len(sm.children()) == 1 # its not checked out yet
+ assert len(sm.children()) == 1 # its not checked out yet
csm = sm.children()[0]
assert not csm.module_exists()
csm_repopath = csm.path
# adjust the path of the submodules module to point to the local destination
- new_csmclone_path = Git.polish_url(osp.join(self.rorepo.working_tree_dir, sm.path, csm.path))
+ new_csmclone_path = Git.polish_url(
+ osp.join(self.rorepo.working_tree_dir, sm.path, csm.path)
+ )
with csm.config_writer() as writer:
- writer.set_value('url', new_csmclone_path)
+ writer.set_value("url", new_csmclone_path)
assert csm.url == new_csmclone_path
# dry-run does nothing
@@ -226,26 +234,34 @@ class TestSubmodule(TestBase):
# reset both heads to the previous version, verify that to_latest_revision works
smods = (sm.module(), csm.module())
for repo in smods:
- repo.head.reset('HEAD~2', working_tree=1)
+ repo.head.reset("HEAD~2", working_tree=1)
# END for each repo to reset
# dry run does nothing
- self.assertRaises(RepositoryDirtyError, sm.update, recursive=True, dry_run=True, progress=prog)
+ self.assertRaises(
+ RepositoryDirtyError,
+ sm.update,
+ recursive=True,
+ dry_run=True,
+ progress=prog,
+ )
sm.update(recursive=True, dry_run=True, progress=prog, force=True)
for repo in smods:
assert repo.head.commit != repo.head.ref.tracking_branch().commit
# END for each repo to check
- self.assertRaises(RepositoryDirtyError, sm.update, recursive=True, to_latest_revision=True)
+ self.assertRaises(
+ RepositoryDirtyError, sm.update, recursive=True, to_latest_revision=True
+ )
sm.update(recursive=True, to_latest_revision=True, force=True)
for repo in smods:
assert repo.head.commit == repo.head.ref.tracking_branch().commit
# END for each repo to check
- del(smods)
+ del smods
# if the head is detached, it still works ( but warns )
smref = sm.module().head.ref
- sm.module().head.ref = 'HEAD~1'
+ sm.module().head.ref = "HEAD~1"
# if there is no tracking branch, we get a warning as well
csm_tracking_branch = csm.module().head.ref.tracking_branch()
csm.module().head.ref.set_tracking_branch(None)
@@ -268,8 +284,10 @@ class TestSubmodule(TestBase):
# to GitHub. To save time, we will change it to
csm.set_parent_commit(csm.repo.head.commit)
with csm.config_writer() as cw:
- cw.set_value('url', self._small_repo_url())
- csm.repo.index.commit("adjusted URL to point to local source, instead of the internet")
+ cw.set_value("url", self._small_repo_url())
+ csm.repo.index.commit(
+ "adjusted URL to point to local source, instead of the internet"
+ )
# We have modified the configuration, hence the index is dirty, and the
# deletion will fail
@@ -301,7 +319,7 @@ class TestSubmodule(TestBase):
# but ... we have untracked files in the child submodule
fn = join_path_native(csm.module().working_tree_dir, "newfile")
- with open(fn, 'w') as fd:
+ with open(fn, "w") as fd:
fd.write("hi")
self.assertRaises(InvalidGitRepositoryError, sm.remove)
@@ -323,14 +341,14 @@ class TestSubmodule(TestBase):
sm.remove(configuration=False, force=True)
assert sm.exists()
assert not sm.module_exists()
- assert sm.config_reader().get_value('url')
+ assert sm.config_reader().get_value("url")
# delete the rest
sm_path = sm.path
sm.remove()
assert not sm.exists()
assert not sm.module_exists()
- self.assertRaises(ValueError, getattr, sm, 'path')
+ self.assertRaises(ValueError, getattr, sm, "path")
assert len(rwrepo.submodules) == 0
@@ -339,20 +357,35 @@ class TestSubmodule(TestBase):
# add a simple remote repo - trailing slashes are no problem
smid = "newsub"
osmid = "othersub"
- nsm = Submodule.add(rwrepo, smid, sm_repopath, new_smclone_path + "/", None, no_checkout=True)
+ nsm = Submodule.add(
+ rwrepo,
+ smid,
+ sm_repopath,
+ new_smclone_path + "/",
+ None,
+ no_checkout=True,
+ )
assert nsm.name == smid
assert nsm.module_exists()
assert nsm.exists()
# its not checked out
- assert not osp.isfile(join_path_native(nsm.module().working_tree_dir, Submodule.k_modules_file))
+ assert not osp.isfile(
+ join_path_native(
+ nsm.module().working_tree_dir, Submodule.k_modules_file
+ )
+ )
assert len(rwrepo.submodules) == 1
# add another submodule, but into the root, not as submodule
- osm = Submodule.add(rwrepo, osmid, csm_repopath, new_csmclone_path, Submodule.k_head_default)
+ osm = Submodule.add(
+ rwrepo, osmid, csm_repopath, new_csmclone_path, Submodule.k_head_default
+ )
assert osm != nsm
assert osm.module_exists()
assert osm.exists()
- assert osp.isfile(join_path_native(osm.module().working_tree_dir, 'setup.py'))
+ assert osp.isfile(
+ join_path_native(osm.module().working_tree_dir, "setup.py")
+ )
assert len(rwrepo.submodules) == 2
@@ -368,7 +401,9 @@ class TestSubmodule(TestBase):
# MOVE MODULE
#############
# invalid input
- self.assertRaises(ValueError, nsm.move, 'doesntmatter', module=False, configuration=False)
+ self.assertRaises(
+ ValueError, nsm.move, "doesntmatter", module=False, configuration=False
+ )
# renaming to the same path does nothing
assert nsm.move(sm_path) is nsm
@@ -377,14 +412,14 @@ class TestSubmodule(TestBase):
nmp = join_path_native("new", "module", "dir") + "/" # new module path
pmp = nsm.path
assert nsm.move(nmp) is nsm
- nmp = nmp[:-1] # cut last /
+ nmp = nmp[:-1] # cut last /
nmpl = to_native_path_linux(nmp)
assert nsm.path == nmpl
assert rwrepo.submodules[0].path == nmpl
- mpath = 'newsubmodule'
+ mpath = "newsubmodule"
absmpath = join_path_native(rwrepo.working_tree_dir, mpath)
- open(absmpath, 'w').write('')
+ open(absmpath, "w").write("")
self.assertRaises(ValueError, nsm.move, mpath)
os.remove(absmpath)
@@ -402,11 +437,19 @@ class TestSubmodule(TestBase):
for remote in osmod.remotes:
remote.remove(osmod, remote.name)
assert not osm.exists()
- self.assertRaises(ValueError, Submodule.add, rwrepo, osmid, csm_repopath, url=None)
+ self.assertRaises(
+ ValueError, Submodule.add, rwrepo, osmid, csm_repopath, url=None
+ )
# END handle bare mode
# Error if there is no submodule file here
- self.assertRaises(IOError, Submodule._config_parser, rwrepo, rwrepo.commit(self.k_no_subm_tag), True)
+ self.assertRaises(
+ IOError,
+ Submodule._config_parser,
+ rwrepo,
+ rwrepo.commit(self.k_no_subm_tag),
+ True,
+ )
# @skipIf(HIDE_WINDOWS_KNOWN_ERRORS, ## ACTUALLY skipped by `git.submodule.base#L869`.
# "FIXME: fails with: PermissionError: [WinError 32] The process cannot access the file because"
@@ -420,11 +463,14 @@ class TestSubmodule(TestBase):
def test_base_bare(self, rwrepo):
self._do_base_tests(rwrepo)
- @skipIf(HIDE_WINDOWS_KNOWN_ERRORS, """
+ @skipIf(
+ HIDE_WINDOWS_KNOWN_ERRORS,
+ """
File "C:\\projects\\gitpython\\git\\cmd.py", line 559, in execute
raise GitCommandNotFound(command, err)
git.exc.GitCommandNotFound: Cmd('git') not found due to: OSError('[WinError 6] The handle is invalid')
- cmdline: git clone -n --shared -v C:\\projects\\gitpython\\.git Users\\appveyor\\AppData\\Local\\Temp\\1\\tmplyp6kr_rnon_bare_test_root_module""") # noqa E501
+ cmdline: git clone -n --shared -v C:\\projects\\gitpython\\.git Users\\appveyor\\AppData\\Local\\Temp\\1\\tmplyp6kr_rnon_bare_test_root_module""",
+ ) # noqa E501
@with_rw_repo(k_subm_current, bare=False)
def test_root_module(self, rwrepo):
# Can query everything without problems
@@ -447,10 +493,12 @@ class TestSubmodule(TestBase):
# deep traversal gitdb / async
rsmsp = [sm.path for sm in rm.traverse()]
- assert len(rsmsp) >= 2 # gitdb and async [and smmap], async being a child of gitdb
+ assert (
+ len(rsmsp) >= 2
+ ) # gitdb and async [and smmap], async being a child of gitdb
# cannot set the parent commit as root module's path didn't exist
- self.assertRaises(ValueError, rm.set_parent_commit, 'HEAD')
+ self.assertRaises(ValueError, rm.set_parent_commit, "HEAD")
# TEST UPDATE
#############
@@ -460,16 +508,18 @@ class TestSubmodule(TestBase):
# modify path without modifying the index entry
# ( which is what the move method would do properly )
- #==================================================
+ # ==================================================
sm = rm.children()[0]
pp = "path/prefix"
fp = join_path_native(pp, sm.path)
prep = sm.path
- assert not sm.module_exists() # was never updated after rwrepo's clone
+ assert not sm.module_exists() # was never updated after rwrepo's clone
# assure we clone from a local source
with sm.config_writer() as writer:
- writer.set_value('url', Git.polish_url(osp.join(self.rorepo.working_tree_dir, sm.path)))
+ writer.set_value(
+ "url", Git.polish_url(osp.join(self.rorepo.working_tree_dir, sm.path))
+ )
# dry-run does nothing
sm.update(recursive=False, dry_run=True, progress=prog)
@@ -478,7 +528,9 @@ class TestSubmodule(TestBase):
sm.update(recursive=False)
assert sm.module_exists()
with sm.config_writer() as writer:
- writer.set_value('path', fp) # change path to something with prefix AFTER url change
+ writer.set_value(
+ "path", fp
+ ) # change path to something with prefix AFTER url change
# update doesn't fail, because list_items ignores the wrong path in such situations.
rm.update(recursive=False)
@@ -488,7 +540,7 @@ class TestSubmodule(TestBase):
self.assertRaises(InvalidGitRepositoryError, sm.move, pp)
# reset the path(cache) to where it was, now it works
sm.path = prep
- sm.move(fp, module=False) # leave it at the old location
+ sm.move(fp, module=False) # leave it at the old location
assert not sm.module_exists()
cpathchange = rwrepo.index.commit("changed sm path") # finally we can commit
@@ -499,12 +551,16 @@ class TestSubmodule(TestBase):
assert sm.module_exists()
# add submodule
- #================
+ # ================
nsmn = "newsubmodule"
nsmp = "submrepo"
- subrepo_url = Git.polish_url(osp.join(self.rorepo.working_tree_dir, rsmsp[0], rsmsp[1]))
+ subrepo_url = Git.polish_url(
+ osp.join(self.rorepo.working_tree_dir, rsmsp[0], rsmsp[1])
+ )
nsm = Submodule.add(rwrepo, nsmn, nsmp, url=subrepo_url)
- csmadded = rwrepo.index.commit("Added submodule").hexsha # make sure we don't keep the repo reference
+ csmadded = rwrepo.index.commit(
+ "Added submodule"
+ ).hexsha # make sure we don't keep the repo reference
nsm.set_parent_commit(csmadded)
assert nsm.module_exists()
# in our case, the module should not exist, which happens if we update a parent
@@ -520,11 +576,11 @@ class TestSubmodule(TestBase):
assert nsm.module_exists()
# remove submodule - the previous one
- #====================================
+ # ====================================
sm.set_parent_commit(csmadded)
smp = sm.abspath
assert not sm.remove(module=False).exists()
- assert osp.isdir(smp) # module still exists
+ assert osp.isdir(smp) # module still exists
csmremoved = rwrepo.index.commit("Removed submodule")
# an update will remove the module
@@ -535,37 +591,45 @@ class TestSubmodule(TestBase):
# when removing submodules, we may get new commits as nested submodules are auto-committing changes
# to allow deletions without force, as the index would be dirty otherwise.
# QUESTION: Why does this seem to work in test_git_submodule_compatibility() ?
- self.assertRaises(InvalidGitRepositoryError, rm.update, recursive=False, force_remove=False)
+ self.assertRaises(
+ InvalidGitRepositoryError, rm.update, recursive=False, force_remove=False
+ )
rm.update(recursive=False, force_remove=True)
assert not osp.isdir(smp)
# 'apply work' to the nested submodule and assure this is not removed/altered during updates
# Need to commit first, otherwise submodule.update wouldn't have a reason to change the head
- touch(osp.join(nsm.module().working_tree_dir, 'new-file'))
+ touch(osp.join(nsm.module().working_tree_dir, "new-file"))
# We cannot expect is_dirty to even run as we wouldn't reset a head to the same location
assert nsm.module().head.commit.hexsha == nsm.hexsha
nsm.module().index.add([nsm])
nsm.module().index.commit("added new file")
- rm.update(recursive=False, dry_run=True, progress=prog) # would not change head, and thus doesn't fail
+ rm.update(
+ recursive=False, dry_run=True, progress=prog
+ ) # would not change head, and thus doesn't fail
# Everything we can do from now on will trigger the 'future' check, so no is_dirty() check will even run
# This would only run if our local branch is in the past and we have uncommitted changes
prev_commit = nsm.module().head.commit
rm.update(recursive=False, dry_run=False, progress=prog)
- assert prev_commit == nsm.module().head.commit, "head shouldn't change, as it is in future of remote branch"
+ assert (
+ prev_commit == nsm.module().head.commit
+ ), "head shouldn't change, as it is in future of remote branch"
# this kills the new file
rm.update(recursive=True, progress=prog, force_reset=True)
- assert prev_commit != nsm.module().head.commit, "head changed, as the remote url and its commit changed"
+ assert (
+ prev_commit != nsm.module().head.commit
+ ), "head changed, as the remote url and its commit changed"
# change url ...
- #===============
+ # ===============
# ... to the first repository, this way we have a fast checkout, and a completely different
# repository at the different url
nsm.set_parent_commit(csmremoved)
nsmurl = Git.polish_url(osp.join(self.rorepo.working_tree_dir, rsmsp[0]))
with nsm.config_writer() as writer:
- writer.set_value('url', nsmurl)
+ writer.set_value("url", nsmurl)
csmpathchange = rwrepo.index.commit("changed url")
nsm.set_parent_commit(csmpathchange)
@@ -579,7 +643,9 @@ class TestSubmodule(TestBase):
assert nsm.module().remotes.origin.url == nsmurl
assert prev_commit != nsm.module().head.commit, "Should now point to gitdb"
assert len(rwrepo.submodules) == 1
- assert not rwrepo.submodules[0].children()[0].module_exists(), "nested submodule should not be checked out"
+ assert (
+ not rwrepo.submodules[0].children()[0].module_exists()
+ ), "nested submodule should not be checked out"
# add the submodule's changed commit to the index, which is what the
# user would do
@@ -588,7 +654,7 @@ class TestSubmodule(TestBase):
rwrepo.index.add([nsm])
# change branch
- #=================
+ # =================
# we only have one branch, so we switch to a virtual one, and back
# to the current one to trigger the difference
cur_branch = nsm.branch
@@ -603,7 +669,7 @@ class TestSubmodule(TestBase):
# Lets remove our tracking branch to simulate some changes
nsmmh = nsmm.head
- assert nsmmh.ref.tracking_branch() is None # never set it up until now
+ assert nsmmh.ref.tracking_branch() is None # never set it up until now
assert not nsmmh.is_detached
# dry run does nothing
@@ -625,8 +691,8 @@ class TestSubmodule(TestBase):
# assure we pull locally only
nsmc = nsm.children()[0]
with nsmc.config_writer() as writer:
- writer.set_value('url', subrepo_url)
- rm.update(recursive=True, progress=prog, dry_run=True) # just to run the code
+ writer.set_value("url", subrepo_url)
+ rm.update(recursive=True, progress=prog, dry_run=True) # just to run the code
rm.update(recursive=True, progress=prog)
# gitdb: has either 1 or 2 submodules depending on the version
@@ -636,41 +702,57 @@ class TestSubmodule(TestBase):
def test_first_submodule(self, rwrepo):
assert len(list(rwrepo.iter_submodules())) == 0
- for sm_name, sm_path in (('first', 'submodules/first'),
- ('second', osp.join(rwrepo.working_tree_dir, 'submodules/second'))):
- sm = rwrepo.create_submodule(sm_name, sm_path, rwrepo.git_dir, no_checkout=True)
+ for sm_name, sm_path in (
+ ("first", "submodules/first"),
+ ("second", osp.join(rwrepo.working_tree_dir, "submodules/second")),
+ ):
+ sm = rwrepo.create_submodule(
+ sm_name, sm_path, rwrepo.git_dir, no_checkout=True
+ )
assert sm.exists() and sm.module_exists()
rwrepo.index.commit("Added submodule " + sm_name)
# end for each submodule path to add
- self.assertRaises(ValueError, rwrepo.create_submodule, 'fail', osp.expanduser('~'))
- self.assertRaises(ValueError, rwrepo.create_submodule, 'fail-too',
- rwrepo.working_tree_dir + osp.sep)
+ self.assertRaises(
+ ValueError, rwrepo.create_submodule, "fail", osp.expanduser("~")
+ )
+ self.assertRaises(
+ ValueError,
+ rwrepo.create_submodule,
+ "fail-too",
+ rwrepo.working_tree_dir + osp.sep,
+ )
@with_rw_directory
def test_add_empty_repo(self, rwdir):
- empty_repo_dir = osp.join(rwdir, 'empty-repo')
+ empty_repo_dir = osp.join(rwdir, "empty-repo")
- parent = git.Repo.init(osp.join(rwdir, 'parent'))
+ parent = git.Repo.init(osp.join(rwdir, "parent"))
git.Repo.init(empty_repo_dir)
for checkout_mode in range(2):
- name = 'empty' + str(checkout_mode)
- self.assertRaises(ValueError, parent.create_submodule, name, name,
- url=empty_repo_dir, no_checkout=checkout_mode and True or False)
+ name = "empty" + str(checkout_mode)
+ self.assertRaises(
+ ValueError,
+ parent.create_submodule,
+ name,
+ name,
+ url=empty_repo_dir,
+ no_checkout=checkout_mode and True or False,
+ )
# end for each checkout mode
@with_rw_directory
def test_list_only_valid_submodules(self, rwdir):
- repo_path = osp.join(rwdir, 'parent')
+ repo_path = osp.join(rwdir, "parent")
repo = git.Repo.init(repo_path)
- repo.git.submodule('add', self._small_repo_url(), 'module')
+ repo.git.submodule("add", self._small_repo_url(), "module")
repo.index.commit("add submodule")
assert len(repo.submodules) == 1
# Delete the directory from submodule
- submodule_path = osp.join(repo_path, 'module')
+ submodule_path = osp.join(repo_path, "module")
shutil.rmtree(submodule_path)
repo.git.add([submodule_path])
repo.index.commit("remove submodule")
@@ -678,18 +760,20 @@ class TestSubmodule(TestBase):
repo = git.Repo(repo_path)
assert len(repo.submodules) == 0
- @skipIf(HIDE_WINDOWS_KNOWN_ERRORS,
- """FIXME on cygwin: File "C:\\projects\\gitpython\\git\\cmd.py", line 671, in execute
+ @skipIf(
+ HIDE_WINDOWS_KNOWN_ERRORS,
+ """FIXME on cygwin: File "C:\\projects\\gitpython\\git\\cmd.py", line 671, in execute
raise GitCommandError(command, status, stderr_value, stdout_value)
GitCommandError: Cmd('git') failed due to: exit code(128)
cmdline: git add 1__Xava verbXXten 1_test _myfile 1_test_other_file 1_XXava-----verbXXten
stderr: 'fatal: pathspec '"1__çava verböten"' did not match any files'
FIXME on appveyor: see https://ci.appveyor.com/project/Byron/gitpython/build/1.0.185
- """)
+ """,
+ )
@with_rw_directory
def test_git_submodules_and_add_sm_with_new_commit(self, rwdir):
- parent = git.Repo.init(osp.join(rwdir, 'parent'))
- parent.git.submodule('add', self._small_repo_url(), 'module')
+ parent = git.Repo.init(osp.join(rwdir, "parent"))
+ parent.git.submodule("add", self._small_repo_url(), "module")
parent.index.commit("added submodule")
assert len(parent.submodules) == 1
@@ -697,9 +781,11 @@ class TestSubmodule(TestBase):
assert sm.exists() and sm.module_exists()
- clone = git.Repo.clone_from(self._small_repo_url(),
- osp.join(parent.working_tree_dir, 'existing-subrepository'))
- sm2 = parent.create_submodule('nongit-file-submodule', clone.working_tree_dir)
+ clone = git.Repo.clone_from(
+ self._small_repo_url(),
+ osp.join(parent.working_tree_dir, "existing-subrepository"),
+ )
+ sm2 = parent.create_submodule("nongit-file-submodule", clone.working_tree_dir)
assert len(parent.submodules) == 2
for _ in range(2):
@@ -709,26 +795,28 @@ class TestSubmodule(TestBase):
# end for each init state
# end for each iteration
- sm.move(sm.path + '_moved')
- sm2.move(sm2.path + '_moved')
+ sm.move(sm.path + "_moved")
+ sm2.move(sm2.path + "_moved")
parent.index.commit("moved submodules")
with sm.config_writer() as writer:
- writer.set_value('user.email', 'example@example.com')
- writer.set_value('user.name', 'me')
+ writer.set_value("user.email", "example@example.com")
+ writer.set_value("user.name", "me")
smm = sm.module()
- fp = osp.join(smm.working_tree_dir, 'empty-file')
- with open(fp, 'w'):
+ fp = osp.join(smm.working_tree_dir, "empty-file")
+ with open(fp, "w"):
pass
smm.git.add(Git.polish_url(fp))
smm.git.commit(m="new file added")
# submodules are retrieved from the current commit's tree, therefore we can't really get a new submodule
# object pointing to the new submodule commit
- sm_too = parent.submodules['module_moved']
+ sm_too = parent.submodules["module_moved"]
assert parent.head.commit.tree[sm.path].binsha == sm.binsha
- assert sm_too.binsha == sm.binsha, "cached submodule should point to the same commit as updated one"
+ assert (
+ sm_too.binsha == sm.binsha
+ ), "cached submodule should point to the same commit as updated one"
added_bies = parent.index.add([sm]) # added base-index-entries
assert len(added_bies) == 1
@@ -751,21 +839,24 @@ class TestSubmodule(TestBase):
# "'C:\\Users\\appveyor\\AppData\\Local\\Temp\\1\\test_work_tree_unsupportedryfa60di\\master_repo\\.git\\objects\\pack\\pack-bc9e0787aef9f69e1591ef38ea0a6f566ec66fe3.idx") # noqa E501
@with_rw_directory
def test_git_submodule_compatibility(self, rwdir):
- parent = git.Repo.init(osp.join(rwdir, 'parent'))
- sm_path = join_path_native('submodules', 'intermediate', 'one')
- sm = parent.create_submodule('mymodules/myname', sm_path, url=self._small_repo_url())
+ parent = git.Repo.init(osp.join(rwdir, "parent"))
+ sm_path = join_path_native("submodules", "intermediate", "one")
+ sm = parent.create_submodule(
+ "mymodules/myname", sm_path, url=self._small_repo_url()
+ )
parent.index.commit("added submodule")
def assert_exists(sm, value=True):
assert sm.exists() == value
assert sm.module_exists() == value
+
# end
# As git is backwards compatible itself, it would still recognize what we do here ... unless we really
# muss it up. That's the only reason why the test is still here ... .
assert len(parent.git.submodule().splitlines()) == 1
- module_repo_path = osp.join(sm.module().working_tree_dir, '.git')
+ module_repo_path = osp.join(sm.module().working_tree_dir, ".git")
assert module_repo_path.startswith(osp.join(parent.working_tree_dir, sm_path))
if not sm._need_gitfile_submodules(parent.git):
assert osp.isdir(module_repo_path)
@@ -773,17 +864,22 @@ class TestSubmodule(TestBase):
else:
assert osp.isfile(module_repo_path)
assert sm.module().has_separate_working_tree()
- assert find_submodule_git_dir(module_repo_path) is not None, "module pointed to by .git file must be valid"
+ assert (
+ find_submodule_git_dir(module_repo_path) is not None
+ ), "module pointed to by .git file must be valid"
# end verify submodule 'style'
# test move
- new_sm_path = join_path_native('submodules', 'one')
+ new_sm_path = join_path_native("submodules", "one")
sm.move(new_sm_path)
assert_exists(sm)
# Add additional submodule level
- csm = sm.module().create_submodule('nested-submodule', join_path_native('nested-submodule', 'working-tree'),
- url=self._small_repo_url())
+ csm = sm.module().create_submodule(
+ "nested-submodule",
+ join_path_native("nested-submodule", "working-tree"),
+ url=self._small_repo_url(),
+ )
sm.module().index.commit("added nested submodule")
sm_head_commit = sm.module().commit()
assert_exists(csm)
@@ -796,24 +892,36 @@ class TestSubmodule(TestBase):
# rename nested submodule
# This name would move itself one level deeper - needs special handling internally
- new_name = csm.name + '/mine'
+ new_name = csm.name + "/mine"
assert csm.rename(new_name).name == new_name
assert_exists(csm)
- assert csm.repo.is_dirty(index=True, working_tree=False), "index must contain changed .gitmodules file"
+ assert csm.repo.is_dirty(
+ index=True, working_tree=False
+ ), "index must contain changed .gitmodules file"
csm.repo.index.commit("renamed module")
# keep_going evaluation
rsm = parent.submodule_update()
assert_exists(sm)
assert_exists(csm)
- with csm.config_writer().set_value('url', 'bar'):
+ with csm.config_writer().set_value("url", "bar"):
pass
- csm.repo.index.commit("Have to commit submodule change for algorithm to pick it up")
- assert csm.url == 'bar'
-
- self.assertRaises(Exception, rsm.update, recursive=True, to_latest_revision=True, progress=prog)
+ csm.repo.index.commit(
+ "Have to commit submodule change for algorithm to pick it up"
+ )
+ assert csm.url == "bar"
+
+ self.assertRaises(
+ Exception,
+ rsm.update,
+ recursive=True,
+ to_latest_revision=True,
+ progress=prog,
+ )
assert_exists(csm)
- rsm.update(recursive=True, to_latest_revision=True, progress=prog, keep_going=True)
+ rsm.update(
+ recursive=True, to_latest_revision=True, progress=prog, keep_going=True
+ )
# remove
sm_module_path = sm.module().git_dir
@@ -826,8 +934,8 @@ class TestSubmodule(TestBase):
@with_rw_directory
def test_remove_norefs(self, rwdir):
- parent = git.Repo.init(osp.join(rwdir, 'parent'))
- sm_name = 'mymodules/myname'
+ parent = git.Repo.init(osp.join(rwdir, "parent"))
+ sm_name = "mymodules/myname"
sm = parent.create_submodule(sm_name, sm_name, url=self._small_repo_url())
assert sm.exists()
@@ -835,24 +943,26 @@ class TestSubmodule(TestBase):
assert sm.repo is parent # yoh was surprised since expected sm repo!!
# so created a new instance for submodule
- smrepo = git.Repo(osp.join(rwdir, 'parent', sm.path))
+ smrepo = git.Repo(osp.join(rwdir, "parent", sm.path))
# Adding a remote without fetching so would have no references
- smrepo.create_remote('special', 'git@server-shouldnotmatter:repo.git')
+ smrepo.create_remote("special", "git@server-shouldnotmatter:repo.git")
# And we should be able to remove it just fine
sm.remove()
assert not sm.exists()
@with_rw_directory
def test_rename(self, rwdir):
- parent = git.Repo.init(osp.join(rwdir, 'parent'))
- sm_name = 'mymodules/myname'
+ parent = git.Repo.init(osp.join(rwdir, "parent"))
+ sm_name = "mymodules/myname"
sm = parent.create_submodule(sm_name, sm_name, url=self._small_repo_url())
parent.index.commit("Added submodule")
assert sm.rename(sm_name) is sm and sm.name == sm_name
- assert not sm.repo.is_dirty(index=True, working_tree=False, untracked_files=False)
+ assert not sm.repo.is_dirty(
+ index=True, working_tree=False, untracked_files=False
+ )
- new_path = 'renamed/myname'
+ new_path = "renamed/myname"
assert sm.move(new_path).name == new_path
new_sm_name = "shortname"
@@ -861,8 +971,12 @@ class TestSubmodule(TestBase):
assert sm.exists()
sm_mod = sm.module()
- if osp.isfile(osp.join(sm_mod.working_tree_dir, '.git')) == sm._need_gitfile_submodules(parent.git):
- assert sm_mod.git_dir.endswith(join_path_native('.git', 'modules', new_sm_name))
+ if osp.isfile(
+ osp.join(sm_mod.working_tree_dir, ".git")
+ ) == sm._need_gitfile_submodules(parent.git):
+ assert sm_mod.git_dir.endswith(
+ join_path_native(".git", "modules", new_sm_name)
+ )
# end
@with_rw_directory
@@ -870,33 +984,41 @@ class TestSubmodule(TestBase):
# Setup initial sandbox:
# parent repo has one submodule, which has all the latest changes
source_url = self._small_repo_url()
- sm_source_repo = git.Repo.clone_from(source_url, osp.join(rw_dir, 'sm-source'), b='master')
- parent_repo = git.Repo.init(osp.join(rw_dir, 'parent'))
- sm = parent_repo.create_submodule('mysubmodule', 'subdir/submodule',
- sm_source_repo.working_tree_dir, branch='master')
- parent_repo.index.commit('added submodule')
+ sm_source_repo = git.Repo.clone_from(
+ source_url, osp.join(rw_dir, "sm-source"), b="master"
+ )
+ parent_repo = git.Repo.init(osp.join(rw_dir, "parent"))
+ sm = parent_repo.create_submodule(
+ "mysubmodule",
+ "subdir/submodule",
+ sm_source_repo.working_tree_dir,
+ branch="master",
+ )
+ parent_repo.index.commit("added submodule")
assert sm.exists()
# Create feature branch with one new commit in submodule source
- sm_fb = sm_source_repo.create_head('feature')
+ sm_fb = sm_source_repo.create_head("feature")
sm_fb.checkout()
- new_file = touch(osp.join(sm_source_repo.working_tree_dir, 'new-file'))
+ new_file = touch(osp.join(sm_source_repo.working_tree_dir, "new-file"))
sm_source_repo.index.add([new_file])
sm.repo.index.commit("added new file")
# change designated submodule checkout branch to the new upstream feature branch
with sm.config_writer() as smcw:
- smcw.set_value('branch', sm_fb.name)
+ smcw.set_value("branch", sm_fb.name)
assert sm.repo.is_dirty(index=True, working_tree=False)
sm.repo.index.commit("changed submodule branch to '%s'" % sm_fb)
# verify submodule update with feature branch that leaves currently checked out branch in it's past
sm_mod = sm.module()
prev_commit = sm_mod.commit()
- assert sm_mod.head.ref.name == 'master'
+ assert sm_mod.head.ref.name == "master"
assert parent_repo.submodule_update()
assert sm_mod.head.ref.name == sm_fb.name
- assert sm_mod.commit() == prev_commit, "Without to_latest_revision, we don't change the commit"
+ assert (
+ sm_mod.commit() == prev_commit
+ ), "Without to_latest_revision, we don't change the commit"
assert parent_repo.submodule_update(to_latest_revision=True)
assert sm_mod.head.ref.name == sm_fb.name
@@ -904,25 +1026,29 @@ class TestSubmodule(TestBase):
# Create new branch which is in our past, and thus seemingly unrelated to the currently checked out one
# To make it even 'harder', we shall fork and create a new commit
- sm_pfb = sm_source_repo.create_head('past-feature', commit='HEAD~20')
+ sm_pfb = sm_source_repo.create_head("past-feature", commit="HEAD~20")
sm_pfb.checkout()
- sm_source_repo.index.add([touch(osp.join(sm_source_repo.working_tree_dir, 'new-file'))])
+ sm_source_repo.index.add(
+ [touch(osp.join(sm_source_repo.working_tree_dir, "new-file"))]
+ )
sm_source_repo.index.commit("new file added, to past of '%r'" % sm_fb)
# Change designated submodule checkout branch to a new commit in its own past
with sm.config_writer() as smcw:
- smcw.set_value('branch', sm_pfb.path)
+ smcw.set_value("branch", sm_pfb.path)
sm.repo.index.commit("changed submodule branch to '%s'" % sm_pfb)
# Test submodule updates - must fail if submodule is dirty
- touch(osp.join(sm_mod.working_tree_dir, 'unstaged file'))
+ touch(osp.join(sm_mod.working_tree_dir, "unstaged file"))
# This doesn't fail as our own submodule binsha didn't change, and the reset is only triggered if
# to latest revision is True.
parent_repo.submodule_update(to_latest_revision=False)
sm_mod.head.ref.name == sm_pfb.name, "should have been switched to past head"
sm_mod.commit() == sm_fb.commit, "Head wasn't reset"
- self.assertRaises(RepositoryDirtyError, parent_repo.submodule_update, to_latest_revision=True)
+ self.assertRaises(
+ RepositoryDirtyError, parent_repo.submodule_update, to_latest_revision=True
+ )
parent_repo.submodule_update(to_latest_revision=True, force_reset=True)
assert sm_mod.commit() == sm_pfb.commit, "Now head should have been reset"
assert sm_mod.head.ref.name == sm_pfb.name
@@ -930,83 +1056,116 @@ class TestSubmodule(TestBase):
@skipIf(not is_win, "Specifically for Windows.")
def test_to_relative_path_with_super_at_root_drive(self):
class Repo(object):
- working_tree_dir = 'D:\\'
+ working_tree_dir = "D:\\"
+
super_repo = Repo()
- submodule_path = 'D:\\submodule_path'
+ submodule_path = "D:\\submodule_path"
relative_path = Submodule._to_relative_path(super_repo, submodule_path)
- msg = '_to_relative_path should be "submodule_path" but was "%s"' % relative_path
- assert relative_path == 'submodule_path', msg
-
- @skipIf(True, 'for some unknown reason the assertion fails, even though it in fact is working in more common setup')
+ msg = (
+ '_to_relative_path should be "submodule_path" but was "%s"' % relative_path
+ )
+ assert relative_path == "submodule_path", msg
+
+ @skipIf(
+ True,
+ "for some unknown reason the assertion fails, even though it in fact is working in more common setup",
+ )
@with_rw_directory
def test_depth(self, rwdir):
- parent = git.Repo.init(osp.join(rwdir, 'test_depth'))
- sm_name = 'mymodules/myname'
+ parent = git.Repo.init(osp.join(rwdir, "test_depth"))
+ sm_name = "mymodules/myname"
sm_depth = 1
- sm = parent.create_submodule(sm_name, sm_name, url=self._small_repo_url(), depth=sm_depth)
+ sm = parent.create_submodule(
+ sm_name, sm_name, url=self._small_repo_url(), depth=sm_depth
+ )
self.assertEqual(len(list(sm.module().iter_commits())), sm_depth)
@with_rw_directory
def test_update_clone_multi_options_argument(self, rwdir):
- #Arrange
- parent = git.Repo.init(osp.join(rwdir, 'parent'))
- sm_name = 'foo'
+ # Arrange
+ parent = git.Repo.init(osp.join(rwdir, "parent"))
+ sm_name = "foo"
sm_url = self._small_repo_url()
- sm_branch = 'refs/heads/master'
+ sm_branch = "refs/heads/master"
sm_hexsha = git.Repo(self._small_repo_url()).head.commit.hexsha
- sm = Submodule(parent, bytes.fromhex(sm_hexsha), name=sm_name, path=sm_name, url=sm_url,
- branch_path=sm_branch)
-
- #Act
- sm.update(init=True, clone_multi_options=['--config core.eol=true'])
-
- #Assert
- sm_config = GitConfigParser(file_or_files=osp.join(parent.git_dir, 'modules', sm_name, 'config'))
- self.assertTrue(sm_config.get_value('core', 'eol'))
+ sm = Submodule(
+ parent,
+ bytes.fromhex(sm_hexsha),
+ name=sm_name,
+ path=sm_name,
+ url=sm_url,
+ branch_path=sm_branch,
+ )
+
+ # Act
+ sm.update(init=True, clone_multi_options=["--config core.eol=true"])
+
+ # Assert
+ sm_config = GitConfigParser(
+ file_or_files=osp.join(parent.git_dir, "modules", sm_name, "config")
+ )
+ self.assertTrue(sm_config.get_value("core", "eol"))
@with_rw_directory
def test_update_no_clone_multi_options_argument(self, rwdir):
- #Arrange
- parent = git.Repo.init(osp.join(rwdir, 'parent'))
- sm_name = 'foo'
+ # Arrange
+ parent = git.Repo.init(osp.join(rwdir, "parent"))
+ sm_name = "foo"
sm_url = self._small_repo_url()
- sm_branch = 'refs/heads/master'
+ sm_branch = "refs/heads/master"
sm_hexsha = git.Repo(self._small_repo_url()).head.commit.hexsha
- sm = Submodule(parent, bytes.fromhex(sm_hexsha), name=sm_name, path=sm_name, url=sm_url,
- branch_path=sm_branch)
-
- #Act
+ sm = Submodule(
+ parent,
+ bytes.fromhex(sm_hexsha),
+ name=sm_name,
+ path=sm_name,
+ url=sm_url,
+ branch_path=sm_branch,
+ )
+
+ # Act
sm.update(init=True)
- #Assert
- sm_config = GitConfigParser(file_or_files=osp.join(parent.git_dir, 'modules', sm_name, 'config'))
+ # Assert
+ sm_config = GitConfigParser(
+ file_or_files=osp.join(parent.git_dir, "modules", sm_name, "config")
+ )
with self.assertRaises(cp.NoOptionError):
- sm_config.get_value('core', 'eol')
+ sm_config.get_value("core", "eol")
@with_rw_directory
def test_add_clone_multi_options_argument(self, rwdir):
- #Arrange
- parent = git.Repo.init(osp.join(rwdir, 'parent'))
- sm_name = 'foo'
-
- #Act
- Submodule.add(parent, sm_name, sm_name, url=self._small_repo_url(),
- clone_multi_options=['--config core.eol=true'])
-
- #Assert
- sm_config = GitConfigParser(file_or_files=osp.join(parent.git_dir, 'modules', sm_name, 'config'))
- self.assertTrue(sm_config.get_value('core', 'eol'))
+ # Arrange
+ parent = git.Repo.init(osp.join(rwdir, "parent"))
+ sm_name = "foo"
+
+ # Act
+ Submodule.add(
+ parent,
+ sm_name,
+ sm_name,
+ url=self._small_repo_url(),
+ clone_multi_options=["--config core.eol=true"],
+ )
+
+ # Assert
+ sm_config = GitConfigParser(
+ file_or_files=osp.join(parent.git_dir, "modules", sm_name, "config")
+ )
+ self.assertTrue(sm_config.get_value("core", "eol"))
@with_rw_directory
def test_add_no_clone_multi_options_argument(self, rwdir):
- #Arrange
- parent = git.Repo.init(osp.join(rwdir, 'parent'))
- sm_name = 'foo'
+ # Arrange
+ parent = git.Repo.init(osp.join(rwdir, "parent"))
+ sm_name = "foo"
- #Act
+ # Act
Submodule.add(parent, sm_name, sm_name, url=self._small_repo_url())
- #Assert
- sm_config = GitConfigParser(file_or_files=osp.join(parent.git_dir, 'modules', sm_name, 'config'))
+ # Assert
+ sm_config = GitConfigParser(
+ file_or_files=osp.join(parent.git_dir, "modules", sm_name, "config")
+ )
with self.assertRaises(cp.NoOptionError):
- sm_config.get_value('core', 'eol')
+ sm_config.get_value("core", "eol")
diff --git a/test/test_tree.py b/test/test_tree.py
index 24c401cb..97067fb2 100644
--- a/test/test_tree.py
+++ b/test/test_tree.py
@@ -7,10 +7,7 @@
from io import BytesIO
from unittest import skipIf
-from git.objects import (
- Tree,
- Blob
-)
+from git.objects import Tree, Blob
from test.lib import TestBase
from git.util import HIDE_WINDOWS_KNOWN_ERRORS
@@ -18,22 +15,24 @@ import os.path as osp
class TestTree(TestBase):
-
- @skipIf(HIDE_WINDOWS_KNOWN_ERRORS, """
+ @skipIf(
+ HIDE_WINDOWS_KNOWN_ERRORS,
+ """
File "C:\\projects\\gitpython\\git\\cmd.py", line 559, in execute
raise GitCommandNotFound(command, err)
git.exc.GitCommandNotFound: Cmd('git') not found due to: OSError('[WinError 6] The handle is invalid')
- cmdline: git cat-file --batch-check""")
+ cmdline: git cat-file --batch-check""",
+ )
def test_serializable(self):
# tree at the given commit contains a submodule as well
- roottree = self.rorepo.tree('6c1faef799095f3990e9970bc2cb10aa0221cf9c')
+ roottree = self.rorepo.tree("6c1faef799095f3990e9970bc2cb10aa0221cf9c")
for item in roottree.traverse(ignore_self=False):
if item.type != Tree.type:
continue
# END skip non-trees
tree = item
# trees have no dict
- self.assertRaises(AttributeError, setattr, tree, 'someattr', 1)
+ self.assertRaises(AttributeError, setattr, tree, "someattr", 1)
orig_data = tree.data_stream.read()
orig_cache = tree._cache
@@ -43,22 +42,25 @@ class TestTree(TestBase):
assert stream.getvalue() == orig_data
stream.seek(0)
- testtree = Tree(self.rorepo, Tree.NULL_BIN_SHA, 0, '')
+ testtree = Tree(self.rorepo, Tree.NULL_BIN_SHA, 0, "")
testtree._deserialize(stream)
assert testtree._cache == orig_cache
# replaces cache, but we make sure of it
- del(testtree._cache)
+ del testtree._cache
testtree._deserialize(stream)
# END for each item in tree
- @skipIf(HIDE_WINDOWS_KNOWN_ERRORS, """
+ @skipIf(
+ HIDE_WINDOWS_KNOWN_ERRORS,
+ """
File "C:\\projects\\gitpython\\git\\cmd.py", line 559, in execute
raise GitCommandNotFound(command, err)
git.exc.GitCommandNotFound: Cmd('git') not found due to: OSError('[WinError 6] The handle is invalid')
- cmdline: git cat-file --batch-check""")
+ cmdline: git cat-file --batch-check""",
+ )
def test_traverse(self):
- root = self.rorepo.tree('0.1.6')
+ root = self.rorepo.tree("0.1.6")
num_recursive = 0
all_items = []
for obj in root.traverse():
@@ -72,7 +74,7 @@ class TestTree(TestBase):
# limit recursion level to 0 - should be same as default iteration
assert all_items
- assert 'CHANGES' in root
+ assert "CHANGES" in root
assert len(list(root)) == len(list(root.traverse(depth=1)))
# only choose trees
@@ -87,7 +89,9 @@ class TestTree(TestBase):
# trees and blobs
assert len(set(trees) | set(root.trees)) == len(trees)
- assert len({b for b in root if isinstance(b, Blob)} | set(root.blobs)) == len(root.blobs)
+ assert len({b for b in root if isinstance(b, Blob)} | set(root.blobs)) == len(
+ root.blobs
+ )
subitem = trees[0][0]
assert "/" in subitem.path
assert subitem.name == osp.basename(subitem.path)
@@ -96,7 +100,7 @@ class TestTree(TestBase):
found_slash = False
for item in root.traverse():
assert osp.isabs(item.abspath)
- if '/' in item.path:
+ if "/" in item.path:
found_slash = True
# END check for slash
diff --git a/test/test_util.py b/test/test_util.py
index a213b46c..b2903620 100644
--- a/test/test_util.py
+++ b/test/test_util.py
@@ -21,7 +21,8 @@ from git.objects.util import (
verify_utctz,
parse_date,
tzoffset,
- from_timestamp)
+ from_timestamp,
+)
from test.lib import (
TestBase,
with_rw_repo,
@@ -39,35 +40,31 @@ from git.util import (
_norm_cygpath_pairs = (
- (r'foo\bar', 'foo/bar'),
- (r'foo/bar', 'foo/bar'),
-
- (r'C:\Users', '/cygdrive/c/Users'),
- (r'C:\d/e', '/cygdrive/c/d/e'),
-
- ('C:\\', '/cygdrive/c/'),
-
- (r'\\server\C$\Users', '//server/C$/Users'),
- (r'\\server\C$', '//server/C$'),
- ('\\\\server\\c$\\', '//server/c$/'),
- (r'\\server\BAR/', '//server/BAR/'),
-
- (r'D:/Apps', '/cygdrive/d/Apps'),
- (r'D:/Apps\fOO', '/cygdrive/d/Apps/fOO'),
- (r'D:\Apps/123', '/cygdrive/d/Apps/123'),
+ (r"foo\bar", "foo/bar"),
+ (r"foo/bar", "foo/bar"),
+ (r"C:\Users", "/cygdrive/c/Users"),
+ (r"C:\d/e", "/cygdrive/c/d/e"),
+ ("C:\\", "/cygdrive/c/"),
+ (r"\\server\C$\Users", "//server/C$/Users"),
+ (r"\\server\C$", "//server/C$"),
+ ("\\\\server\\c$\\", "//server/c$/"),
+ (r"\\server\BAR/", "//server/BAR/"),
+ (r"D:/Apps", "/cygdrive/d/Apps"),
+ (r"D:/Apps\fOO", "/cygdrive/d/Apps/fOO"),
+ (r"D:\Apps/123", "/cygdrive/d/Apps/123"),
)
_unc_cygpath_pairs = (
- (r'\\?\a:\com', '/cygdrive/a/com'),
- (r'\\?\a:/com', '/cygdrive/a/com'),
-
- (r'\\?\UNC\server\D$\Apps', '//server/D$/Apps'),
+ (r"\\?\a:\com", "/cygdrive/a/com"),
+ (r"\\?\a:/com", "/cygdrive/a/com"),
+ (r"\\?\UNC\server\D$\Apps", "//server/D$/Apps"),
)
class TestIterableMember(object):
"""A member of an iterable list"""
+
__slots__ = "name"
def __init__(self, name):
@@ -79,7 +76,6 @@ class TestIterableMember(object):
@ddt.ddt
class TestUtils(TestBase):
-
def setup(self):
self.testdict = {
"string": "42",
@@ -96,11 +92,11 @@ class TestUtils(TestBase):
@skipIf(not is_win, "Paths specifically for Windows.")
@ddt.data(
- (r'./bar', 'bar'),
- (r'.\bar', 'bar'),
- (r'../bar', '../bar'),
- (r'..\bar', '../bar'),
- (r'../bar/.\foo/../chu', '../bar/chu'),
+ (r"./bar", "bar"),
+ (r".\bar", "bar"),
+ (r"../bar", "../bar"),
+ (r"..\bar", "../bar"),
+ (r"../bar/.\foo/../chu", "../bar/chu"),
)
def test_cygpath_norm_ok(self, case):
wpath, cpath = case
@@ -109,27 +105,27 @@ class TestUtils(TestBase):
@skipIf(not is_win, "Paths specifically for Windows.")
@ddt.data(
- r'C:',
- r'C:Relative',
- r'D:Apps\123',
- r'D:Apps/123',
- r'\\?\a:rel',
- r'\\share\a:rel',
+ r"C:",
+ r"C:Relative",
+ r"D:Apps\123",
+ r"D:Apps/123",
+ r"\\?\a:rel",
+ r"\\share\a:rel",
)
def test_cygpath_invalids(self, wpath):
cwpath = cygpath(wpath)
- self.assertEqual(cwpath, wpath.replace('\\', '/'), wpath)
+ self.assertEqual(cwpath, wpath.replace("\\", "/"), wpath)
@skipIf(not is_win, "Paths specifically for Windows.")
@ddt.idata(_norm_cygpath_pairs)
def test_decygpath(self, case):
wpath, cpath = case
wcpath = decygpath(cpath)
- self.assertEqual(wcpath, wpath.replace('/', '\\'), cpath)
+ self.assertEqual(wcpath, wpath.replace("/", "\\"), cpath)
def test_it_should_dashify(self):
- self.assertEqual('this-is-my-argument', dashify('this_is_my_argument'))
- self.assertEqual('foo', dashify('foo'))
+ self.assertEqual("this-is-my-argument", dashify("this_is_my_argument"))
+ self.assertEqual("foo", dashify("foo"))
def test_lock_file(self):
my_file = tempfile.mktemp()
@@ -154,7 +150,7 @@ class TestUtils(TestBase):
self.assertRaises(IOError, lock_file._obtain_lock_or_raise)
# auto-release on destruction
- del(other_lock_file)
+ del other_lock_file
lock_file._obtain_lock_or_raise()
lock_file._release_lock()
@@ -176,12 +172,18 @@ class TestUtils(TestBase):
self.assertLess(elapsed, wait_time + extra_time)
def test_user_id(self):
- self.assertIn('@', get_user_id())
+ self.assertIn("@", get_user_id())
def test_parse_date(self):
# parse_date(from_timestamp()) must return the tuple unchanged
- for timestamp, offset in (1522827734, -7200), (1522827734, 0), (1522827734, +3600):
- self.assertEqual(parse_date(from_timestamp(timestamp, offset)), (timestamp, offset))
+ for timestamp, offset in (
+ (1522827734, -7200),
+ (1522827734, 0),
+ (1522827734, +3600),
+ ):
+ self.assertEqual(
+ parse_date(from_timestamp(timestamp, offset)), (timestamp, offset)
+ )
# test all supported formats
def assert_rval(rval, veri_time, offset=0):
@@ -195,6 +197,7 @@ class TestUtils(TestBase):
utctz = altz_to_utctz_str(offset)
self.assertIsInstance(utctz, str)
self.assertEqual(utctz_to_altz(verify_utctz(utctz)), offset)
+
# END assert rval utility
rfc = ("Thu, 07 Apr 2005 22:13:11 +0000", 0)
@@ -203,16 +206,16 @@ class TestUtils(TestBase):
iso3 = ("2005.04.07 22:13:11 -0000", 0)
alt = ("04/07/2005 22:13:11", 0)
alt2 = ("07.04.2005 22:13:11", 0)
- veri_time_utc = 1112911991 # the time this represents, in time since epoch, UTC
+ veri_time_utc = 1112911991 # the time this represents, in time since epoch, UTC
for date, offset in (rfc, iso, iso2, iso3, alt, alt2):
assert_rval(parse_date(date), veri_time_utc, offset)
# END for each date type
# and failure
self.assertRaises(ValueError, parse_date, datetime.now()) # non-aware datetime
- self.assertRaises(ValueError, parse_date, 'invalid format')
- self.assertRaises(ValueError, parse_date, '123456789 -02000')
- self.assertRaises(ValueError, parse_date, ' 123456789 -0200')
+ self.assertRaises(ValueError, parse_date, "invalid format")
+ self.assertRaises(ValueError, parse_date, "123456789 -02000")
+ self.assertRaises(ValueError, parse_date, " 123456789 -0200")
def test_actor(self):
for cr in (None, self.rorepo.config_reader()):
@@ -220,23 +223,23 @@ class TestUtils(TestBase):
self.assertIsInstance(Actor.author(cr), Actor)
# END assure config reader is handled
- @with_rw_repo('HEAD')
+ @with_rw_repo("HEAD")
@mock.patch("getpass.getuser")
def test_actor_get_uid_laziness_not_called(self, rwrepo, mock_get_uid):
with rwrepo.config_writer() as cw:
cw.set_value("user", "name", "John Config Doe")
cw.set_value("user", "email", "jcdoe@example.com")
-
+
cr = rwrepo.config_reader()
committer = Actor.committer(cr)
author = Actor.author(cr)
-
- self.assertEqual(committer.name, 'John Config Doe')
- self.assertEqual(committer.email, 'jcdoe@example.com')
- self.assertEqual(author.name, 'John Config Doe')
- self.assertEqual(author.email, 'jcdoe@example.com')
+
+ self.assertEqual(committer.name, "John Config Doe")
+ self.assertEqual(committer.email, "jcdoe@example.com")
+ self.assertEqual(author.name, "John Config Doe")
+ self.assertEqual(author.email, "jcdoe@example.com")
self.assertFalse(mock_get_uid.called)
-
+
env = {
"GIT_AUTHOR_NAME": "John Doe",
"GIT_AUTHOR_EMAIL": "jdoe@example.com",
@@ -247,10 +250,10 @@ class TestUtils(TestBase):
for cr in (None, rwrepo.config_reader()):
committer = Actor.committer(cr)
author = Actor.author(cr)
- self.assertEqual(committer.name, 'Jane Doe')
- self.assertEqual(committer.email, 'jane@example.com')
- self.assertEqual(author.name, 'John Doe')
- self.assertEqual(author.email, 'jdoe@example.com')
+ self.assertEqual(committer.name, "Jane Doe")
+ self.assertEqual(committer.email, "jane@example.com")
+ self.assertEqual(author.name, "John Doe")
+ self.assertEqual(author.email, "jdoe@example.com")
self.assertFalse(mock_get_uid.called)
@mock.patch("getpass.getuser")
@@ -260,20 +263,22 @@ class TestUtils(TestBase):
author = Actor.author(None)
# We can't test with `self.rorepo.config_reader()` here, as the uuid laziness
# depends on whether the user running the test has their global user.name config set.
- self.assertEqual(committer.name, 'user')
- self.assertTrue(committer.email.startswith('user@'))
- self.assertEqual(author.name, 'user')
- self.assertTrue(committer.email.startswith('user@'))
+ self.assertEqual(committer.name, "user")
+ self.assertTrue(committer.email.startswith("user@"))
+ self.assertEqual(author.name, "user")
+ self.assertTrue(committer.email.startswith("user@"))
self.assertTrue(mock_get_uid.called)
self.assertEqual(mock_get_uid.call_count, 2)
def test_actor_from_string(self):
self.assertEqual(Actor._from_string("name"), Actor("name", None))
self.assertEqual(Actor._from_string("name <>"), Actor("name", ""))
- self.assertEqual(Actor._from_string("name last another <some-very-long-email@example.com>"),
- Actor("name last another", "some-very-long-email@example.com"))
+ self.assertEqual(
+ Actor._from_string("name last another <some-very-long-email@example.com>"),
+ Actor("name last another", "some-very-long-email@example.com"),
+ )
- @ddt.data(('name', ''), ('name', 'prefix_'))
+ @ddt.data(("name", ""), ("name", "prefix_"))
def test_iterable_list(self, case):
name, prefix = case
ilist = IterableList(name, prefix)
@@ -292,7 +297,7 @@ class TestUtils(TestBase):
self.assertIn(name2, ilist)
self.assertIn(m2, ilist)
self.assertIn(m2, ilist)
- self.assertNotIn('invalid', ilist)
+ self.assertNotIn("invalid", ilist)
# with string index
self.assertIs(ilist[name1], m1)
@@ -307,34 +312,43 @@ class TestUtils(TestBase):
self.assertIs(ilist.two, m2)
# test exceptions
- self.assertRaises(AttributeError, getattr, ilist, 'something')
- self.assertRaises(IndexError, ilist.__getitem__, 'something')
+ self.assertRaises(AttributeError, getattr, ilist, "something")
+ self.assertRaises(IndexError, ilist.__getitem__, "something")
# delete by name and index
- self.assertRaises(IndexError, ilist.__delitem__, 'something')
- del(ilist[name2])
+ self.assertRaises(IndexError, ilist.__delitem__, "something")
+ del ilist[name2]
self.assertEqual(len(ilist), 1)
self.assertNotIn(name2, ilist)
self.assertIn(name1, ilist)
- del(ilist[0])
+ del ilist[0]
self.assertNotIn(name1, ilist)
self.assertEqual(len(ilist), 0)
self.assertRaises(IndexError, ilist.__delitem__, 0)
- self.assertRaises(IndexError, ilist.__delitem__, 'something')
+ self.assertRaises(IndexError, ilist.__delitem__, "something")
def test_from_timestamp(self):
# Correct offset: UTC+2, should return datetime + tzoffset(+2)
- altz = utctz_to_altz('+0200')
- self.assertEqual(datetime.fromtimestamp(1522827734, tzoffset(altz)), from_timestamp(1522827734, altz))
+ altz = utctz_to_altz("+0200")
+ self.assertEqual(
+ datetime.fromtimestamp(1522827734, tzoffset(altz)),
+ from_timestamp(1522827734, altz),
+ )
# Wrong offset: UTC+58, should return datetime + tzoffset(UTC)
- altz = utctz_to_altz('+5800')
- self.assertEqual(datetime.fromtimestamp(1522827734, tzoffset(0)), from_timestamp(1522827734, altz))
+ altz = utctz_to_altz("+5800")
+ self.assertEqual(
+ datetime.fromtimestamp(1522827734, tzoffset(0)),
+ from_timestamp(1522827734, altz),
+ )
# Wrong offset: UTC-9000, should return datetime + tzoffset(UTC)
- altz = utctz_to_altz('-9000')
- self.assertEqual(datetime.fromtimestamp(1522827734, tzoffset(0)), from_timestamp(1522827734, altz))
+ altz = utctz_to_altz("-9000")
+ self.assertEqual(
+ datetime.fromtimestamp(1522827734, tzoffset(0)),
+ from_timestamp(1522827734, altz),
+ )
def test_pickle_tzoffset(self):
t1 = tzoffset(555)
@@ -345,7 +359,9 @@ class TestUtils(TestBase):
def test_remove_password_from_command_line(self):
username = "fakeuser"
password = "fakepassword1234"
- url_with_user_and_pass = "https://{}:{}@fakerepo.example.com/testrepo".format(username, password)
+ url_with_user_and_pass = "https://{}:{}@fakerepo.example.com/testrepo".format(
+ username, password
+ )
url_with_user = "https://{}@fakerepo.example.com/testrepo".format(username)
url_with_pass = "https://:{}@fakerepo.example.com/testrepo".format(password)
url_without_user_or_pass = "https://fakerepo.example.com/testrepo"
diff --git a/test/tstrunner.py b/test/tstrunner.py
index a3bcfa3c..441050c6 100644
--- a/test/tstrunner.py
+++ b/test/tstrunner.py
@@ -1,6 +1,7 @@
import unittest
+
loader = unittest.TestLoader()
-start_dir = '.'
+start_dir = "."
suite = loader.discover(start_dir)
runner = unittest.TextTestRunner()