summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorLubomir Rintel <lkundrak@v3.sk>2023-03-23 12:43:12 +0100
committerLubomir Rintel <lkundrak@v3.sk>2023-03-23 12:43:12 +0100
commitb290e91551d566b92151180950fda29a1f7be725 (patch)
treee8730ab74b8d0382b885d717acded23e97b53d18
parentd001bae6bd4cdc379d66048c9464c8afcfe0bc53 (diff)
parentf07da04cd9f16ac9e90d3d57d970ac935ad87b4d (diff)
downloadNetworkManager-b290e91551d566b92151180950fda29a1f7be725.tar.gz
merge: branch 'lr/cloud-setup-test'
https://gitlab.freedesktop.org/NetworkManager/NetworkManager/-/merge_requests/1554
-rw-r--r--Makefile.am10
-rw-r--r--src/nm-cloud-setup/nm-http-client.c3
-rw-r--r--src/tests/client/meson.build18
-rwxr-xr-xsrc/tests/client/test-client.py599
-rwxr-xr-xsrc/tests/client/test-client.sh5
-rwxr-xr-xtools/test-cloud-meta-mock.py90
-rwxr-xr-xtools/test-networkmanager-service.py44
7 files changed, 541 insertions, 228 deletions
diff --git a/Makefile.am b/Makefile.am
index 6fc3d035b6..cd30c7201e 100644
--- a/Makefile.am
+++ b/Makefile.am
@@ -5468,10 +5468,17 @@ endif
###############################################################################
check-local-tests-client: src/nmcli/nmcli src/tests/client/test-client.py
- LIBTOOL="$(LIBTOOL)" "$(srcdir)/src/tests/client/test-client.sh" "$(builddir)" "$(srcdir)" "$(PYTHON)" --
+ LIBTOOL="$(LIBTOOL)" "$(srcdir)/src/tests/client/test-client.sh" "$(builddir)" "$(srcdir)" "$(PYTHON)" -- TestNmcli
check_local += check-local-tests-client
+check-local-tests-cloud-setup: src/nm-cloud-setup/nm-cloud-setup src/tests/client/test-client.py
+ LIBTOOL="$(LIBTOOL)" "$(srcdir)/src/tests/client/test-client.sh" "$(builddir)" "$(srcdir)" "$(PYTHON)" -- TestNmCloudSetup
+
+if BUILD_NM_CLOUD_SETUP
+check_local += check-local-tests-cloud-setup
+endif
+
CLEANFILES += src/tests/client/test-client.log
EXTRA_DIST += \
@@ -5812,6 +5819,7 @@ EXTRA_DIST += \
tools/enums-to-docbook.pl \
tools/meson-post-install.sh \
tools/run-nm-test.sh \
+ tools/test-cloud-meta-mock.py \
tools/test-networkmanager-service.py \
tools/test-sudo-wrapper.sh \
\
diff --git a/src/nm-cloud-setup/nm-http-client.c b/src/nm-cloud-setup/nm-http-client.c
index 6d37beeefb..49f893e281 100644
--- a/src/nm-cloud-setup/nm-http-client.c
+++ b/src/nm-cloud-setup/nm-http-client.c
@@ -290,7 +290,7 @@ nm_http_client_req(NMHttpClient *self,
nmcs_wait_for_objects_register(edata->task);
- _LOG2D(edata, "start get ...");
+ _LOG2D(edata, "start %s ...", http_method ?: "get");
edata->ehandle = curl_easy_init();
if (!edata->ehandle) {
@@ -567,6 +567,7 @@ nm_http_client_poll_req(NMHttpClient *self,
.check_user_data = check_user_data,
.response_code = -1,
.http_headers = NULL,
+ .http_method = http_method,
};
if (http_headers) {
diff --git a/src/tests/client/meson.build b/src/tests/client/meson.build
index 6dc0f2a2c8..1c349cc1ee 100644
--- a/src/tests/client/meson.build
+++ b/src/tests/client/meson.build
@@ -8,9 +8,27 @@ test(
source_root,
python.path(),
'--',
+ 'TestNmcli',
],
env: [
'LIBTOOL=',
],
timeout: 120,
)
+
+if enable_nm_cloud_setup
+ test(
+ 'check-local-tests-cloud-setup',
+ find_program(join_paths(source_root, 'src/tests/client/test-client.sh')),
+ args: [
+ build_root,
+ source_root,
+ python.path(),
+ '--',
+ 'TestNmCloudSetup',
+ ],
+ env: [
+ 'LIBTOOL=',
+ ],
+ )
+endif
diff --git a/src/tests/client/test-client.py b/src/tests/client/test-client.py
index e5febe964f..e603203e3e 100755
--- a/src/tests/client/test-client.py
+++ b/src/tests/client/test-client.py
@@ -68,6 +68,10 @@ ENV_NM_TEST_CLIENT_BUILDDIR = "NM_TEST_CLIENT_BUILDDIR"
# In particular, you can test also a nmcli binary installed somewhere else.
ENV_NM_TEST_CLIENT_NMCLI_PATH = "NM_TEST_CLIENT_NMCLI_PATH"
+# (optional) Path to nm-cloud-setup. By default, it looks for nm-cloud-setup
+# in build dir.
+ENV_NM_TEST_CLIENT_CLOUD_SETUP_PATH = "NM_TEST_CLIENT_CLOUD_SETUP_PATH"
+
# (optional) The test also compares tranlsated output (l10n). This requires,
# that you first install the translation in the right place. So, by default,
# if a test for a translation fails, it will mark the test as skipped, and not
@@ -140,6 +144,12 @@ try:
except ImportError:
pexpect = None
+try:
+ from http.server import HTTPServer
+ from http.server import BaseHTTPRequestHandler
+except ImportError:
+ HTTPServer = None
+
###############################################################################
@@ -165,6 +175,14 @@ class PathConfiguration:
return v
@staticmethod
+ def test_cloud_meta_mock_path():
+ v = os.path.abspath(
+ PathConfiguration.top_srcdir() + "/tools/test-cloud-meta-mock.py"
+ )
+ assert os.path.exists(v), 'Cannot find cloud metadata mock server at "%s"' % (v)
+ return v
+
+ @staticmethod
def canonical_script_filename():
p = "src/tests/client/test-client.py"
assert (PathConfiguration.top_srcdir() + "/" + p) == os.path.abspath(__file__)
@@ -551,6 +569,20 @@ class Configuration:
pass
if not os.path.exists(v):
raise Exception("Missing nmcli binary. Set NM_TEST_CLIENT_NMCLI_PATH?")
+ elif name == ENV_NM_TEST_CLIENT_CLOUD_SETUP_PATH:
+ v = os.environ.get(ENV_NM_TEST_CLIENT_CLOUD_SETUP_PATH, None)
+ if v is None:
+ try:
+ v = os.path.abspath(
+ self.get(ENV_NM_TEST_CLIENT_BUILDDIR)
+ + "/src/nm-cloud-setup/nm-cloud-setup"
+ )
+ except:
+ pass
+ if not os.path.exists(v):
+ raise Exception(
+ "Missing nm-cloud-setup binary. Set NM_TEST_CLIENT_CLOUD_SETUP_PATH?"
+ )
elif name == ENV_NM_TEST_CLIENT_CHECK_L10N:
# if we test locales other than 'C', the output of nmcli depends on whether
# nmcli can load the translations. Unfortunately, I cannot find a way to
@@ -751,6 +783,16 @@ class NMStubServer:
iface_name = ""
self.op_SetProperties([(path, [(iface_name, [(propname, value)])])])
+ def addAndActivateConnection(
+ self, connection, device, specific_object="", delay=None
+ ):
+ if delay is not None:
+ self.op_SetActiveConnectionStateChangedDelay(device, delay)
+ nm_iface = self._conn_get_main_object(self._conn)
+ self.op_AddAndActivateConnection(
+ connection, device, specific_object, dbus_iface=nm_iface
+ )
+
###############################################################################
@@ -835,7 +877,7 @@ class AsyncProcess:
MAX_JOBS = 15
-class TestNmcli(unittest.TestCase):
+class TestNmClient(unittest.TestCase):
def __init__(self, *args, **kwargs):
self._calling_num = {}
self._skip_test_for_l10n_diff = []
@@ -888,13 +930,53 @@ class TestNmcli(unittest.TestCase):
return content_expect, results_expect
- def nmcli_construct_argv(self, args, with_valgrind=None):
+ def _env(
+ self, lang="C", calling_num=None, fatal_warnings=_DEFAULT_ARG, extra_env=None
+ ):
+ if lang == "C":
+ language = ""
+ elif lang == "de_DE.utf8":
+ language = "de"
+ elif lang == "pl_PL.UTF-8":
+ language = "pl"
+ else:
+ self.fail("invalid language %s" % (lang))
+
+ env = {}
+ for k in [
+ "LD_LIBRARY_PATH",
+ "DBUS_SESSION_BUS_ADDRESS",
+ "LIBNM_CLIENT_DEBUG",
+ "LIBNM_CLIENT_DEBUG_FILE",
+ ]:
+ val = os.environ.get(k, None)
+ if val is not None:
+ env[k] = val
+ env["LANG"] = lang
+ env["LANGUAGE"] = language
+ env["LIBNM_USE_SESSION_BUS"] = "1"
+ env["LIBNM_USE_NO_UDEV"] = "1"
+ env["TERM"] = "linux"
+ env["ASAN_OPTIONS"] = conf.get(ENV_NM_TEST_ASAN_OPTIONS)
+ env["LSAN_OPTIONS"] = conf.get(ENV_NM_TEST_LSAN_OPTIONS)
+ env["LBSAN_OPTIONS"] = conf.get(ENV_NM_TEST_UBSAN_OPTIONS)
+ env["XDG_CONFIG_HOME"] = PathConfiguration.srcdir()
+ if calling_num is not None:
+ env["NM_TEST_CALLING_NUM"] = str(calling_num)
+ if fatal_warnings is _DEFAULT_ARG or fatal_warnings:
+ env["G_DEBUG"] = "fatal-warnings"
+ if extra_env is not None:
+ for k, v in extra_env.items():
+ env[k] = v
+ return env
+
+ def cmd_construct_argv(self, cmd_path, args, with_valgrind=None):
if with_valgrind is None:
with_valgrind = conf.get(ENV_NM_TEST_VALGRIND)
valgrind_log = None
- cmd = conf.get(ENV_NM_TEST_CLIENT_NMCLI_PATH)
+ cmd = conf.get(cmd_path)
if with_valgrind:
valgrind_log = tempfile.mkstemp(prefix="nm-test-client-valgrind.")
argv = [
@@ -921,6 +1003,174 @@ class TestNmcli(unittest.TestCase):
argv.extend(args)
return argv, valgrind_log
+ def call_pexpect(self, cmd_path, args, extra_env):
+ argv, valgrind_log = self.cmd_construct_argv(cmd_path, args)
+ env = self._env(extra_env=extra_env)
+
+ pexp = pexpect.spawn(argv[0], argv[1:], timeout=10, env=env)
+
+ typ = collections.namedtuple("CallPexpect", ["pexp", "valgrind_log"])
+ return typ(pexp, valgrind_log)
+
+ def async_start(self, wait_all=False):
+
+ while True:
+
+ while True:
+ for async_job in list(self._async_jobs[0:MAX_JOBS]):
+ async_job.start()
+ # start up to MAX_JOBS jobs, but poll() and complete those
+ # that are already exited. Retry, until there are no more
+ # jobs to start, or until MAX_JOBS are running.
+ jobs_running = []
+ for async_job in list(self._async_jobs[0:MAX_JOBS]):
+ if async_job.poll() is not None:
+ self._async_jobs.remove(async_job)
+ async_job.wait_and_complete()
+ continue
+ jobs_running.append(async_job)
+ if len(jobs_running) >= len(self._async_jobs):
+ break
+ if len(jobs_running) >= MAX_JOBS:
+ break
+
+ if not jobs_running:
+ return
+ if not wait_all:
+ return
+
+ # in a loop, indefinitely poll the running jobs until we find one that
+ # completes. Note that poll() itself will raise an exception if a
+ # jobs times out.
+ for async_job in Util.random_job(jobs_running):
+ if async_job.poll(timeout=0.03) is not None:
+ self._async_jobs.remove(async_job)
+ async_job.wait_and_complete()
+ break
+
+ def async_wait(self):
+ return self.async_start(wait_all=True)
+
+ def _nm_test_post(self):
+
+ self.async_wait()
+
+ self.srv_shutdown()
+
+ self._calling_num = None
+
+ results = self._results
+ self._results = None
+
+ if len(results) == 0:
+ return
+
+ skip_test_for_l10n_diff = self._skip_test_for_l10n_diff
+ self._skip_test_for_l10n_diff = None
+
+ test_name = self._testMethodName
+
+ filename = os.path.abspath(
+ PathConfiguration.srcdir()
+ + "/test-client.check-on-disk/"
+ + test_name
+ + ".expected"
+ )
+
+ regenerate = conf.get(ENV_NM_TEST_REGENERATE)
+
+ content_expect, results_expect = self._read_expected(filename)
+
+ if results_expect is None:
+ if not regenerate:
+ self.fail(
+ "Failed to parse expected file '%s'. Let the test write the file by rerunning with NM_TEST_REGENERATE=1"
+ % (filename)
+ )
+ else:
+ for i in range(0, min(len(results_expect), len(results))):
+ n = results[i]
+ if results_expect[i] == n["content"]:
+ continue
+ if regenerate:
+ continue
+ if n["ignore_l10n_diff"]:
+ skip_test_for_l10n_diff.append(n["test_name"])
+ continue
+ print(
+ "\n\n\nThe file '%s' does not have the expected content:"
+ % (filename)
+ )
+ print("ACTUAL OUTPUT:\n[[%s]]\n" % (n["content"]))
+ print("EXPECT OUTPUT:\n[[%s]]\n" % (results_expect[i]))
+ print(
+ "Let the test write the file by rerunning with NM_TEST_REGENERATE=1"
+ )
+ print(
+ "See howto in %s for details.\n"
+ % (PathConfiguration.canonical_script_filename())
+ )
+ sys.stdout.flush()
+ self.fail(
+ "Unexpected output of command, expected %s. Rerun test with NM_TEST_REGENERATE=1 to regenerate files"
+ % (filename)
+ )
+ if len(results_expect) != len(results):
+ if not regenerate:
+ print(
+ "\n\n\nThe number of tests in %s does not match the expected content (%s vs %s):"
+ % (filename, len(results_expect), len(results))
+ )
+ if len(results_expect) < len(results):
+ print(
+ "ACTUAL OUTPUT:\n[[%s]]\n"
+ % (results[len(results_expect)]["content"])
+ )
+ else:
+ print(
+ "EXPECT OUTPUT:\n[[%s]]\n" % (results_expect[len(results)])
+ )
+ print(
+ "Let the test write the file by rerunning with NM_TEST_REGENERATE=1"
+ )
+ print(
+ "See howto in %s for details.\n"
+ % (PathConfiguration.canonical_script_filename())
+ )
+ sys.stdout.flush()
+ self.fail(
+ "Unexpected output of command, expected %s. Rerun test with NM_TEST_REGENERATE=1 to regenerate files"
+ % (filename)
+ )
+
+ if regenerate:
+ content_new = b"".join([r["content"] for r in results])
+ if content_new != content_expect:
+ try:
+ with open(filename, "wb") as content_file:
+ content_file.write(content_new)
+ except Exception as e:
+ self.fail("Failure to write '%s': %s" % (filename, e))
+
+ if skip_test_for_l10n_diff:
+ # nmcli loads translations from the installation path. This failure commonly
+ # happens because you did not install the binary in the --prefix, before
+ # running the test. Hence, translations are not available or differ.
+ self.skipTest(
+ "Skipped asserting for localized tests %s. Set NM_TEST_CLIENT_CHECK_L10N=1 to force fail."
+ % (",".join(skip_test_for_l10n_diff))
+ )
+
+ def setUp(self):
+ if not dbus_session_inited:
+ self.skipTest(
+ "Own D-Bus session for testing is not initialized. Do you have dbus-run-session available?"
+ )
+ if NM is None:
+ self.skipTest("gi.NM is not available. Did you build with introspection?")
+
+
+class TestNmcli(TestNmClient):
def call_nmcli_l(
self,
args,
@@ -1004,54 +1254,7 @@ class TestNmcli(unittest.TestCase):
)
def call_nmcli_pexpect(self, args):
-
- env = self._env(extra_env={"NO_COLOR": "1"})
- argv, valgrind_log = self.nmcli_construct_argv(args)
-
- pexp = pexpect.spawn(argv[0], argv[1:], timeout=10, env=env)
-
- typ = collections.namedtuple("CallNmcliPexpect", ["pexp", "valgrind_log"])
- return typ(pexp, valgrind_log)
-
- def _env(
- self, lang="C", calling_num=None, fatal_warnings=_DEFAULT_ARG, extra_env=None
- ):
- if lang == "C":
- language = ""
- elif lang == "de_DE.utf8":
- language = "de"
- elif lang == "pl_PL.UTF-8":
- language = "pl"
- else:
- self.fail("invalid language %s" % (lang))
-
- env = {}
- for k in [
- "LD_LIBRARY_PATH",
- "DBUS_SESSION_BUS_ADDRESS",
- "LIBNM_CLIENT_DEBUG",
- "LIBNM_CLIENT_DEBUG_FILE",
- ]:
- val = os.environ.get(k, None)
- if val is not None:
- env[k] = val
- env["LANG"] = lang
- env["LANGUAGE"] = language
- env["LIBNM_USE_SESSION_BUS"] = "1"
- env["LIBNM_USE_NO_UDEV"] = "1"
- env["TERM"] = "linux"
- env["ASAN_OPTIONS"] = conf.get(ENV_NM_TEST_ASAN_OPTIONS)
- env["LSAN_OPTIONS"] = conf.get(ENV_NM_TEST_LSAN_OPTIONS)
- env["LBSAN_OPTIONS"] = conf.get(ENV_NM_TEST_UBSAN_OPTIONS)
- env["XDG_CONFIG_HOME"] = PathConfiguration.srcdir()
- if calling_num is not None:
- env["NM_TEST_CALLING_NUM"] = str(calling_num)
- if fatal_warnings is _DEFAULT_ARG or fatal_warnings:
- env["G_DEBUG"] = "fatal-warnings"
- if extra_env is not None:
- for k, v in extra_env.items():
- env[k] = v
- return env
+ return self.call_pexpect(ENV_NM_TEST_CLIENT_NMCLI_PATH, args, {"NO_COLOR": "1"})
def _call_nmcli(
self,
@@ -1113,7 +1316,9 @@ class TestNmcli(unittest.TestCase):
self.fail("invalid language %s" % (lang))
# Running under valgrind is not yet supported for those tests.
- args, valgrind_log = self.nmcli_construct_argv(args, with_valgrind=False)
+ args, valgrind_log = self.cmd_construct_argv(
+ ENV_NM_TEST_CLIENT_NMCLI_PATH, args, with_valgrind=False
+ )
assert valgrind_log is None
@@ -1232,163 +1437,6 @@ class TestNmcli(unittest.TestCase):
self.async_start(wait_all=sync_barrier)
- def async_start(self, wait_all=False):
-
- while True:
-
- while True:
- for async_job in list(self._async_jobs[0:MAX_JOBS]):
- async_job.start()
- # start up to MAX_JOBS jobs, but poll() and complete those
- # that are already exited. Retry, until there are no more
- # jobs to start, or until MAX_JOBS are running.
- jobs_running = []
- for async_job in list(self._async_jobs[0:MAX_JOBS]):
- if async_job.poll() is not None:
- self._async_jobs.remove(async_job)
- async_job.wait_and_complete()
- continue
- jobs_running.append(async_job)
- if len(jobs_running) >= len(self._async_jobs):
- break
- if len(jobs_running) >= MAX_JOBS:
- break
-
- if not jobs_running:
- return
- if not wait_all:
- return
-
- # in a loop, indefinitely poll the running jobs until we find one that
- # completes. Note that poll() itself will raise an exception if a
- # jobs times out.
- for async_job in Util.random_job(jobs_running):
- if async_job.poll(timeout=0.03) is not None:
- self._async_jobs.remove(async_job)
- async_job.wait_and_complete()
- break
-
- def async_wait(self):
- return self.async_start(wait_all=True)
-
- def _nm_test_post(self):
-
- self.async_wait()
-
- self.srv_shutdown()
-
- self._calling_num = None
-
- results = self._results
- self._results = None
-
- if len(results) == 0:
- return
-
- skip_test_for_l10n_diff = self._skip_test_for_l10n_diff
- self._skip_test_for_l10n_diff = None
-
- test_name = self._testMethodName
-
- filename = os.path.abspath(
- PathConfiguration.srcdir()
- + "/test-client.check-on-disk/"
- + test_name
- + ".expected"
- )
-
- regenerate = conf.get(ENV_NM_TEST_REGENERATE)
-
- content_expect, results_expect = self._read_expected(filename)
-
- if results_expect is None:
- if not regenerate:
- self.fail(
- "Failed to parse expected file '%s'. Let the test write the file by rerunning with NM_TEST_REGENERATE=1"
- % (filename)
- )
- else:
- for i in range(0, min(len(results_expect), len(results))):
- n = results[i]
- if results_expect[i] == n["content"]:
- continue
- if regenerate:
- continue
- if n["ignore_l10n_diff"]:
- skip_test_for_l10n_diff.append(n["test_name"])
- continue
- print(
- "\n\n\nThe file '%s' does not have the expected content:"
- % (filename)
- )
- print("ACTUAL OUTPUT:\n[[%s]]\n" % (n["content"]))
- print("EXPECT OUTPUT:\n[[%s]]\n" % (results_expect[i]))
- print(
- "Let the test write the file by rerunning with NM_TEST_REGENERATE=1"
- )
- print(
- "See howto in %s for details.\n"
- % (PathConfiguration.canonical_script_filename())
- )
- sys.stdout.flush()
- self.fail(
- "Unexpected output of command, expected %s. Rerun test with NM_TEST_REGENERATE=1 to regenerate files"
- % (filename)
- )
- if len(results_expect) != len(results):
- if not regenerate:
- print(
- "\n\n\nThe number of tests in %s does not match the expected content (%s vs %s):"
- % (filename, len(results_expect), len(results))
- )
- if len(results_expect) < len(results):
- print(
- "ACTUAL OUTPUT:\n[[%s]]\n"
- % (results[len(results_expect)]["content"])
- )
- else:
- print(
- "EXPECT OUTPUT:\n[[%s]]\n" % (results_expect[len(results)])
- )
- print(
- "Let the test write the file by rerunning with NM_TEST_REGENERATE=1"
- )
- print(
- "See howto in %s for details.\n"
- % (PathConfiguration.canonical_script_filename())
- )
- sys.stdout.flush()
- self.fail(
- "Unexpected output of command, expected %s. Rerun test with NM_TEST_REGENERATE=1 to regenerate files"
- % (filename)
- )
-
- if regenerate:
- content_new = b"".join([r["content"] for r in results])
- if content_new != content_expect:
- try:
- with open(filename, "wb") as content_file:
- content_file.write(content_new)
- except Exception as e:
- self.fail("Failure to write '%s': %s" % (filename, e))
-
- if skip_test_for_l10n_diff:
- # nmcli loads translations from the installation path. This failure commonly
- # happens because you did not install the binary in the --prefix, before
- # running the test. Hence, translations are not available or differ.
- self.skipTest(
- "Skipped asserting for localized tests %s. Set NM_TEST_CLIENT_CHECK_L10N=1 to force fail."
- % (",".join(skip_test_for_l10n_diff))
- )
-
- def skip_without_pexpect(func):
- def f(self):
- if pexpect is None:
- raise unittest.SkipTest("pexpect not available")
- func(self)
-
- return f
-
def nm_test(func):
def f(self):
self.srv_start()
@@ -1404,13 +1452,13 @@ class TestNmcli(unittest.TestCase):
return f
- def setUp(self):
- if not dbus_session_inited:
- self.skipTest(
- "Own D-Bus session for testing is not initialized. Do you have dbus-run-session available?"
- )
- if NM is None:
- self.skipTest("gi.NM is not available. Did you build with introspection?")
+ def skip_without_pexpect(func):
+ def f(self):
+ if pexpect is None:
+ raise unittest.SkipTest("pexpect not available")
+ func(self)
+
+ return f
def init_001(self):
self.srv.op_AddObj("WiredDevice", iface="eth0")
@@ -2088,6 +2136,125 @@ class TestNmcli(unittest.TestCase):
###############################################################################
+class TestNmCloudSetup(TestNmClient):
+ def cloud_setup_test(func):
+ """
+ Runs the mock NetworkManager along with a mock cloud metadata service.
+ """
+
+ def f(self):
+ if pexpect is None:
+ raise unittest.SkipTest("pexpect not available")
+
+ s = socket.socket()
+ s.set_inheritable(True)
+ s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+ s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
+ s.bind(("localhost", 0))
+
+ # The same value as Python's TCPServer uses.
+            # Chosen by summoning the spirit of TCP under the influence of
+            # hallucinogenic substances.
+ s.listen(5)
+
+ def pass_socket():
+ os.dup2(s.fileno(), 3, inheritable=True)
+
+ service_path = PathConfiguration.test_cloud_meta_mock_path()
+ env = os.environ.copy()
+ env["LISTEN_FDS"] = "1"
+ p = subprocess.Popen(
+ [sys.executable, service_path],
+ stdin=subprocess.PIPE,
+ env=env,
+ pass_fds=(s.fileno(),),
+ preexec_fn=pass_socket,
+ )
+
+ self.md_url = "http://%s:%d" % s.getsockname()
+ s.close()
+
+ self.srv_start()
+ func(self)
+ self._nm_test_post()
+
+ p.terminate()
+ p.wait()
+
+ return f
+
+ @cloud_setup_test
+ def test_ec2(self):
+
+ # Add a device with an active connection that has IPv4 configured
+ self.srv.op_AddObj("WiredDevice", iface="eth0")
+ self.srv.addAndActivateConnection(
+ {
+ "connection": {"type": "802-3-ethernet", "id": "con-eth0"},
+ "ipv4": {"method": "auto"},
+ },
+ "/org/freedesktop/NetworkManager/Devices/1",
+ delay=0,
+ )
+
+ # The second connection has no IPv4
+ self.srv.op_AddObj("WiredDevice", iface="eth1")
+ self.srv.addAndActivateConnection(
+ {"connection": {"type": "802-3-ethernet", "id": "con-eth1"}},
+ "/org/freedesktop/NetworkManager/Devices/2",
+ "",
+ delay=0,
+ )
+
+ # Run nm-cloud-setup for the first time
+ nmc = self.call_pexpect(
+ ENV_NM_TEST_CLIENT_CLOUD_SETUP_PATH,
+ [],
+ {
+ "NM_CLOUD_SETUP_EC2_HOST": self.md_url,
+ "NM_CLOUD_SETUP_LOG": "trace",
+ "NM_CLOUD_SETUP_EC2": "yes",
+ },
+ )
+
+ nmc.pexp.expect("provider ec2 detected")
+ nmc.pexp.expect("found interfaces: 9E:C0:3E:92:24:2D, 53:E9:7E:52:8D:A8")
+ nmc.pexp.expect("get-config: starting")
+ nmc.pexp.expect("get-config: success")
+ nmc.pexp.expect("meta data received")
+ # One of the devices has no IPv4 configuration to be modified
+ nmc.pexp.expect("device has no suitable applied connection. Skip")
+        # The other one was lacking an address; set it up.
+ nmc.pexp.expect("some changes were applied for provider ec2")
+ nmc.pexp.expect(pexpect.EOF)
+
+ # Run nm-cloud-setup for the second time
+ nmc = self.call_pexpect(
+ ENV_NM_TEST_CLIENT_CLOUD_SETUP_PATH,
+ [],
+ {
+ "NM_CLOUD_SETUP_EC2_HOST": self.md_url,
+ "NM_CLOUD_SETUP_LOG": "trace",
+ "NM_CLOUD_SETUP_EC2": "yes",
+ },
+ )
+
+ nmc.pexp.expect("provider ec2 detected")
+ nmc.pexp.expect("found interfaces: 9E:C0:3E:92:24:2D, 53:E9:7E:52:8D:A8")
+ nmc.pexp.expect("get-config: starting")
+ nmc.pexp.expect("get-config: success")
+ nmc.pexp.expect("meta data received")
+ # No changes this time
+ nmc.pexp.expect('device needs no update to applied connection "con-eth0"')
+ nmc.pexp.expect("no changes were applied for provider ec2")
+ nmc.pexp.expect(pexpect.EOF)
+
+ Util.valgrind_check_log(nmc.valgrind_log, "test_ec2")
+
+
+###############################################################################
+
+
def main():
global dbus_session_inited
diff --git a/src/tests/client/test-client.sh b/src/tests/client/test-client.sh
index a636f5fb93..dd08e4d889 100755
--- a/src/tests/client/test-client.sh
+++ b/src/tests/client/test-client.sh
@@ -71,7 +71,6 @@ fi
test -d "$BUILDDIR" || die "BUILDDIR \"$BUILDDIR\" does not exist?"
test -d "$SRCDIR" || die "SRCDIR \"$SRCDIR\" does not exist?"
-test -f "$BUILDDIR/src/nmcli/nmcli" || die "\"$BUILDDIR/src/nmcli/nmcli\" does not exist?"
if test -f "$BUILDDIR/src/libnm-client-impl/.libs/libnm.so" ; then
LIBDIR="$BUILDDIR/src/libnm-client-impl/.libs"
@@ -84,6 +83,7 @@ fi
mkdir -p "$BUILDDIR/src/tests/client/" || die "failure to create build output directory \"$BUILDDIR/src/tests/client/\""
export NM_TEST_CLIENT_NMCLI_PATH="$BUILDDIR/src/nmcli/nmcli"
+export NM_TEST_CLIENT_CLOUD_SETUP_PATH="$BUILDDIR/src/nm-cloud-setup/nm-cloud-setup"
export GI_TYPELIB_PATH="$BUILDDIR/src/libnm-client-impl${GI_TYPELIB_PATH:+:$GI_TYPELIB_PATH}"
export LD_LIBRARY_PATH="$LIBDIR${LD_LIBRARY_PATH:+:$LD_LIBRARY_PATH}"
export NM_TEST_CLIENT_BUILDDIR="$BUILDDIR"
@@ -91,7 +91,8 @@ export NM_TEST_CLIENT_BUILDDIR="$BUILDDIR"
# Run nmcli at least once. With libtool, nmcli is a shell script and with LTO
# this seems to perform some slow setup during the first run. If we do that
# during the test, it will timeout and fail.
-"$NM_TEST_CLIENT_NMCLI_PATH" --version &>/dev/null
+"$NM_TEST_CLIENT_NMCLI_PATH" --version &>/dev/null || :
+"$NM_TEST_CLIENT_CLOUD_SETUP_PATH" --invalid &>/dev/null || :
# we first collect all the output in "test-client.log" and print it at once
# afterwards. The only reason is that when you run with `make -j` that the
diff --git a/tools/test-cloud-meta-mock.py b/tools/test-cloud-meta-mock.py
new file mode 100755
index 0000000000..262dc2ffb3
--- /dev/null
+++ b/tools/test-cloud-meta-mock.py
@@ -0,0 +1,90 @@
+#!/usr/bin/env python
+
+# run: $ systemd-socket-activate -l 8000 python tools/test-cloud-meta-mock.py &
+# $ NM_CLOUD_SETUP_EC2_HOST=http://localhost:8000 \
+# NM_CLOUD_SETUP_LOG=trace \
+# NM_CLOUD_SETUP_EC2=yes src/nm-cloud-setup/nm-cloud-setup
+# or just: $ python tools/test-cloud-meta-mock.py
+
+import os
+import socket
+
+from http.server import HTTPServer
+from http.server import BaseHTTPRequestHandler
+from socketserver import BaseServer
+
+
+class MockCloudMDRequestHandler(BaseHTTPRequestHandler):
+ """
+    Respond to cloud metadata service requests.
+    Currently implements a fairly minimal subset of the AWS EC2 API.
+ """
+
+ _ec2_macs = "/2018-09-24/meta-data/network/interfaces/macs/"
+ _meta_resources = {
+ "/latest/meta-data/": b"ami-id\n",
+ _ec2_macs: b"9e:c0:3e:92:24:2d\n53:e9:7e:52:8d:a8",
+ _ec2_macs + "9e:c0:3e:92:24:2d/subnet-ipv4-cidr-block": b"172.31.16.0/20",
+ _ec2_macs + "9e:c0:3e:92:24:2d/local-ipv4s": b"172.31.26.249",
+ _ec2_macs + "53:e9:7e:52:8d:a8/subnet-ipv4-cidr-block": b"172.31.166.0/20",
+ _ec2_macs + "53:e9:7e:52:8d:a8/local-ipv4s": b"172.31.176.249",
+ }
+
+ def log_message(self, format, *args):
+ pass
+
+ def do_GET(self):
+ if self.path in self._meta_resources:
+ self.send_response(200)
+ self.end_headers()
+ self.wfile.write(self._meta_resources[self.path])
+ else:
+ self.send_response(404)
+ self.end_headers()
+
+ def do_PUT(self):
+ if self.path == "/latest/api/token":
+ self.send_response(200)
+ self.end_headers()
+ self.wfile.write(
+ b"AQAAALH-k7i18JMkK-ORLZQfAa7nkNjQbKwpQPExNHqzk1oL_7eh-A=="
+ )
+ else:
+ self.send_response(404)
+ self.end_headers()
+
+
+class SocketHTTPServer(HTTPServer):
+ """
+    An HTTP server that accepts a socket (that has already been
+    listen()-ed on). This is useful when the socket is passed
+    from the test runner.
+ """
+
+ def __init__(self, server_address, RequestHandlerClass, socket):
+ BaseServer.__init__(self, server_address, RequestHandlerClass)
+ self.socket = socket
+ self.server_address = self.socket.getsockname()
+
+
+# See sd_listen_fds(3)
+fileno = os.getenv("LISTEN_FDS")
+if fileno is not None:
+ if fileno != "1":
+ raise Exception("Bad LISTEN_FDS")
+ s = socket.socket(fileno=3)
+else:
+ addr = ("localhost", 0)
+ s = socket.socket()
+ s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+ s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
+ s.bind(addr)
+
+
+httpd = SocketHTTPServer(None, MockCloudMDRequestHandler, socket=s)
+
+print("Listening on http://%s:%d" % (httpd.server_address[0], httpd.server_address[1]))
+httpd.server_activate()
+
+httpd.serve_forever()
+httpd.server_close()
diff --git a/tools/test-networkmanager-service.py b/tools/test-networkmanager-service.py
index a7788510a5..9002f26b36 100755
--- a/tools/test-networkmanager-service.py
+++ b/tools/test-networkmanager-service.py
@@ -358,7 +358,7 @@ class Util:
[(str(k), Util.variant_from_dbus(v)) for k, v in val.items()]
),
)
- if val.signature == "sa{sv}":
+ if val.signature == "sa{sv}" or val.signature == "sa{ss}":
c = collections.OrderedDict(
[
(
@@ -825,10 +825,12 @@ class Device(ExportedObj):
ExportedObj.__init__(self, ExportedObj.create_path(Device), ident)
+ self.applied_con = {}
self.ip4_config = None
self.ip6_config = None
self.dhcp4_config = None
self.dhcp6_config = None
+ self.activation_state_change_delay_ms = 50
self.prp_state = NM.DeviceState.UNAVAILABLE
@@ -1037,6 +1039,20 @@ class Device(ExportedObj):
def Disconnect(self):
pass
+ @dbus.service.method(
+ dbus_interface=IFACE_DEVICE, in_signature="u", out_signature="a{sa{sv}}t"
+ )
+ def GetAppliedConnection(self, flags):
+ ac = self._dbus_property_get(IFACE_DEVICE, PRP_DEVICE_ACTIVE_CONNECTION)
+ return (self.applied_con, 0)
+
+ @dbus.service.method(
+ dbus_interface=IFACE_DEVICE, in_signature="a{sa{sv}}tu", out_signature=""
+ )
+ def Reapply(self, connection, version_id, flags):
+ self.applied_con = connection
+ pass
+
@dbus.service.method(dbus_interface=IFACE_DEVICE, in_signature="", out_signature="")
def Delete(self):
# We don't currently support any software device types, so...
@@ -1067,6 +1083,10 @@ class Device(ExportedObj):
def set_active_connection(self, ac):
self._dbus_property_set(IFACE_DEVICE, PRP_DEVICE_ACTIVE_CONNECTION, ac)
+ if ac is None:
+ self.applied_con = {}
+ else:
+ self.applied_con = ac.con_inst.con_hash
def connection_is_available(self, con_inst):
if con_inst.is_vpn():
@@ -1388,9 +1408,9 @@ class ActiveConnection(ExportedObj):
self.con_inst = con_inst
self.is_vpn = con_inst.is_vpn()
+ self.activation_state_change_delay_ms = device.activation_state_change_delay_ms
self._activation_id = None
self._deactivation_id = None
- self.activation_state_change_delay_ms = 50
s_con = con_inst.con_hash[NM.SETTING_CONNECTION_SETTING_NAME]
@@ -1486,9 +1506,17 @@ class ActiveConnection(ExportedObj):
def start_activation(self):
assert self._activation_id is None
- self._activation_id = GLib.timeout_add(
- self.activation_state_change_delay_ms, self._activation_step1
- )
+ if self.activation_state_change_delay_ms == 0:
+ self.device.set_active_connection(self)
+ self._set_state(
+ NM.ActiveConnectionState.ACTIVATED,
+ NM.ActiveConnectionStateReason.UNKNOWN,
+ )
+ self.device.set_state(NM.DeviceState.ACTIVATED, NM.DeviceStateReason.NONE)
+ else:
+ self._activation_id = GLib.timeout_add(
+ self.activation_state_change_delay_ms, self._activation_step1
+ )
def start_deactivation(self):
assert self._deactivation_id is None
@@ -1918,9 +1946,9 @@ class NetworkManager(ExportedObj):
@dbus.service.method(dbus_interface=IFACE_TEST, in_signature="ou", out_signature="")
def SetActiveConnectionStateChangedDelay(self, devpath, delay_ms):
- for ac in reversed(self.active_connections):
- if ac.device.path == devpath:
- ac.activation_state_change_delay_ms = delay_ms
+ for d in self.devices:
+ if d.path == devpath:
+ d.activation_state_change_delay_ms = delay_ms
return
raise BusErr.UnknownDeviceException(
"Device with iface '%s' not found" % devpath