author     Brett Holman <brett.holman@canonical.com>  2022-02-24 16:40:00 -0700
committer  git-ubuntu importer <ubuntu-devel-discuss@lists.ubuntu.com>  2022-02-25 22:27:09 +0000
commit     f99c8765f8915fd9dd8bf9967347bb375ab74dd7 (patch)
tree       beb900f68882b129ecff06c5c0f00d3f91f2e759
parent     9e224f959c1a7e537783fc606b8d2a548beff8d8 (diff)
download   cloud-init-git-f99c8765f8915fd9dd8bf9967347bb375ab74dd7.tar.gz
22.1-14-g2e17a0d6-0ubuntu1~22.04.1 (patches unapplied)
Imported using git-ubuntu import.
-rw-r--r--  Makefile                                                27
-rwxr-xr-x  cloudinit/distros/__init__.py                            2
-rwxr-xr-x  cloudinit/sources/DataSourceAzure.py                   140
-rw-r--r--  cloudinit/url_helper.py                                 16
-rw-r--r--  cloudinit/util.py                                        2
-rw-r--r--  debian/apport-launcher.py                                3
-rw-r--r--  debian/changelog                                        25
-rw-r--r--  doc/man/cloud-init.1                                     7
-rw-r--r--  tests/integration_tests/clouds.py                        8
-rw-r--r--  tests/integration_tests/modules/test_apt.py             13
-rw-r--r--  tests/integration_tests/modules/test_combined.py         6
-rw-r--r--  tests/integration_tests/modules/test_users_groups.py     8
-rw-r--r--  tests/unittests/sources/test_azure.py                  353
-rw-r--r--  tests/unittests/test_util.py                            46
-rw-r--r--  tools/.github-cla-signers                                1
15 files changed, 433 insertions, 224 deletions
diff --git a/Makefile b/Makefile
index 4ead786f..5f0c5b1b 100644
--- a/Makefile
+++ b/Makefile
@@ -55,8 +55,28 @@ clean_pyc:
@find . -type f -name "*.pyc" -delete
@find . -type d -name __pycache__ -delete
-clean: clean_pyc
- rm -rf doc/rtd_html .tox .coverage
+clean_pytest:
+ rm -rf .cache htmlcov
+
+clean_packaging:
+ rm -rf srpm cloud_init.egg-info/ \
+ cloud-init-*.tar.gz \
+ cloud-init-*.tar.gz.asc \
+ cloud-init.dsc \
+ cloud-init_*.build \
+ cloud-init_*.buildinfo \
+ cloud-init_*.changes \
+ cloud-init_*.deb \
+ cloud-init_*.dsc \
+ cloud-init_*.orig.tar.gz \
+ cloud-init_*.tar.xz \
+ cloud-init_*.upload
+
+clean_release:
+ rm -rf new-upstream-changes.txt commit.msg
+
+clean: clean_pyc clean_pytest clean_packaging clean_release
+ rm -rf doc/rtd_html .tox .coverage tags
yaml:
@$(PYTHON) $(CWD)/tools/validate-yaml.py $(YAML_FILES)
@@ -122,4 +142,5 @@ fix_spelling:
.PHONY: test flake8 clean rpm srpm deb deb-src yaml
.PHONY: check_version pip-test-requirements pip-requirements clean_pyc
-.PHONY: unittest style-check doc fix_spelling check_spelling
+.PHONY: unittest style-check doc fix_spelling
+.PHONY: clean_pytest clean_packaging check_spelling clean_release
diff --git a/cloudinit/distros/__init__.py b/cloudinit/distros/__init__.py
index 76acd6a3..f2d9de10 100755
--- a/cloudinit/distros/__init__.py
+++ b/cloudinit/distros/__init__.py
@@ -529,6 +529,8 @@ class Distro(persistence.CloudInitPickleMixin, metaclass=abc.ABCMeta):
if not util.is_group(group):
self.create_group(group)
LOG.debug("created group '%s' for user '%s'", group, name)
+ if "uid" in kwargs.keys():
+ kwargs["uid"] = str(kwargs["uid"])
# Check the values and create the command
for key, val in sorted(kwargs.items()):
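
The hunk above stringifies an integer uid before the kwargs are validated; the loop that follows only accepts string values when assembling the useradd command, so an int uid was previously dropped without warning (LP: #1875772, per the changelog below). A minimal sketch of the failure mode, using a hypothetical simplified flag builder in place of the real command assembly:

    # Hypothetical simplification of how create_user() turns kwargs into flags.
    def build_useradd_flags(**kwargs):
        if "uid" in kwargs:
            kwargs["uid"] = str(kwargs["uid"])  # the fix: normalize int -> str
        flags = []
        for key, val in sorted(kwargs.items()):
            if val and isinstance(val, str):  # non-str values were skipped here
                flags.extend(["--" + key, val])
        return flags

    assert build_useradd_flags(uid=1742) == ["--uid", "1742"]
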
diff --git a/cloudinit/sources/DataSourceAzure.py b/cloudinit/sources/DataSourceAzure.py
index 359dfbde..294bcb4a 100755
--- a/cloudinit/sources/DataSourceAzure.py
+++ b/cloudinit/sources/DataSourceAzure.py
@@ -7,6 +7,7 @@
import base64
import crypt
import datetime
+import functools
import os
import os.path
import re
@@ -56,7 +57,6 @@ DEFAULT_FS = "ext4"
# DMI chassis-asset-tag is set static for all azure instances
AZURE_CHASSIS_ASSET_TAG = "7783-7084-3265-9085-8269-3286-77"
REPROVISION_MARKER_FILE = "/var/lib/cloud/data/poll_imds"
-REPROVISION_NIC_DETACHED_MARKER_FILE = "/var/lib/cloud/data/nic_detached"
REPORTED_READY_MARKER_FILE = "/var/lib/cloud/data/reported_ready"
AGENT_SEED_DIR = "/var/lib/waagent"
DEFAULT_PROVISIONING_ISO_DEV = "/dev/sr0"
@@ -68,6 +68,17 @@ IMDS_URL = "http://169.254.169.254/metadata"
IMDS_VER_MIN = "2019-06-01"
IMDS_VER_WANT = "2021-08-01"
IMDS_EXTENDED_VER_MIN = "2021-03-01"
+IMDS_RETRY_CODES = (
+ 404, # not found (yet)
+ 410, # gone / unavailable (yet)
+ 429, # rate-limited/throttled
+ 500, # server error
+)
+imds_readurl_exception_callback = functools.partial(
+ retry_on_url_exc,
+ retry_codes=IMDS_RETRY_CODES,
+ retry_instances=(requests.Timeout,),
+)
class MetadataType(Enum):
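
imds_readurl_exception_callback pre-binds the Azure retry policy onto the generic retry_on_url_exc callback (see the url_helper.py hunk below), producing the two-argument (msg, exc) callable that readurl() expects as exception_cb. A stdlib-only sketch of the retry loop such a callback drives; this is a simplification, not the real readurl():

    import time

    def fetch_with_retries(fetch, exception_cb, retries, sleep=1):
        """Retry fetch() while exception_cb judges the error retryable."""
        for attempt in range(retries + 1):
            try:
                return fetch()
            except Exception as exc:
                if attempt == retries or not exception_cb("fetch failed", exc):
                    raise
                time.sleep(sleep)  # the unit tests below expect sleep(1) per retry
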
@@ -726,44 +737,49 @@ class DataSourceAzure(sources.DataSource):
def get_imds_data_with_api_fallback(
self,
*,
- retries,
- md_type=MetadataType.ALL,
- exc_cb=retry_on_url_exc,
- infinite=False,
- ):
- """
- Wrapper for get_metadata_from_imds so that we can have flexibility
- in which IMDS api-version we use. If a particular instance of IMDS
- does not have the api version that is desired, we want to make
- this fault tolerant and fall back to a good known minimum api
- version.
- """
- for _ in range(retries):
- try:
- LOG.info("Attempting IMDS api-version: %s", IMDS_VER_WANT)
- return get_metadata_from_imds(
- retries=0,
- md_type=md_type,
- api_version=IMDS_VER_WANT,
- exc_cb=exc_cb,
- )
- except UrlError as err:
- LOG.info("UrlError with IMDS api-version: %s", IMDS_VER_WANT)
- if err.code == 400:
- log_msg = "Fall back to IMDS api-version: {}".format(
- IMDS_VER_MIN
- )
- report_diagnostic_event(log_msg, logger_func=LOG.info)
- break
+ retries: int,
+ md_type: MetadataType = MetadataType.ALL,
+ exc_cb=imds_readurl_exception_callback,
+ infinite: bool = False,
+ ) -> dict:
+ """Fetch metadata from IMDS using IMDS_VER_WANT API version.
- LOG.info("Using IMDS api-version: %s", IMDS_VER_MIN)
- return get_metadata_from_imds(
- retries=retries,
- md_type=md_type,
- api_version=IMDS_VER_MIN,
- exc_cb=exc_cb,
- infinite=infinite,
- )
+ Falls back to IMDS_VER_MIN version if IMDS returns a 400 error code,
+ indicating that IMDS_VER_WANT is unsupported.
+
+ :return: Parsed metadata dictionary or empty dict on error.
+ """
+ LOG.info("Attempting IMDS api-version: %s", IMDS_VER_WANT)
+ try:
+ return get_metadata_from_imds(
+ retries=retries,
+ md_type=md_type,
+ api_version=IMDS_VER_WANT,
+ exc_cb=exc_cb,
+ infinite=infinite,
+ )
+ except UrlError as error:
+ LOG.info("UrlError with IMDS api-version: %s", IMDS_VER_WANT)
+ # Fall back if HTTP code is 400, otherwise return empty dict.
+ if error.code != 400:
+ return {}
+
+ log_msg = "Fall back to IMDS api-version: {}".format(IMDS_VER_MIN)
+ report_diagnostic_event(log_msg, logger_func=LOG.info)
+ try:
+ return get_metadata_from_imds(
+ retries=retries,
+ md_type=md_type,
+ api_version=IMDS_VER_MIN,
+ exc_cb=exc_cb,
+ infinite=infinite,
+ )
+ except UrlError as error:
+ report_diagnostic_event(
+ "Failed to fetch IMDS metadata: %s" % error,
+ logger_func=LOG.error,
+ )
+ return {}
def device_name_to_device(self, name):
return self.ds_cfg["disk_aliases"].get(name)
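
The rewrite flattens the old bounded retry loop into a try/fallback/except chain: HTTP 400 (api-version unsupported) triggers a single fallback to IMDS_VER_MIN, and any other UrlError now yields an empty dict instead of propagating. A condensed sketch of that control flow, assuming cloudinit.url_helper.UrlError semantics:

    from cloudinit.url_helper import UrlError

    def imds_with_fallback(fetch, want="2021-08-01", minimum="2019-06-01"):
        """fetch(api_version) returns a metadata dict or raises UrlError."""
        try:
            return fetch(want)
        except UrlError as error:
            if error.code != 400:      # transient or fatal error: give up
                return {}
            try:
                return fetch(minimum)  # 400 means `want` is unsupported
            except UrlError:
                return {}
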
@@ -881,11 +897,6 @@ class DataSourceAzure(sources.DataSource):
"The preprovisioned nic %s is detached" % ifname,
logger_func=LOG.warning,
)
- path = REPROVISION_NIC_DETACHED_MARKER_FILE
- LOG.info("Creating a marker file for nic detached: %s", path)
- util.write_file(
- path, "{pid}: {time}\n".format(pid=os.getpid(), time=time())
- )
except AssertionError as error:
report_diagnostic_event(str(error), logger_func=LOG.error)
raise
@@ -1153,42 +1164,10 @@ class DataSourceAzure(sources.DataSource):
nl_sock = None
try:
nl_sock = netlink.create_bound_netlink_socket()
-
- report_ready_marker_present = bool(
- os.path.isfile(REPORTED_READY_MARKER_FILE)
- )
-
- # Report ready if the marker file is not already present.
- # The nic of the preprovisioned vm gets hot-detached as soon as
- # we report ready. So no need to save the dhcp context.
- if not os.path.isfile(REPORTED_READY_MARKER_FILE):
- self._report_ready_for_pps()
-
- has_nic_been_detached = bool(
- os.path.isfile(REPROVISION_NIC_DETACHED_MARKER_FILE)
- )
-
- if not has_nic_been_detached:
- LOG.info("NIC has not been detached yet.")
- self._teardown_ephemeral_networking()
- self._wait_for_nic_detach(nl_sock)
-
- # If we know that the preprovisioned nic has been detached, and we
- # still have a fallback nic, then it means the VM must have
- # rebooted as part of customer assignment, and all the nics have
- # already been attached by the Azure platform. So there is no need
- # to wait for nics to be hot-attached.
- if not self.fallback_interface:
- self._wait_for_hot_attached_nics(nl_sock)
- else:
- report_diagnostic_event(
- "Skipping waiting for nic attach "
- "because we already have a fallback "
- "interface. Report Ready marker "
- "present before detaching nics: %s"
- % report_ready_marker_present,
- logger_func=LOG.info,
- )
+ self._report_ready_for_pps()
+ self._teardown_ephemeral_networking()
+ self._wait_for_nic_detach(nl_sock)
+ self._wait_for_hot_attached_nics(nl_sock)
except netlink.NetlinkCreateSocketError as e:
report_diagnostic_event(str(e), logger_func=LOG.warning)
raise
@@ -1506,7 +1485,6 @@ class DataSourceAzure(sources.DataSource):
"""Cleanup any marker files."""
util.del_file(REPORTED_READY_MARKER_FILE)
util.del_file(REPROVISION_MARKER_FILE)
- util.del_file(REPROVISION_NIC_DETACHED_MARKER_FILE)
@azure_ds_telemetry_reporter
def activate(self, cfg, is_new_instance):
@@ -2310,7 +2288,7 @@ def get_metadata_from_imds(
retries,
md_type=MetadataType.ALL,
api_version=IMDS_VER_MIN,
- exc_cb=retry_on_url_exc,
+ exc_cb=imds_readurl_exception_callback,
infinite=False,
):
"""Query Azure's instance metadata service, returning a dictionary.
diff --git a/cloudinit/url_helper.py b/cloudinit/url_helper.py
index 5b2f2ef9..c577e8da 100644
--- a/cloudinit/url_helper.py
+++ b/cloudinit/url_helper.py
@@ -639,16 +639,22 @@ def oauth_headers(
return signed_headers
-def retry_on_url_exc(msg, exc):
- """readurl exception_cb that will retry on NOT_FOUND and Timeout.
+def retry_on_url_exc(
+ msg, exc, *, retry_codes=(NOT_FOUND,), retry_instances=(requests.Timeout,)
+):
+ """Configurable retry exception callback for readurl().
+
+ :param retry_codes: Codes to retry on. Defaults to 404.
+ :param retry_instances: Exception types to retry on. Defaults to
+ requests.Timeout.
- Returns False to raise the exception from readurl, True to retry.
+ :returns: False to raise the exception from readurl(), True to retry.
"""
if not isinstance(exc, UrlError):
return False
- if exc.code == NOT_FOUND:
+ if exc.code in retry_codes:
return True
- if exc.cause and isinstance(exc.cause, requests.Timeout):
+ if exc.cause and isinstance(exc.cause, retry_instances):
return True
return False
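
The defaults preserve the original behavior (retry on 404 and requests.Timeout) while the new keyword-only parameters let callers widen the policy without writing a wrapper. A usage sketch, with readurl()'s retries/exception_cb parameters matching the expectations asserted in the Azure tests later in this patch:

    import functools
    import requests
    from cloudinit.url_helper import readurl, retry_on_url_exc

    # Default policy: retry only on 404 and requests.Timeout.
    readurl("http://169.254.169.254/metadata", retries=3,
            exception_cb=retry_on_url_exc)

    # Widened policy, pre-bound the same way DataSourceAzure.py does above.
    imds_cb = functools.partial(
        retry_on_url_exc,
        retry_codes=(404, 410, 429, 500),
        retry_instances=(requests.Timeout,),
    )
    readurl("http://169.254.169.254/metadata", retries=3, exception_cb=imds_cb)
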
diff --git a/cloudinit/util.py b/cloudinit/util.py
index 569fc215..1fadd196 100644
--- a/cloudinit/util.py
+++ b/cloudinit/util.py
@@ -1886,7 +1886,7 @@ def is_link(path):
def sym_link(source, link, force=False):
LOG.debug("Creating symbolic link from %r => %r", link, source)
- if force and os.path.exists(link):
+ if force and os.path.lexists(link):
del_file(link)
os.symlink(source, link)
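
os.path.exists() follows the symlink, so it reports False for a dangling link; the old check therefore skipped del_file() and os.symlink() failed with FileExistsError when force-recreating such a link. os.path.lexists() tests the link itself. A minimal stdlib demonstration of the distinction:

    import os
    import tempfile

    d = tempfile.mkdtemp()
    link = os.path.join(d, "link")
    os.symlink(os.path.join(d, "missing-target"), link)  # dangling symlink

    print(os.path.exists(link))   # False: follows the link; target is absent
    print(os.path.lexists(link))  # True: the link inode itself exists
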
diff --git a/debian/apport-launcher.py b/debian/apport-launcher.py
index 30fea31b..9fce9997 100644
--- a/debian/apport-launcher.py
+++ b/debian/apport-launcher.py
@@ -1,6 +1,7 @@
-'''Wrapper for cloudinit apport interface'''
+"""Wrapper for cloudinit apport interface"""
from cloudinit.apport import add_info as cloudinit_add_info
+
def add_info(report, ui):
return cloudinit_add_info(report, ui)
diff --git a/debian/changelog b/debian/changelog
index d7f7f572..1b15db4f 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,28 @@
+cloud-init (22.1-14-g2e17a0d6-0ubuntu1~22.04.1) jammy; urgency=medium
+
+ * debian/apport-launcher.py: Fix string and whitespace formatting
+ * New upstream snapshot.
+ - check for existing symlink while force creating symlink (#1281)
+ [Shreenidhi Shedi]
+ - Do not silently ignore integer uid (#1280) (LP: #1875772)
+ - tests: create a IPv4/IPv6 VPC in Ec2 integration tests (#1291)
+ - Integration test fix ppa (#1296)
+ - tests: on official EC2. cloud-id actually startswith aws not ec2 (#1289)
+ - test_ppa_source: accept both http and https URLs (#1292)
+ [Paride Legovini]
+ - Fix apt test on azure
+ - add "lkundrak" as contributor [Lubomir Rintel]
+ - Holmanb/integration test fix ppa (#1287)
+ - Include missing subcommand in manpage (#1279)
+ - Clean up artifacts from pytest, packaging, release with make clean
+ (#1277)
+ - sources/azure: ensure retries on IMDS request failure (#1271)
+ [Chris Patterson]
+ - sources/azure: removed unused saveable PPS paths (#1268)
+ [Chris Patterson]
+
+ -- Brett Holman <brett.holman@canonical.com> Thu, 24 Feb 2022 16:40:00 -0700
+
cloud-init (22.1-1-gb3d9acdd-0ubuntu1~22.04.1) jammy; urgency=medium
* New upstream snapshot.
diff --git a/doc/man/cloud-init.1 b/doc/man/cloud-init.1
index 3fde4148..2cb63135 100644
--- a/doc/man/cloud-init.1
+++ b/doc/man/cloud-init.1
@@ -4,7 +4,8 @@
cloud-init \- Cloud instance initialization
.SH SYNOPSIS
-.BR "cloud-init" " [-h] [-d] [-f FILES] [--force] [-v] {init,modules,single,query,dhclient-hook,features,analyze,collect-logs,clean,status}"
+.BR "cloud-init" " [-h] [-d] [-f FILES] [--force] [-v]
+{init,modules,single,query,dhclient-hook,features,analyze,devel,collect-logs,clean,status}"
.SH DESCRIPTION
Cloud-init provides a mechanism for cloud instance initialization.
@@ -54,6 +55,10 @@ Collect and tar all cloud-init debug info.
Remove logs and artifacts so cloud-init can re-run.
.TP
+.B "devel"
+Run development tools. See help output for subcommand details.
+
+.TP
.B "dhclient-hook"
Run the dhclient hook to record network info.
diff --git a/tests/integration_tests/clouds.py b/tests/integration_tests/clouds.py
index 83bc6af6..14eaf9c3 100644
--- a/tests/integration_tests/clouds.py
+++ b/tests/integration_tests/clouds.py
@@ -204,6 +204,14 @@ class Ec2Cloud(IntegrationCloud):
def _get_cloud_instance(self):
return EC2(tag="ec2-integration-test")
+ def _perform_launch(self, launch_kwargs, **kwargs):
+ """Use a dual-stack VPC for cloud-init integration testing."""
+ launch_kwargs["vpc"] = self.cloud_instance.get_or_create_vpc(
+ name="ec2-cloud-init-integration"
+ )
+ pycloudlib_instance = self.cloud_instance.launch(**launch_kwargs)
+ return pycloudlib_instance
+
class GceCloud(IntegrationCloud):
datasource = "gce"
diff --git a/tests/integration_tests/modules/test_apt.py b/tests/integration_tests/modules/test_apt.py
index 6b3a8b7c..2e07b33b 100644
--- a/tests/integration_tests/modules/test_apt.py
+++ b/tests/integration_tests/modules/test_apt.py
@@ -152,9 +152,10 @@ class TestApt:
"/etc/apt/sources.list.d/"
"simplestreams-dev-ubuntu-trunk-{}.list".format(release)
)
-
assert (
- "http://ppa.launchpad.net/simplestreams-dev/trunk/ubuntu"
+ "://ppa.launchpad.net/simplestreams-dev/trunk/ubuntu"
+ in ppa_path_contents
+ or "://ppa.launchpadcontent.net/simplestreams-dev/trunk/ubuntu"
in ppa_path_contents
)
@@ -267,9 +268,13 @@ class TestDefaults:
sources_list = class_client.read_from_file("/etc/apt/sources.list")
# 3 lines from main, universe, and multiverse
- sec_url = "deb http://security.ubuntu.com/ubuntu"
+ release = ImageSpecification.from_os_image().release
+ sec_url = f"deb http://security.ubuntu.com/ubuntu {release}-security"
if class_client.settings.PLATFORM == "azure":
- sec_url = "deb http://azure.archive.ubuntu.com/ubuntu/"
+ sec_url = (
+ f"deb http://azure.archive.ubuntu.com/ubuntu/"
+ f" {release}-security"
+ )
sec_src_url = sec_url.replace("deb ", "# deb-src ")
assert 3 == sources_list.count(sec_url)
assert 3 == sources_list.count(sec_src_url)
diff --git a/tests/integration_tests/modules/test_combined.py b/tests/integration_tests/modules/test_combined.py
index 7a9a6e27..05499580 100644
--- a/tests/integration_tests/modules/test_combined.py
+++ b/tests/integration_tests/modules/test_combined.py
@@ -309,10 +309,10 @@ class TestCombined:
v1_data = data["v1"]
assert v1_data["cloud_name"] == "aws"
assert v1_data["platform"] == "ec2"
- # Different regions will show up as ec2-(gov|china)
- assert v1_data["cloud_id"].startswith("ec2")
+ # Different regions will show up as aws-(gov|china)
+ assert v1_data["cloud_id"].startswith("aws")
assert f"{v1_data['cloud_id']}" == client.read_from_file(
- "/run/cloud-init/cloud-id-ec2"
+ "/run/cloud-init/cloud-id-aws"
)
assert v1_data["subplatform"].startswith("metadata")
assert (
diff --git a/tests/integration_tests/modules/test_users_groups.py b/tests/integration_tests/modules/test_users_groups.py
index fddff681..8fa37bb4 100644
--- a/tests/integration_tests/modules/test_users_groups.py
+++ b/tests/integration_tests/modules/test_users_groups.py
@@ -38,6 +38,10 @@ AHWYPYb2FT.lbioDm2RrkJPb9BZMN1O/
gecos: Magic Cloud App Daemon User
inactive: true
system: true
+ - name: eric
+ uid: 1742
+ - name: archivist
+ uid: '1743'
"""
@@ -75,6 +79,10 @@ class TestUsersGroups:
),
# Test the cloudy user
(["passwd", "cloudy"], r"cloudy:x:[0-9]{3,4}:"),
+        # Test int uid
+        (["passwd", "eric"], r"eric:x:1742:"),
+        # Test str uid
+        (["passwd", "archivist"], r"archivist:x:1743:"),
],
)
def test_users_groups(self, regex, getent_args, class_client):
diff --git a/tests/unittests/sources/test_azure.py b/tests/unittests/sources/test_azure.py
index 5f956a63..551de59f 100644
--- a/tests/unittests/sources/test_azure.py
+++ b/tests/unittests/sources/test_azure.py
@@ -3,6 +3,7 @@
import copy
import crypt
import json
+import logging
import os
import stat
import xml.etree.ElementTree as ET
@@ -152,12 +153,24 @@ def mock_readurl():
@pytest.fixture
+def mock_requests_session_request():
+ with mock.patch("requests.Session.request", autospec=True) as m:
+ yield m
+
+
+@pytest.fixture
def mock_subp_subp():
with mock.patch(MOCKPATH + "subp.subp", side_effect=[]) as m:
yield m
@pytest.fixture
+def mock_url_helper_time_sleep():
+ with mock.patch("cloudinit.url_helper.time.sleep", autospec=True) as m:
+ yield m
+
+
+@pytest.fixture
def mock_util_ensure_dir():
with mock.patch(
MOCKPATH + "util.ensure_dir",
@@ -2220,10 +2233,11 @@ scbus-1 on xpt0 bus 0
assert m_get_metadata_from_imds.mock_calls == [
mock.call(
- retries=0,
+ retries=10,
md_type=dsaz.MetadataType.ALL,
api_version="2021-08-01",
exc_cb=mock.ANY,
+ infinite=False,
),
mock.call(
retries=10,
@@ -2250,10 +2264,11 @@ scbus-1 on xpt0 bus 0
assert m_get_metadata_from_imds.mock_calls == [
mock.call(
- retries=0,
+ retries=10,
md_type=dsaz.MetadataType.ALL,
api_version="2021-08-01",
exc_cb=mock.ANY,
+ infinite=False,
)
]
@@ -2942,126 +2957,32 @@ class TestPreprovisioningHotAttachNics(CiTestCase):
dsaz.BUILTIN_DS_CONFIG["data_dir"] = self.waagent_d
self.paths = helpers.Paths({"cloud_dir": self.tmp})
- @mock.patch(
- "cloudinit.sources.helpers.netlink.wait_for_nic_detach_event",
- autospec=True,
- )
@mock.patch(MOCKPATH + "util.write_file", autospec=True)
- def test_nic_detach_writes_marker(self, m_writefile, m_detach):
- """When we detect that a nic gets detached, we write a marker for it"""
- dsa = dsaz.DataSourceAzure({}, distro=None, paths=self.paths)
- nl_sock = mock.MagicMock()
- dsa._wait_for_nic_detach(nl_sock)
- m_detach.assert_called_with(nl_sock)
- self.assertEqual(1, m_detach.call_count)
- m_writefile.assert_called_with(
- dsaz.REPROVISION_NIC_DETACHED_MARKER_FILE, mock.ANY
- )
-
- @mock.patch(MOCKPATH + "util.write_file", autospec=True)
- @mock.patch(MOCKPATH + "DataSourceAzure.fallback_interface")
@mock.patch(MOCKPATH + "DataSourceAzure._report_ready")
+ @mock.patch(MOCKPATH + "DataSourceAzure._wait_for_hot_attached_nics")
@mock.patch(MOCKPATH + "DataSourceAzure._wait_for_nic_detach")
def test_detect_nic_attach_reports_ready_and_waits_for_detach(
- self, m_detach, m_report_ready, m_fallback_if, m_writefile
+ self,
+ m_detach,
+ m_wait_for_hot_attached_nics,
+ m_report_ready,
+ m_writefile,
):
"""Report ready first and then wait for nic detach"""
dsa = dsaz.DataSourceAzure({}, distro=None, paths=self.paths)
dsa._wait_for_all_nics_ready()
- m_fallback_if.return_value = "Dummy interface"
self.assertEqual(1, m_report_ready.call_count)
+ self.assertEqual(1, m_wait_for_hot_attached_nics.call_count)
self.assertEqual(1, m_detach.call_count)
self.assertEqual(1, m_writefile.call_count)
m_writefile.assert_called_with(
dsaz.REPORTED_READY_MARKER_FILE, mock.ANY
)
- @mock.patch("os.path.isfile")
- @mock.patch(MOCKPATH + "DataSourceAzure.fallback_interface")
- @mock.patch(MOCKPATH + "EphemeralDHCPv4", autospec=True)
- @mock.patch(MOCKPATH + "DataSourceAzure._report_ready")
- @mock.patch(MOCKPATH + "DataSourceAzure._wait_for_nic_detach")
- def test_detect_nic_attach_skips_report_ready_when_marker_present(
- self, m_detach, m_report_ready, m_dhcp, m_fallback_if, m_isfile
- ):
- """Skip reporting ready if we already have a marker file."""
- dsa = dsaz.DataSourceAzure({}, distro=None, paths=self.paths)
-
- def isfile(key):
- return key == dsaz.REPORTED_READY_MARKER_FILE
-
- m_isfile.side_effect = isfile
- dsa._wait_for_all_nics_ready()
- m_fallback_if.return_value = "Dummy interface"
- self.assertEqual(0, m_report_ready.call_count)
- self.assertEqual(0, m_dhcp.call_count)
- self.assertEqual(1, m_detach.call_count)
-
- @mock.patch("os.path.isfile")
- @mock.patch(MOCKPATH + "DataSourceAzure.fallback_interface")
- @mock.patch(MOCKPATH + "EphemeralDHCPv4")
+ @mock.patch(MOCKPATH + "util.write_file", autospec=True)
@mock.patch(MOCKPATH + "DataSourceAzure._report_ready")
- @mock.patch(MOCKPATH + "DataSourceAzure._wait_for_nic_detach")
- def test_detect_nic_attach_skips_nic_detach_when_marker_present(
- self, m_detach, m_report_ready, m_dhcp, m_fallback_if, m_isfile
- ):
- """Skip wait for nic detach if it already happened."""
- dsa = dsaz.DataSourceAzure({}, distro=None, paths=self.paths)
-
- m_isfile.return_value = True
- dsa._wait_for_all_nics_ready()
- m_fallback_if.return_value = "Dummy interface"
- self.assertEqual(0, m_report_ready.call_count)
- self.assertEqual(0, m_dhcp.call_count)
- self.assertEqual(0, m_detach.call_count)
-
- @mock.patch(MOCKPATH + "DataSourceAzure.wait_for_link_up", autospec=True)
- @mock.patch("cloudinit.sources.helpers.netlink.wait_for_nic_attach_event")
- @mock.patch("cloudinit.sources.net.find_fallback_nic")
- @mock.patch(MOCKPATH + "get_metadata_from_imds")
- @mock.patch(MOCKPATH + "EphemeralDHCPv4", autospec=True)
- @mock.patch(MOCKPATH + "DataSourceAzure._wait_for_nic_detach")
- @mock.patch("os.path.isfile")
- def test_wait_for_nic_attach_if_no_fallback_interface(
- self,
- m_isfile,
- m_detach,
- m_dhcpv4,
- m_imds,
- m_fallback_if,
- m_attach,
- m_link_up,
- ):
- """Wait for nic attach if we do not have a fallback interface"""
- dsa = dsaz.DataSourceAzure({}, distro=None, paths=self.paths)
- lease = {
- "interface": "eth9",
- "fixed-address": "192.168.2.9",
- "routers": "192.168.2.1",
- "subnet-mask": "255.255.255.0",
- "unknown-245": "624c3620",
- }
-
- m_isfile.return_value = True
- m_attach.return_value = "eth0"
- dhcp_ctx = mock.MagicMock(lease=lease)
- dhcp_ctx.obtain_lease.return_value = lease
- m_dhcpv4.return_value = dhcp_ctx
- m_imds.return_value = IMDS_NETWORK_METADATA
- m_fallback_if.return_value = None
-
- dsa._wait_for_all_nics_ready()
-
- self.assertEqual(0, m_detach.call_count)
- self.assertEqual(1, m_attach.call_count)
- self.assertEqual(1, m_dhcpv4.call_count)
- self.assertEqual(1, m_imds.call_count)
- self.assertEqual(1, m_link_up.call_count)
- m_link_up.assert_called_with(mock.ANY, "eth0")
-
@mock.patch(MOCKPATH + "DataSourceAzure.wait_for_link_up")
@mock.patch("cloudinit.sources.helpers.netlink.wait_for_nic_attach_event")
- @mock.patch("cloudinit.sources.net.find_fallback_nic")
@mock.patch(MOCKPATH + "DataSourceAzure.get_imds_data_with_api_fallback")
@mock.patch(MOCKPATH + "EphemeralDHCPv4", autospec=True)
@mock.patch(MOCKPATH + "DataSourceAzure._wait_for_nic_detach")
@@ -3072,9 +2993,10 @@ class TestPreprovisioningHotAttachNics(CiTestCase):
m_detach,
m_dhcpv4,
m_imds,
- m_fallback_if,
m_attach,
m_link_up,
+ m_report_ready,
+ m_writefile,
):
"""Wait for nic attach if we do not have a fallback interface"""
dsa = dsaz.DataSourceAzure({}, distro=None, paths=self.paths)
@@ -3103,11 +3025,10 @@ class TestPreprovisioningHotAttachNics(CiTestCase):
dhcp_ctx.obtain_lease.return_value = lease
m_dhcpv4.return_value = dhcp_ctx
m_imds.side_effect = [md]
- m_fallback_if.return_value = None
dsa._wait_for_all_nics_ready()
- self.assertEqual(0, m_detach.call_count)
+ self.assertEqual(1, m_detach.call_count)
self.assertEqual(2, m_attach.call_count)
# DHCP and network metadata calls will only happen on the primary NIC.
self.assertEqual(1, m_dhcpv4.call_count)
@@ -3720,6 +3641,195 @@ class TestRandomSeed(CiTestCase):
self.assertEqual(deserialized["seed"], result)
+def fake_http_error_for_code(status_code: int):
+ response_failure = requests.Response()
+ response_failure.status_code = status_code
+ return requests.exceptions.HTTPError(
+ "fake error",
+ response=response_failure,
+ )
+
+
+@pytest.mark.parametrize(
+ "md_type,expected_url",
+ [
+ (
+ dsaz.MetadataType.ALL,
+ "http://169.254.169.254/metadata/instance?"
+ "api-version=2021-08-01&extended=true",
+ ),
+ (
+ dsaz.MetadataType.NETWORK,
+ "http://169.254.169.254/metadata/instance/network?"
+ "api-version=2021-08-01",
+ ),
+ (
+ dsaz.MetadataType.REPROVISION_DATA,
+ "http://169.254.169.254/metadata/reprovisiondata?"
+ "api-version=2021-08-01",
+ ),
+ ],
+)
+class TestIMDS:
+ def test_basic_scenarios(
+ self, azure_ds, caplog, mock_readurl, md_type, expected_url
+ ):
+ fake_md = {"foo": {"bar": []}}
+ mock_readurl.side_effect = [
+ mock.MagicMock(contents=json.dumps(fake_md).encode()),
+ ]
+
+ md = azure_ds.get_imds_data_with_api_fallback(
+ retries=5,
+ md_type=md_type,
+ )
+
+ assert md == fake_md
+ assert mock_readurl.mock_calls == [
+ mock.call(
+ expected_url,
+ timeout=2,
+ headers={"Metadata": "true"},
+ retries=5,
+ exception_cb=dsaz.imds_readurl_exception_callback,
+ infinite=False,
+ ),
+ ]
+
+ warnings = [
+ x.message for x in caplog.records if x.levelno == logging.WARNING
+ ]
+ assert warnings == []
+
+ @pytest.mark.parametrize(
+ "error",
+ [
+ fake_http_error_for_code(404),
+ fake_http_error_for_code(410),
+ fake_http_error_for_code(429),
+ fake_http_error_for_code(500),
+ requests.Timeout("Fake connection timeout"),
+ ],
+ )
+ def test_will_retry_errors(
+ self,
+ azure_ds,
+ caplog,
+ md_type,
+ expected_url,
+ mock_requests_session_request,
+ mock_url_helper_time_sleep,
+ error,
+ ):
+ fake_md = {"foo": {"bar": []}}
+ mock_requests_session_request.side_effect = [
+ error,
+ mock.Mock(content=json.dumps(fake_md)),
+ ]
+
+ md = azure_ds.get_imds_data_with_api_fallback(
+ retries=5,
+ md_type=md_type,
+ )
+
+ assert md == fake_md
+ assert len(mock_requests_session_request.mock_calls) == 2
+ assert mock_url_helper_time_sleep.mock_calls == [mock.call(1)]
+
+ warnings = [
+ x.message for x in caplog.records if x.levelno == logging.WARNING
+ ]
+ assert warnings == []
+
+ @pytest.mark.parametrize("retries", [0, 1, 5, 10])
+ @pytest.mark.parametrize(
+ "error",
+ [
+ fake_http_error_for_code(404),
+ fake_http_error_for_code(410),
+ fake_http_error_for_code(429),
+ fake_http_error_for_code(500),
+ requests.Timeout("Fake connection timeout"),
+ ],
+ )
+ def test_retry_until_failure(
+ self,
+ azure_ds,
+ caplog,
+ md_type,
+ expected_url,
+ mock_requests_session_request,
+ mock_url_helper_time_sleep,
+ error,
+ retries,
+ ):
+ mock_requests_session_request.side_effect = [error] * (retries + 1)
+
+ assert (
+ azure_ds.get_imds_data_with_api_fallback(
+ retries=retries,
+ md_type=md_type,
+ )
+ == {}
+ )
+
+ assert len(mock_requests_session_request.mock_calls) == (retries + 1)
+ assert (
+ mock_url_helper_time_sleep.mock_calls == [mock.call(1)] * retries
+ )
+
+ warnings = [
+ x.message for x in caplog.records if x.levelno == logging.WARNING
+ ]
+ assert warnings == [
+ "Ignoring IMDS instance metadata. "
+ "Get metadata from IMDS failed: %s" % error
+ ]
+
+ @pytest.mark.parametrize(
+ "error",
+ [
+ fake_http_error_for_code(403),
+ fake_http_error_for_code(501),
+ requests.ConnectionError("Fake Network Unreachable"),
+ ],
+ )
+ def test_will_not_retry_errors(
+ self,
+ azure_ds,
+ caplog,
+ md_type,
+ expected_url,
+ mock_requests_session_request,
+ mock_url_helper_time_sleep,
+ error,
+ ):
+ fake_md = {"foo": {"bar": []}}
+ mock_requests_session_request.side_effect = [
+ error,
+ mock.Mock(content=json.dumps(fake_md)),
+ ]
+
+ assert (
+ azure_ds.get_imds_data_with_api_fallback(
+ retries=5,
+ md_type=md_type,
+ )
+ == {}
+ )
+
+ assert len(mock_requests_session_request.mock_calls) == 1
+ assert mock_url_helper_time_sleep.mock_calls == []
+
+ warnings = [
+ x.message for x in caplog.records if x.levelno == logging.WARNING
+ ]
+ assert warnings == [
+ "Ignoring IMDS instance metadata. "
+ "Get metadata from IMDS failed: %s" % error
+ ]
+
+
class TestProvisioning:
@pytest.fixture(autouse=True)
def provisioning_setup(
@@ -3816,8 +3926,8 @@ class TestProvisioning:
"api-version=2021-08-01&extended=true",
timeout=2,
headers={"Metadata": "true"},
- retries=0,
- exception_cb=dsaz.retry_on_url_exc,
+ retries=10,
+ exception_cb=dsaz.imds_readurl_exception_callback,
infinite=False,
),
]
@@ -3886,8 +3996,8 @@ class TestProvisioning:
"api-version=2021-08-01&extended=true",
timeout=2,
headers={"Metadata": "true"},
- retries=0,
- exception_cb=dsaz.retry_on_url_exc,
+ retries=10,
+ exception_cb=dsaz.imds_readurl_exception_callback,
infinite=False,
),
mock.call(
@@ -3904,8 +4014,8 @@ class TestProvisioning:
"api-version=2021-08-01&extended=true",
timeout=2,
headers={"Metadata": "true"},
- retries=0,
- exception_cb=dsaz.retry_on_url_exc,
+ retries=10,
+ exception_cb=dsaz.imds_readurl_exception_callback,
infinite=False,
),
]
@@ -3976,12 +4086,8 @@ class TestProvisioning:
False, # /var/lib/cloud/data/poll_imds
False, # seed/azure/ovf-env.xml
False, # /var/lib/cloud/data/poll_imds
- False, # /var/lib/cloud/data/reported_ready
- False, # /var/lib/cloud/data/reported_ready
- False, # /var/lib/cloud/data/nic_detached
True, # /var/lib/cloud/data/reported_ready
]
- self.azure_ds._fallback_interface = False
self.azure_ds._get_data()
@@ -3994,9 +4100,6 @@ class TestProvisioning:
),
mock.call("/var/lib/cloud/data/poll_imds"),
mock.call("/var/lib/cloud/data/reported_ready"),
- mock.call("/var/lib/cloud/data/reported_ready"),
- mock.call("/var/lib/cloud/data/nic_detached"),
- mock.call("/var/lib/cloud/data/reported_ready"),
]
assert self.mock_readurl.mock_calls == [
@@ -4005,13 +4108,13 @@ class TestProvisioning:
"api-version=2021-08-01&extended=true",
timeout=2,
headers={"Metadata": "true"},
- retries=0,
- exception_cb=dsaz.retry_on_url_exc,
+ retries=10,
+ exception_cb=dsaz.imds_readurl_exception_callback,
infinite=False,
),
mock.call(
"http://169.254.169.254/metadata/instance/network?"
- "api-version=2019-06-01",
+ "api-version=2021-08-01",
timeout=2,
headers={"Metadata": "true"},
retries=0,
@@ -4032,8 +4135,8 @@ class TestProvisioning:
"api-version=2021-08-01&extended=true",
timeout=2,
headers={"Metadata": "true"},
- retries=0,
- exception_cb=dsaz.retry_on_url_exc,
+ retries=10,
+ exception_cb=dsaz.imds_readurl_exception_callback,
infinite=False,
),
]
diff --git a/tests/unittests/test_util.py b/tests/unittests/test_util.py
index 3765511b..3f3079b0 100644
--- a/tests/unittests/test_util.py
+++ b/tests/unittests/test_util.py
@@ -361,6 +361,52 @@ class TestUtil(CiTestCase):
self.assertEqual(is_rw, False)
+class TestSymlink(CiTestCase):
+ def test_sym_link_simple(self):
+ tmpd = self.tmp_dir()
+ link = self.tmp_path("link", tmpd)
+ target = self.tmp_path("target", tmpd)
+ util.write_file(target, "hello")
+
+ util.sym_link(target, link)
+ self.assertTrue(os.path.exists(link))
+ self.assertTrue(os.path.islink(link))
+
+ def test_sym_link_source_exists(self):
+ tmpd = self.tmp_dir()
+ link = self.tmp_path("link", tmpd)
+ target = self.tmp_path("target", tmpd)
+ util.write_file(target, "hello")
+
+ util.sym_link(target, link)
+ self.assertTrue(os.path.exists(link))
+
+ util.sym_link(target, link, force=True)
+ self.assertTrue(os.path.exists(link))
+
+ def test_sym_link_dangling_link(self):
+ tmpd = self.tmp_dir()
+ link = self.tmp_path("link", tmpd)
+ target = self.tmp_path("target", tmpd)
+
+ util.sym_link(target, link)
+ self.assertTrue(os.path.islink(link))
+ self.assertFalse(os.path.exists(link))
+
+ util.sym_link(target, link, force=True)
+ self.assertTrue(os.path.islink(link))
+ self.assertFalse(os.path.exists(link))
+
+ def test_sym_link_create_dangling(self):
+ tmpd = self.tmp_dir()
+ link = self.tmp_path("link", tmpd)
+ target = self.tmp_path("target", tmpd)
+
+ util.sym_link(target, link)
+ self.assertTrue(os.path.islink(link))
+ self.assertFalse(os.path.exists(link))
+
+
class TestUptime(CiTestCase):
@mock.patch("cloudinit.util.boottime")
@mock.patch("cloudinit.util.os.path.exists")
diff --git a/tools/.github-cla-signers b/tools/.github-cla-signers
index ac157a2f..0567a89d 100644
--- a/tools/.github-cla-signers
+++ b/tools/.github-cla-signers
@@ -47,6 +47,7 @@ jshen28
klausenbusk
KsenijaS
landon912
+lkundrak
lucasmoura
lucendio
lungj