summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorBrett Holman <brett.holman@canonical.com>2022-08-19 10:49:08 -0600
committergit-ubuntu importer <ubuntu-devel-discuss@lists.ubuntu.com>2022-08-19 18:15:09 +0000
commit9ee102d8a1e0c48fa7fdd9f468a12f25b60ebd22 (patch)
treec78cc8081199a63a98d119eb144168dea5fac85a
parenta3b61fe9453bb6f4a9354c977c288aac4457fd27 (diff)
downloadcloud-init-git-9ee102d8a1e0c48fa7fdd9f468a12f25b60ebd22.tar.gz
22.3-0ubuntu1~22.10.1 (patches unapplied)
Imported using git-ubuntu import.
-rw-r--r--.github/workflows/check_format.yml2
-rw-r--r--ChangeLog207
-rwxr-xr-xcloudinit/cmd/main.py2
-rw-r--r--cloudinit/config/cc_ntp.py8
-rw-r--r--cloudinit/config/cc_set_hostname.py4
-rw-r--r--cloudinit/config/schema.py9
-rw-r--r--cloudinit/config/schemas/versions.schema.cloud-config.json2
-rw-r--r--cloudinit/net/netplan.py27
-rw-r--r--cloudinit/sources/DataSourceAzure.py41
-rw-r--r--cloudinit/sources/__init__.py49
-rw-r--r--cloudinit/sources/helpers/azure.py12
-rw-r--r--cloudinit/stages.py41
-rw-r--r--cloudinit/version.py2
-rw-r--r--debian/changelog27
-rw-r--r--integration-requirements.txt2
-rw-r--r--templates/chrony.conf.centos.tmpl45
-rw-r--r--tests/integration_tests/bugs/test_lp1835584.py8
-rw-r--r--tests/integration_tests/conftest.py82
-rw-r--r--tests/integration_tests/modules/test_ansible.py47
-rw-r--r--tests/integration_tests/modules/test_lxd.py31
-rw-r--r--tests/integration_tests/modules/test_wireguard.py16
-rw-r--r--tests/integration_tests/util.py1
-rw-r--r--tests/unittests/config/test_apt_source_v1.py7
-rw-r--r--tests/unittests/config/test_apt_source_v3.py7
-rw-r--r--tests/unittests/config/test_schema.py88
-rw-r--r--tests/unittests/reporting/test_webhook_handler.py5
-rw-r--r--tests/unittests/sources/test_azure.py140
-rw-r--r--tests/unittests/sources/test_azure_helper.py29
-rw-r--r--tests/unittests/sources/test_init.py47
-rw-r--r--tests/unittests/test_data.py14
-rw-r--r--tests/unittests/test_stages.py22
-rw-r--r--tests/unittests/test_upgrade.py4
-rw-r--r--tests/unittests/util.py31
-rw-r--r--tools/.github-cla-signers1
-rw-r--r--tox.ini5
35 files changed, 828 insertions, 237 deletions
diff --git a/.github/workflows/check_format.yml b/.github/workflows/check_format.yml
index 874534c0..1cc2fac2 100644
--- a/.github/workflows/check_format.yml
+++ b/.github/workflows/check_format.yml
@@ -16,7 +16,7 @@ jobs:
matrix:
env: [flake8, mypy, pylint, black, isort]
lint-with:
- - {tip-versions: false, os: ubuntu-18.04}
+ - {tip-versions: false, os: ubuntu-20.04}
- {tip-versions: true, os: ubuntu-latest}
name: Check ${{ matrix.lint-with.tip-versions && 'tip-' || '' }}${{ matrix.env }}
runs-on: ${{ matrix.lint-with.os }}
diff --git a/ChangeLog b/ChangeLog
index d23d129d..0ff76617 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,210 @@
+22.3
+ - sources: obj.pkl cache should be written anyime get_data is run (#1669)
+ - schema: drop release number from version file (#1664)
+ - pycloudlib: bump to quiet azure HTTP info logs (#1668)
+ - test: fix wireguard integration tests (#1666)
+ - Github is deprecating the 18.04 runner starting 12.1 (#1665)
+ - integration tests: Ensure one setup for all tests (#1661)
+ - tests: ansible test fixes (#1660)
+ - Prevent concurrency issue in test_webhook_hander.py (#1658)
+ - Workaround net_setup_link race with udev (#1655) (LP: #1983516)
+ - test: drop erroneous lxd assertion, verify command succeeded (#1657)
+ - Fix Chrony usage on Centos Stream (#1648) [Sven Haardiek] (LP: #1885952)
+ - sources/azure: handle network unreachable errors for savable PPS (#1642)
+ [Chris Patterson]
+ - Return cc_set_hostname to PER_INSTANCE frequency (#1651) (LP: #1983811)
+ - test: Collect integration test time by default (#1638)
+ - test: Drop forced package install hack in lxd integration test (#1649)
+ - schema: Resolve user-data if --system given (#1644)
+ [Alberto Contreras] (LP: #1983306)
+ - test: use fake filesystem to avoid file removal (#1647)
+ [Alberto Contreras]
+ - tox: Fix tip-flake8 and tip-mypy (#1635) [Alberto Contreras]
+ - config: Add wireguard config module (#1570) [Fabian Lichtenegger-Lukas]
+ - tests: can run without azure-cli, tests expect inactive ansible (#1643)
+ - typing: Type UrlResponse.contents (#1633) [Alberto Contreras]
+ - testing: fix references to `DEPRECATED.` (#1641) [Alberto Contreras]
+ - ssh_util: Handle sshd_config.d folder [Alberto Contreras] (LP: #1968873)
+ - schema: Enable deprecations in cc_update_etc_hosts (#1631)
+ [Alberto Contreras]
+ - Add Ansible Config Module (#1579)
+ - util: Support Idle process state in get_proc_ppid() (#1637)
+ - schema: Enable deprecations in cc_growpart (#1628) [Alberto Contreras]
+ - schema: Enable deprecations in cc_users_groups (#1627)
+ [Alberto Contreras]
+ - util: Fix error path and parsing in get_proc_ppid()
+ - main: avoid downloading full contents cmdline urls (#1606)
+ [Alberto Contreras] (LP: #1937319)
+ - schema: Enable deprecations in cc_scripts_vendor (#1629)
+ [Alberto Contreras]
+ - schema: Enable deprecations in cc_set_passwords (#1630)
+ [Alberto Contreras]
+ - sources/azure: add experimental support for preprovisioned os disks
+ (#1622) [Chris Patterson]
+ - Remove configobj a_to_u calls (#1632) [Stefano Rivera]
+ - cc_debug: Drop this module (#1614) [Alberto Contreras]
+ - schema: add aggregate descriptions in anyOf/oneOf (#1636)
+ - testing: migrate test_sshutil to pytest (#1617) [Alberto Contreras]
+ - testing: Fix test_ca_certs integration test (#1626) [Alberto Contreras]
+ - testing: add support for pycloudlib's pro images (#1604)
+ [Alberto Contreras]
+ - testing: migrate test_cc_set_passwords to pytest (#1615)
+ [Alberto Contreras]
+ - network: add system_info network activator cloud.cfg overrides (#1619)
+ (LP: #1958377)
+ - docs: Align git remotes with uss-tableflip setup (#1624)
+ [Alberto Contreras]
+ - testing: cover active config module checks (#1609) [Alberto Contreras]
+ - lxd: lvm avoid thinpool when kernel module absent
+ - lxd: enable MTU configuration in cloud-init
+ - doc: pin doc8 to last passing version
+ - cc_set_passwords fixes (#1590)
+ - Modernise importer.py and type ModuleDetails (#1605) [Alberto Contreras]
+ - config: Def activate_by_schema_keys for t-z (#1613) [Alberto Contreras]
+ - config: define activate_by_schema_keys for p-r mods (#1611)
+ [Alberto Contreras]
+ - clean: add param to remove /etc/machine-id for golden image creation
+ - config: define `activate_by_schema_keys` for a-f mods (#1608)
+ [Alberto Contreras]
+ - config: define activate_by_schema_keys for s mods (#1612)
+ [Alberto Contreras]
+ - sources/azure: reorganize tests for network config (#1586)
+ [Chris Patterson]
+ - config: Define activate_by_schema_keys for g-n mods (#1610)
+ [Alberto Contreras]
+ - meta-schema: add infra to skip inapplicable modules [Alberto Contreras]
+ - sources/azure: don't set cfg["password"] for default user pw (#1592)
+ [Chris Patterson]
+ - schema: activate grub-dpkg deprecations (#1600) [Alberto Contreras]
+ - docs: clarify user password purposes (#1593)
+ - cc_lxd: Add btrfs and lvm lxd storage options (SC-1026) (#1585)
+ - archlinux: Fix distro naming[1] (#1601) [Kristian Klausen]
+ - cc_ubuntu_autoinstall: support live-installer autoinstall config
+ - clean: allow third party cleanup scripts in /etc/cloud/clean.d (#1581)
+ - sources/azure: refactor chassis asset tag handling (#1574)
+ [Chris Patterson]
+ - Add "netcho" as contributor (#1591) [Kaloyan Kotlarski]
+ - testing: drop impish support (#1596) [Alberto Contreras]
+ - black: fix missed formatting issue which landed in main (#1594)
+ - bsd: Don't assume that root user is in root group (#1587)
+ - docs: Fix comment typo regarding use of packages (#1582)
+ [Peter Mescalchin]
+ - Update govc command in VMWare walkthrough (#1576) [manioo8]
+ - Update .github-cla-signers (#1588) [Daniel Mullins]
+ - Rename the openmandriva user to omv (#1575) [Bernhard Rosenkraenzer]
+ - sources/azure: increase read-timeout to 60 seconds for wireserver
+ (#1571) [Chris Patterson]
+ - Resource leak cleanup (#1556)
+ - testing: remove appereances of FakeCloud (#1584) [Alberto Contreras]
+ - Fix expire passwords for hashed passwords (#1577)
+ [Sadegh Hayeri] (LP: #1979065)
+ - mounts: fix suggested_swapsize for > 64GB hosts (#1569) [Steven Stallion]
+ - Update chpasswd schema to deprecate password parsing (#1517)
+ - tox: Remove entries from default envlist (#1578) (LP: #1980854)
+ - tests: add test for parsing static dns for existing devices (#1557)
+ [Jonas Konrad]
+ - testing: port cc_ubuntu_advantage test to pytest (#1559)
+ [Alberto Contreras]
+ - Schema deprecation handling (#1549) [Alberto Contreras]
+ - Enable pytest to run in parallel (#1568)
+ - sources/azure: refactor ovf-env.xml parsing (#1550) [Chris Patterson]
+ - schema: Force stricter validation (#1547)
+ - ubuntu advantage config: http_proxy, https_proxy (#1512)
+ [Fabian Lichtenegger-Lukas]
+ - net: fix interface matching support (#1552) (LP: #1979877)
+ - Fuzz testing jsonchema (#1499) [Alberto Contreras]
+ - testing: Wait for changed boot-id in test_status.py (#1548)
+ - CI: Fix GH pinned-format jobs (#1558) [Alberto Contreras]
+ - Typo fix (#1560) [Jaime Hablutzel]
+ - tests: mock dns lookup that causes long timeouts (#1555)
+ - tox: add unpinned env for do_format and check_format (#1554)
+ - cc_ssh_import_id: Substitute deprecated warn (#1553) [Alberto Contreras]
+ - Remove schema errors from log (#1551) (LP: #1978422) (CVE-2022-2084)
+ - Update WebHookHandler to run as background thread (SC-456) (#1491)
+ (LP: #1910552)
+ - testing: Don't run custom cloud dir test on Bionic (#1542)
+ - bash completion: update schema command (#1543) (LP: #1979547)
+ - CI: add non-blocking run against the linters tip versions (#1531)
+ [Paride Legovini]
+ - Change groups within the users schema to support lists and strings
+ (#1545) [RedKrieg]
+ - make it clear which username should go in the contributing doc (#1546)
+ - Pin setuptools for Travis (SC-1136) (#1540)
+ - Fix LXD datasource crawl when BOOT enabled (#1537)
+ - testing: Fix wrong path in dual stack test (#1538)
+ - cloud-config: honor cloud_dir setting (#1523)
+ [Alberto Contreras] (LP: #1976564)
+ - Add python3-debconf to pkg-deps.json Build-Depends (#1535)
+ [Alberto Contreras]
+ - redhat spec: udev/rules.d lives under /usr/lib on rhel-based systems
+ (#1536)
+ - tests/azure: add test coverage for DisableSshPasswordAuthentication
+ (#1534) [Chris Patterson]
+ - summary: Add david-caro to the cla signers (#1527) [David Caro]
+ - Add support for OpenMandriva (https://openmandriva.org/) (#1520)
+ [Bernhard Rosenkraenzer]
+ - tests/azure: refactor ovf creation (#1533) [Chris Patterson]
+ - Improve DataSourceOVF error reporting when script disabled (#1525) [rong]
+ - tox: integration-tests-jenkins: softfail if only some test failed
+ (#1528) [Paride Legovini]
+ - CI: drop linters from Travis CI (moved to GH Actions) (#1530)
+ [Paride Legovini]
+ - sources/azure: remove unused encoding support for customdata (#1526)
+ [Chris Patterson]
+ - sources/azure: remove unused metadata captured when parsing ovf (#1524)
+ [Chris Patterson]
+ - sources/azure: remove dscfg parsing from ovf-env.xml (#1522)
+ [Chris Patterson]
+ - Remove extra space from ec2 dual stack crawl message (#1521)
+ - tests/azure: use namespaces in generated ovf-env.xml documents (#1519)
+ [Chris Patterson]
+ - setup.py: adjust udev/rules default path (#1513)
+ [Emanuele Giuseppe Esposito]
+ - Add python3-deconf dependency (#1506) [Alberto Contreras]
+ - Change match macadress param for network v2 config (#1518)
+ [Henrique Caricatti Capozzi]
+ - sources/azure: remove unused userdata property from ovf (#1516)
+ [Chris Patterson]
+ - sources/azure: minor refactoring to network config generation (#1497)
+ [Chris Patterson]
+ - net: Implement link-local ephemeral ipv6
+ - Rename function to avoid confusion (#1501)
+ - Fix cc_phone_home requiring 'tries' (#1500) (LP: #1977952)
+ - datasources: replace networking functions with stdlib and cloudinit.net
+ code
+ - Remove xenial references (#1472) [Alberto Contreras]
+ - Oracle ds changes (#1474) [Alberto Contreras] (LP: #1967942)
+ - improve runcmd docs (#1498)
+ - add 3.11-dev to Travis CI (#1493)
+ - Only run github actions on pull request (#1496)
+ - Fix integration test client creation (#1494) [Alberto Contreras]
+ - tox: add link checker environment, fix links (#1480)
+ - cc_ubuntu_advantage: Fix doc (#1487) [Alberto Contreras]
+ - cc_yum_add_repo: Fix repo id canonicalization (#1489)
+ [Alberto Contreras] (LP: #1975818)
+ - Add linitio as contributor in the project (#1488) [Kevin Allioli]
+ - net-convert: use yaml.dump for debugging python NetworkState obj (#1484)
+ (LP: #1975907)
+ - test_schema: no relative $ref URLs, replace $ref with local path (#1486)
+ - cc_set_hostname: do not write "localhost" when no hostname is given
+ (#1453) [Emanuele Giuseppe Esposito]
+ - Update .github-cla-signers (#1478) [rong]
+ - schema: write_files defaults, versions $ref full URL and add vscode
+ (#1479)
+ - docs: fix external links, add one more to the list (#1477)
+ - doc: Document how to change module frequency (#1481)
+ - tests: bump pycloudlib (#1482)
+ - tests: bump pycloudlib pinned commit for kinetic Azure (#1476)
+ - testing: fix test_status.py (#1475)
+ - integration tests: If KEEP_INSTANCE = True, log IP (#1473)
+ - Drop mypy excluded files (#1454) [Alberto Contreras]
+ - Docs additions (#1470)
+ - Add "formatting tests" to Github Actions
+ - Remove unused arguments in function signature (#1471)
+ - Changelog: correct errant classification of LP issues as GH (#1464)
+ - Use Network-Manager and Netplan as default renderers for RHEL and Fedora
+ (#1465) [Emanuele Giuseppe Esposito]
+
22.2
- Fix test due to caplog incompatibility (#1461) [Alberto Contreras]
- Align rhel custom files with upstream (#1431)
diff --git a/cloudinit/cmd/main.py b/cloudinit/cmd/main.py
index 2860126a..6134d7c4 100755
--- a/cloudinit/cmd/main.py
+++ b/cloudinit/cmd/main.py
@@ -809,7 +809,7 @@ def _maybe_persist_instance_data(init):
init.paths.run_dir, sources.INSTANCE_JSON_FILE
)
if not os.path.exists(instance_data_file):
- init.datasource.persist_instance_data()
+ init.datasource.persist_instance_data(write_cache=False)
def _maybe_set_hostname(init, stage, retry_stage):
diff --git a/cloudinit/config/cc_ntp.py b/cloudinit/config/cc_ntp.py
index 20c0ad8e..7974f5b2 100644
--- a/cloudinit/config/cc_ntp.py
+++ b/cloudinit/config/cc_ntp.py
@@ -88,6 +88,14 @@ DISTRO_CLIENT_CONFIG = {
"service_name": "ntpd",
},
},
+ "centos": {
+ "ntp": {
+ "service_name": "ntpd",
+ },
+ "chrony": {
+ "service_name": "chronyd",
+ },
+ },
"debian": {
"chrony": {
"confpath": "/etc/chrony/chrony.conf",
diff --git a/cloudinit/config/cc_set_hostname.py b/cloudinit/config/cc_set_hostname.py
index 2a4c565f..3ea9e4ed 100644
--- a/cloudinit/config/cc_set_hostname.py
+++ b/cloudinit/config/cc_set_hostname.py
@@ -14,9 +14,9 @@ from cloudinit import util
from cloudinit.atomic_helper import write_json
from cloudinit.config.schema import MetaSchema, get_meta_doc
from cloudinit.distros import ALL_DISTROS
-from cloudinit.settings import PER_ALWAYS
+from cloudinit.settings import PER_INSTANCE
-frequency = PER_ALWAYS
+frequency = PER_INSTANCE
MODULE_DESCRIPTION = """\
This module handles setting the system hostname and fully qualified domain
name (FQDN). If ``preserve_hostname`` is set, then the hostname will not be
diff --git a/cloudinit/config/schema.py b/cloudinit/config/schema.py
index d62073d0..42792985 100644
--- a/cloudinit/config/schema.py
+++ b/cloudinit/config/schema.py
@@ -18,7 +18,7 @@ from typing import TYPE_CHECKING, List, NamedTuple, Optional, Type, Union, cast
import yaml
from cloudinit import importer, safeyaml
-from cloudinit.cmd.devel import read_cfg_paths
+from cloudinit.stages import Init
from cloudinit.util import error, get_modules_from_dir, load_file
try:
@@ -617,9 +617,10 @@ def validate_cloudconfig_file(config_path, schema, annotate=False):
"Unable to read system userdata as non-root user."
" Try using sudo"
)
- paths = read_cfg_paths()
- user_data_file = paths.get_ipath_cur("userdata_raw")
- content = load_file(user_data_file, decode=False)
+ init = Init(ds_deps=[])
+ init.fetch(existing="trust")
+ init.consume_data()
+ content = load_file(init.paths.get_ipath("cloud_config"), decode=False)
else:
if not os.path.exists(config_path):
raise RuntimeError(
diff --git a/cloudinit/config/schemas/versions.schema.cloud-config.json b/cloudinit/config/schemas/versions.schema.cloud-config.json
index c606085c..bca0a11e 100644
--- a/cloudinit/config/schemas/versions.schema.cloud-config.json
+++ b/cloudinit/config/schemas/versions.schema.cloud-config.json
@@ -7,7 +7,7 @@
{
"properties": {
"version": {
- "enum": ["22.2", "v1"]
+ "enum": [ "v1" ]
}
}
},
diff --git a/cloudinit/net/netplan.py b/cloudinit/net/netplan.py
index 66ad598f..d63d86d8 100644
--- a/cloudinit/net/netplan.py
+++ b/cloudinit/net/netplan.py
@@ -276,12 +276,27 @@ class Renderer(renderer.Renderer):
LOG.debug("netplan net_setup_link postcmd disabled")
return
setup_lnk = ["udevadm", "test-builtin", "net_setup_link"]
- for cmd in [
- setup_lnk + [SYS_CLASS_NET + iface]
- for iface in get_devicelist()
- if os.path.islink(SYS_CLASS_NET + iface)
- ]:
- subp.subp(cmd, capture=True)
+
+ # It's possible we can race a udev rename and attempt to run
+ # net_setup_link on a device that no longer exists. When this happens,
+ # we don't know what the device was renamed to, so re-gather the
+ # entire list of devices and try again.
+ last_exception = Exception
+ for _ in range(5):
+ try:
+ for iface in get_devicelist():
+ if os.path.islink(SYS_CLASS_NET + iface):
+ subp.subp(
+ setup_lnk + [SYS_CLASS_NET + iface], capture=True
+ )
+ break
+ except subp.ProcessExecutionError as e:
+ last_exception = e
+ else:
+ raise RuntimeError(
+ "'udevadm test-builtin net_setup_link' unable to run "
+ "successfully for all devices."
+ ) from last_exception
def _render_content(self, network_state: NetworkState):
diff --git a/cloudinit/sources/DataSourceAzure.py b/cloudinit/sources/DataSourceAzure.py
index 865238cf..803c4f25 100644
--- a/cloudinit/sources/DataSourceAzure.py
+++ b/cloudinit/sources/DataSourceAzure.py
@@ -974,7 +974,12 @@ class DataSourceAzure(sources.DataSource):
)
@azure_ds_telemetry_reporter
- def _report_ready_for_pps(self, *, create_marker: bool = True) -> None:
+ def _report_ready_for_pps(
+ self,
+ *,
+ create_marker: bool = True,
+ expect_url_error: bool = False,
+ ) -> None:
"""Report ready for PPS, creating the marker file upon completion.
:raises sources.InvalidMetaDataException: On error reporting ready.
@@ -982,9 +987,24 @@ class DataSourceAzure(sources.DataSource):
try:
self._report_ready()
except Exception as error:
- msg = "Failed reporting ready while in the preprovisioning pool."
- report_diagnostic_event(msg, logger_func=LOG.error)
- raise sources.InvalidMetaDataException(msg) from error
+ # Ignore HTTP failures for Savable PPS as the call may appear to
+ # fail if the network interface is unplugged or the VM is
+ # suspended before we process the response. Worst case scenario
+ # is that we failed to report ready for source PPS and this VM
+ # will be discarded shortly, no harm done.
+ if expect_url_error and isinstance(error, UrlError):
+ report_diagnostic_event(
+ "Ignoring http call failure, it was expected.",
+ logger_func=LOG.debug,
+ )
+ # The iso was ejected prior to reporting ready.
+ self._iso_dev = None
+ else:
+ msg = (
+ "Failed reporting ready while in the preprovisioning pool."
+ )
+ report_diagnostic_event(msg, logger_func=LOG.error)
+ raise sources.InvalidMetaDataException(msg) from error
if create_marker:
self._create_report_ready_marker()
@@ -1157,8 +1177,17 @@ class DataSourceAzure(sources.DataSource):
nl_sock = None
try:
nl_sock = netlink.create_bound_netlink_socket()
- self._report_ready_for_pps()
- self._teardown_ephemeral_networking()
+ self._report_ready_for_pps(expect_url_error=True)
+ try:
+ self._teardown_ephemeral_networking()
+ except subp.ProcessExecutionError as e:
+ report_diagnostic_event(
+ "Ignoring failure while tearing down networking, "
+ "NIC was likely unplugged: %r" % e,
+ logger_func=LOG.info,
+ )
+ self._ephemeral_dhcp_ctx = None
+
self._wait_for_nic_detach(nl_sock)
self._wait_for_hot_attached_primary_nic(nl_sock)
except netlink.NetlinkCreateSocketError as e:
diff --git a/cloudinit/sources/__init__.py b/cloudinit/sources/__init__.py
index b621fb6e..c399beb6 100644
--- a/cloudinit/sources/__init__.py
+++ b/cloudinit/sources/__init__.py
@@ -12,9 +12,10 @@ import abc
import copy
import json
import os
+import pickle
from collections import namedtuple
from enum import Enum, unique
-from typing import Any, Dict, List, Tuple
+from typing import Any, Dict, List, Optional, Tuple
from cloudinit import dmi, importer
from cloudinit import log as logging
@@ -373,14 +374,19 @@ class DataSource(CloudInitPickleMixin, metaclass=abc.ABCMeta):
self.persist_instance_data()
return return_value
- def persist_instance_data(self):
+ def persist_instance_data(self, write_cache=True):
"""Process and write INSTANCE_JSON_FILE with all instance metadata.
Replace any hyphens with underscores in key names for use in template
processing.
+ :param write_cache: boolean set True to persist obj.pkl when
+ instance_link exists.
+
@return True on successful write, False otherwise.
"""
+ if write_cache and os.path.lexists(self.paths.instance_link):
+ pkl_store(self, self.paths.get_ipath_cur("obj_pkl"))
if hasattr(self, "_crawled_metadata"):
# Any datasource with _crawled_metadata will best represent
# most recent, 'raw' metadata
@@ -1063,4 +1069,43 @@ def list_from_depends(depends, ds_list):
return ret_list
+def pkl_store(obj: DataSource, fname: str) -> bool:
+ """Use pickle to serialize Datasource to a file as a cache.
+
+ :return: True on success
+ """
+ try:
+ pk_contents = pickle.dumps(obj)
+ except Exception:
+ util.logexc(LOG, "Failed pickling datasource %s", obj)
+ return False
+ try:
+ util.write_file(fname, pk_contents, omode="wb", mode=0o400)
+ except Exception:
+ util.logexc(LOG, "Failed pickling datasource to %s", fname)
+ return False
+ return True
+
+
+def pkl_load(fname: str) -> Optional[DataSource]:
+ """Use pickle to deserialize a instance Datasource from a cache file."""
+ pickle_contents = None
+ try:
+ pickle_contents = util.load_file(fname, decode=False)
+ except Exception as e:
+ if os.path.isfile(fname):
+ LOG.warning("failed loading pickle in %s: %s", fname, e)
+
+ # This is allowed so just return nothing successfully loaded...
+ if not pickle_contents:
+ return None
+ try:
+ return pickle.loads(pickle_contents)
+ except DatasourceUnpickleUserDataError:
+ return None
+ except Exception:
+ util.logexc(LOG, "Failed loading pickled blob from %s", fname)
+ return None
+
+
# vi: ts=4 expandtab
diff --git a/cloudinit/sources/helpers/azure.py b/cloudinit/sources/helpers/azure.py
index 19584a61..56f44339 100644
--- a/cloudinit/sources/helpers/azure.py
+++ b/cloudinit/sources/helpers/azure.py
@@ -363,8 +363,16 @@ def http_with_retries(
% (url, attempt, e, e.code, e.headers),
logger_func=LOG.debug,
)
- # Raise exception if we're out of time.
- if time() + retry_sleep >= timeout:
+ # Raise exception if we're out of time or network is unreachable.
+ # If network is unreachable:
+ # - retries will not resolve the situation
+ # - for reporting ready for PPS, this generally means VM was put
+ # to sleep or network interface was unplugged before we see
+ # the call complete successfully.
+ if (
+ time() + retry_sleep >= timeout
+ or "Network is unreachable" in str(e)
+ ):
raise
sleep(retry_sleep)
diff --git a/cloudinit/stages.py b/cloudinit/stages.py
index 66e12eed..132dd83b 100644
--- a/cloudinit/stages.py
+++ b/cloudinit/stages.py
@@ -6,7 +6,6 @@
import copy
import os
-import pickle
import sys
from collections import namedtuple
from typing import Dict, Iterable, List, Optional, Set
@@ -247,7 +246,7 @@ class Init(object):
# We try to restore from a current link and static path
# by using the instance link, if purge_cache was called
# the file wont exist.
- return _pkl_load(self.paths.get_ipath_cur("obj_pkl"))
+ return sources.pkl_load(self.paths.get_ipath_cur("obj_pkl"))
def _write_to_cache(self):
if self.datasource is None:
@@ -260,7 +259,9 @@ class Init(object):
omode="w",
content="",
)
- return _pkl_store(self.datasource, self.paths.get_ipath_cur("obj_pkl"))
+ return sources.pkl_store(
+ self.datasource, self.paths.get_ipath_cur("obj_pkl")
+ )
def _get_datasources(self):
# Any config provided???
@@ -973,38 +974,4 @@ def fetch_base_config():
)
-def _pkl_store(obj, fname):
- try:
- pk_contents = pickle.dumps(obj)
- except Exception:
- util.logexc(LOG, "Failed pickling datasource %s", obj)
- return False
- try:
- util.write_file(fname, pk_contents, omode="wb", mode=0o400)
- except Exception:
- util.logexc(LOG, "Failed pickling datasource to %s", fname)
- return False
- return True
-
-
-def _pkl_load(fname):
- pickle_contents = None
- try:
- pickle_contents = util.load_file(fname, decode=False)
- except Exception as e:
- if os.path.isfile(fname):
- LOG.warning("failed loading pickle in %s: %s", fname, e)
-
- # This is allowed so just return nothing successfully loaded...
- if not pickle_contents:
- return None
- try:
- return pickle.loads(pickle_contents)
- except sources.DatasourceUnpickleUserDataError:
- return None
- except Exception:
- util.logexc(LOG, "Failed loading pickled blob from %s", fname)
- return None
-
-
# vi: ts=4 expandtab
diff --git a/cloudinit/version.py b/cloudinit/version.py
index 061ea419..ec2621f7 100644
--- a/cloudinit/version.py
+++ b/cloudinit/version.py
@@ -4,7 +4,7 @@
#
# This file is part of cloud-init. See LICENSE file for license information.
-__VERSION__ = "22.2"
+__VERSION__ = "22.3"
_PACKAGED_VERSION = "@@PACKAGED_VERSION@@"
FEATURES = [
diff --git a/debian/changelog b/debian/changelog
index b19ae3d6..b876774f 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,30 @@
+cloud-init (22.3-0ubuntu1~22.10.1) kinetic; urgency=medium
+
+ * New upstream release.
+ + Release 22.3 (#1662) (LP: #1986703)
+ + sources: obj.pkl cache should be written anyime get_data is run
+ (#1669)
+ + schema: drop release number from version file (#1664)
+ + pycloudlib: bump to quiet azure HTTP info logs (#1668)
+ + test: fix wireguard integration tests (#1666)
+ + Github is deprecating the 18.04 runner starting 12.1 (#1665)
+ + integration tests: Ensure one setup for all tests (#1661)
+ + tests: ansible test fixes (#1660)
+ + Prevent concurrency issue in test_webhook_hander.py (#1658)
+ + Workaround net_setup_link race with udev (#1655) (LP: #1983516)
+ + test: drop erroneous lxd assertion, verify command succeeded (#1657)
+ + Fix Chrony usage on Centos Stream (#1648) (LP: #1885952)
+ [Sven Haardiek]
+ + sources/azure: handle network unreachable errors for savable PPS
+ (#1642) [Chris Patterson]
+ + Return cc_set_hostname to PER_INSTANCE frequency (#1651)
+ (LP: #1983811)
+ + test: Collect integration test time by default (#1638)
+ + test: Drop forced package install hack in lxd integration test (#1649)
+ + schema: Resolve user-data if --system given (#1644) (LP: #1983306)
+
+ -- Brett Holman <brett.holman@canonical.com> Fri, 19 Aug 2022 10:49:08 -0600
+
cloud-init (22.2-139-gb64ba456-0ubuntu1~22.10.1) kinetic; urgency=medium
* New upstream snapshot.
diff --git a/integration-requirements.txt b/integration-requirements.txt
index 8b9d0db7..95503012 100644
--- a/integration-requirements.txt
+++ b/integration-requirements.txt
@@ -1,5 +1,5 @@
# PyPI requirements for cloud-init integration testing
# https://cloudinit.readthedocs.io/en/latest/topics/integration_tests.html
#
-pycloudlib @ git+https://github.com/canonical/pycloudlib.git@68fe052baf6f32415b727d02ba2ba48b7a995bf2
+pycloudlib @ git+https://github.com/canonical/pycloudlib.git@7f5bf6e67cf79f31770c456196b2ce695c6ce165
pytest
diff --git a/templates/chrony.conf.centos.tmpl b/templates/chrony.conf.centos.tmpl
new file mode 100644
index 00000000..5b3542ef
--- /dev/null
+++ b/templates/chrony.conf.centos.tmpl
@@ -0,0 +1,45 @@
+## template:jinja
+# Use public servers from the pool.ntp.org project.
+# Please consider joining the pool (http://www.pool.ntp.org/join.html).
+{% if pools %}# pools
+{% endif %}
+{% for pool in pools -%}
+pool {{pool}} iburst
+{% endfor %}
+{%- if servers %}# servers
+{% endif %}
+{% for server in servers -%}
+server {{server}} iburst
+{% endfor %}
+
+# Record the rate at which the system clock gains/losses time.
+driftfile /var/lib/chrony/drift
+
+# Allow the system clock to be stepped in the first three updates
+# if its offset is larger than 1 second.
+makestep 1.0 3
+
+# Enable kernel synchronization of the real-time clock (RTC).
+rtcsync
+
+# Enable hardware timestamping on all interfaces that support it.
+#hwtimestamp *
+
+# Increase the minimum number of selectable sources required to adjust
+# the system clock.
+#minsources 2
+
+# Allow NTP client access from local network.
+#allow 192.168.0.0/16
+
+# Serve time even if not synchronized to a time source.
+#local stratum 10
+
+# Specify file containing keys for NTP authentication.
+#keyfile /etc/chrony.keys
+
+# Specify directory for log files.
+logdir /var/log/chrony
+
+# Select which information is logged.
+#log measurements statistics tracking
diff --git a/tests/integration_tests/bugs/test_lp1835584.py b/tests/integration_tests/bugs/test_lp1835584.py
index 4d669ee2..4e732446 100644
--- a/tests/integration_tests/bugs/test_lp1835584.py
+++ b/tests/integration_tests/bugs/test_lp1835584.py
@@ -84,7 +84,13 @@ def test_azure_kernel_upgrade_case_insensitive_uuid(
pytest.skip(
"Provide CLOUD_INIT_SOURCE to install expected working cloud-init"
)
- with session_cloud.launch() as instance:
+ with session_cloud.launch(
+ launch_kwargs={
+ "image_id": session_cloud.cloud_instance.daily_image(
+ cfg_image_spec.image_id, image_type=ImageType.PRO_FIPS
+ )
+ }
+ ) as instance:
# We can't use setup_image fixture here because we want to avoid
# taking a snapshot or cleaning the booted machine after cloud-init
# upgrade.
diff --git a/tests/integration_tests/conftest.py b/tests/integration_tests/conftest.py
index 580fd6ad..6157bad8 100644
--- a/tests/integration_tests/conftest.py
+++ b/tests/integration_tests/conftest.py
@@ -7,7 +7,7 @@ import sys
from contextlib import contextmanager
from pathlib import Path
from tarfile import TarFile
-from typing import Dict, Iterator, Type
+from typing import Dict, Generator, Iterator, Type
import pytest
from pycloudlib.lxd.instance import LXDInstance
@@ -92,60 +92,20 @@ def disable_subp_usage(request):
pass
-@contextmanager
-def _session_cloud(
- request: pytest.FixtureRequest,
-) -> Iterator[IntegrationCloud]:
+@pytest.fixture(scope="session")
+def session_cloud() -> Generator[IntegrationCloud, None, None]:
if integration_settings.PLATFORM not in platforms.keys():
raise ValueError(
- "{} is an invalid PLATFORM specified in settings. "
- "Must be one of {}".format(
- integration_settings.PLATFORM, list(platforms.keys())
- )
+ f"{integration_settings.PLATFORM} is an invalid PLATFORM "
+ f"specified in settings. Must be one of {list(platforms.keys())}"
)
- integration_cloud_marker = request.node.get_closest_marker(
- "integration_cloud_args"
- )
- cloud_args = []
- cloud_kwargs = {}
- if integration_cloud_marker:
- cloud_args = integration_cloud_marker.args
- cloud_kwargs = integration_cloud_marker.kwargs
- cloud = platforms[integration_settings.PLATFORM](
- *cloud_args, **cloud_kwargs
- )
+ cloud = platforms[integration_settings.PLATFORM]()
cloud.emit_settings_to_log()
-
yield cloud
-
cloud.destroy()
-@pytest.fixture
-def session_cloud(
- request: pytest.FixtureRequest,
-) -> Iterator[IntegrationCloud]:
- with _session_cloud(request) as cloud:
- yield cloud
-
-
-@pytest.fixture(scope="module")
-def module_session_cloud(
- request: pytest.FixtureRequest,
-) -> Iterator[IntegrationCloud]:
- with _session_cloud(request) as cloud:
- yield cloud
-
-
-@pytest.fixture(scope="class")
-def class_session_cloud(
- request: pytest.FixtureRequest,
-) -> Iterator[IntegrationCloud]:
- with _session_cloud(request) as cloud:
- yield cloud
-
-
def get_validated_source(
session_cloud: IntegrationCloud,
source=integration_settings.CLOUD_INIT_SOURCE,
@@ -166,12 +126,11 @@ def get_validated_source(
return CloudInitSource.DEB_PACKAGE
elif source == "UPGRADE":
return CloudInitSource.UPGRADE
- raise ValueError(
- "Invalid value for CLOUD_INIT_SOURCE setting: {}".format(source)
- )
+ raise ValueError(f"Invalid value for CLOUD_INIT_SOURCE setting: {source}")
-def _setup_image(session_cloud: IntegrationCloud, request):
+@pytest.fixture(scope="session")
+def setup_image(session_cloud: IntegrationCloud, request):
"""Setup the target environment with the correct version of cloud-init.
So we can launch instances / run tests with the correct image
@@ -193,21 +152,6 @@ def _setup_image(session_cloud: IntegrationCloud, request):
request.addfinalizer(session_cloud.delete_snapshot)
-@pytest.fixture
-def setup_image(session_cloud: IntegrationCloud, request):
- _setup_image(session_cloud, request)
-
-
-@pytest.fixture(scope="module")
-def module_setup_image(module_session_cloud: IntegrationCloud, request):
- _setup_image(module_session_cloud, request)
-
-
-@pytest.fixture(scope="class")
-def class_setup_image(class_session_cloud: IntegrationCloud, request):
- _setup_image(class_session_cloud, request)
-
-
def _collect_logs(
instance: IntegrationInstance, node_id: str, test_failed: bool
):
@@ -329,19 +273,19 @@ def client(
@pytest.fixture(scope="module")
def module_client(
- request, fixture_utils, module_session_cloud, module_setup_image
+ request, fixture_utils, session_cloud, setup_image
) -> Iterator[IntegrationInstance]:
"""Provide a client that runs once per module."""
- with _client(request, fixture_utils, module_session_cloud) as client:
+ with _client(request, fixture_utils, session_cloud) as client:
yield client
@pytest.fixture(scope="class")
def class_client(
- request, fixture_utils, class_session_cloud, class_setup_image
+ request, fixture_utils, session_cloud, setup_image
) -> Iterator[IntegrationInstance]:
"""Provide a client that runs once per class."""
- with _client(request, fixture_utils, class_session_cloud) as client:
+ with _client(request, fixture_utils, session_cloud) as client:
yield client
diff --git a/tests/integration_tests/modules/test_ansible.py b/tests/integration_tests/modules/test_ansible.py
index 0328781e..eebc7be9 100644
--- a/tests/integration_tests/modules/test_ansible.py
+++ b/tests/integration_tests/modules/test_ansible.py
@@ -22,14 +22,35 @@ write_files:
content: |
[Unit]
Description=Serve a local git repo
+ Wants=repo_waiter.service
+ After=cloud-init-local.service
+ Before=cloud-config.service
+ Before=cloud-final.service
+
+ [Install]
+ WantedBy=cloud-init-local.service
[Service]
- ExecStart=/usr/bin/env python3 -m http.server --directory \
-/root/playbooks/.git
- Restart=on-failure
+ ExecStart=/usr/bin/env python3 -m http.server \
+ --directory /root/playbooks/.git
+
+ - path: /etc/systemd/system/repo_waiter.service
+ content: |
+ [Unit]
+ Description=Block boot until repo is available
+ After=repo_server.service
+ Before=cloud-final.service
[Install]
- WantedBy=cloud-final.service
+ WantedBy=cloud-init-local.service
+
+ # clone into temp directory to test that server is running
+ # sdnotify would be an alternative way to verify that the server is
+ # running and continue once it is up, but this is simple and works
+ [Service]
+ Type=oneshot
+ ExecStart=sh -c "while \
+ ! git clone http://0.0.0.0:8000/ $(mktemp -d); do sleep 0.1; done"
- path: /root/playbooks/ubuntu.yml
content: |
@@ -57,8 +78,11 @@ write_files:
- "{{ item }}"
state: latest
loop: "{{ packages }}"
-
+runcmd:
+ - [systemctl, enable, repo_server.service]
+ - [systemctl, enable, repo_waiter.service]
"""
+
INSTALL_METHOD = """
ansible:
install-method: {method}
@@ -67,15 +91,13 @@ ansible:
url: "http://0.0.0.0:8000/"
playbook-name: ubuntu.yml
full: true
-runcmd:
- - "systemctl enable repo_server.service"
"""
SETUP_REPO = f"cd {REPO_D} &&\
git init {REPO_D} &&\
git add {REPO_D}/roles/apt/tasks/main.yml {REPO_D}/ubuntu.yml &&\
git commit -m auto &&\
-git update-server-info"
+(cd {REPO_D}/.git; git update-server-info)"
def _test_ansible_pull_from_local_server(my_client):
@@ -84,15 +106,6 @@ def _test_ansible_pull_from_local_server(my_client):
my_client.execute("cloud-init clean --logs")
my_client.restart()
log = my_client.read_from_file("/var/log/cloud-init.log")
-
- # These ensure the repo used for ansible-pull works as expected
- assert my_client.execute("wget http://0.0.0.0:8000").ok
- assert my_client.execute("git clone http://0.0.0.0:8000/").ok
- assert "(dead)" not in my_client.execute(
- "systemctl status repo_server.service"
- )
-
- # Following assertions verify ansible behavior itself
verify_clean_log(log)
output_log = my_client.read_from_file("/var/log/cloud-init-output.log")
assert "ok=3" in output_log
diff --git a/tests/integration_tests/modules/test_lxd.py b/tests/integration_tests/modules/test_lxd.py
index f4045425..3443b74a 100644
--- a/tests/integration_tests/modules/test_lxd.py
+++ b/tests/integration_tests/modules/test_lxd.py
@@ -3,7 +3,6 @@
(This is ported from
``tests/cloud_tests/testcases/modules/lxd_bridge.yaml``.)
"""
-import re
import warnings
import pytest
@@ -30,10 +29,9 @@ lxd:
STORAGE_USER_DATA = """\
#cloud-config
-bootcmd: [ "apt-get --yes remove {0}", "! command -v {2}", "{3}" ]
lxd:
init:
- storage_backend: {1}
+ storage_backend: {}
"""
@@ -51,7 +49,7 @@ class TestLxdBridge:
verify_clean_log(cloud_init_log)
# The bridge should exist
- assert class_client.execute("ip addr show lxdbr0")
+ assert class_client.execute("ip addr show lxdbr0").ok
raw_network_config = class_client.execute("lxc network show lxdbr0")
network_config = yaml.safe_load(raw_network_config)
@@ -60,42 +58,31 @@ class TestLxdBridge:
def validate_storage(validate_client, pkg_name, command):
log = validate_client.read_from_file("/var/log/cloud-init.log")
- assert re.search(f"apt-get.*install.*{pkg_name}", log) is not None
verify_clean_log(log, ignore_deprecations=False)
return log
@pytest.mark.no_container
-@pytest.mark.user_data(
- STORAGE_USER_DATA.format("btrfs-progs", "btrfs", "mkfs.btrfs", "true")
-)
+@pytest.mark.user_data(STORAGE_USER_DATA.format("btrfs"))
def test_storage_btrfs(client):
validate_storage(client, "btrfs-progs", "mkfs.btrfs")
@pytest.mark.no_container
-@pytest.mark.user_data(
- STORAGE_USER_DATA.format(
- "lvm2",
- "lvm",
- "lvcreate",
- "apt-get install "
- "thin-provisioning-tools && systemctl unmask lvm2-lvmpolld.socket",
- )
-)
+@pytest.mark.user_data(STORAGE_USER_DATA.format("lvm"))
def test_storage_lvm(client):
log = client.read_from_file("/var/log/cloud-init.log")
# Note to self
- if "doesn't use thinpool by default on Ubuntu due to LP" not in log:
+ if (
+ "doesn't use thinpool by default on Ubuntu due to LP" not in log
+ and "-kvm" not in client.execute("uname -r")
+ ):
warnings.warn("LP 1982780 has been fixed, update to allow thinpools")
-
validate_storage(client, "lvm2", "lvcreate")
@pytest.mark.no_container
-@pytest.mark.user_data(
- STORAGE_USER_DATA.format("zfsutils-linux", "zfs", "zpool", "true")
-)
+@pytest.mark.user_data(STORAGE_USER_DATA.format("zfs"))
def test_storage_zfs(client):
validate_storage(client, "zfsutils-linux", "zpool")
diff --git a/tests/integration_tests/modules/test_wireguard.py b/tests/integration_tests/modules/test_wireguard.py
index 2e97c1fb..e658a9df 100644
--- a/tests/integration_tests/modules/test_wireguard.py
+++ b/tests/integration_tests/modules/test_wireguard.py
@@ -36,6 +36,12 @@ wireguard:
readinessprobe:
- ping -qc 5 192.168.254.1 2>&1 > /dev/null
- echo $? > /tmp/ping
+
+# wg-quick configures the system interfaces and routes, but we need to ssh in,
+# so stop the service at the end of cloud-init
+runcmd:
+ - [systemctl, stop, wg-quick@wg0.service]
+ - [systemctl, stop, wg-quick@wg1.service]
"""
@@ -83,9 +89,15 @@ class TestWireguard:
"269196eb9916617969dc5220c1a90d54",
),
# check if systemd started wg0
- ("systemctl is-active wg-quick@wg0", "active"),
+ (
+ "systemctl is-failed wg-quick@wg0; test $? -eq 1",
+ "inactive",
+ ),
# check if systemd started wg1
- ("systemctl is-active wg-quick@wg1", "active"),
+ (
+ "systemctl is-failed wg-quick@wg1; test $? -eq 1",
+ "inactive",
+ ),
# check readiness probe (ping wg0)
("cat /tmp/ping", "0"),
),
diff --git a/tests/integration_tests/util.py b/tests/integration_tests/util.py
index 18ca1917..7eec3a4a 100644
--- a/tests/integration_tests/util.py
+++ b/tests/integration_tests/util.py
@@ -58,6 +58,7 @@ def verify_clean_log(log: str, ignore_deprecations: bool = True):
"No lease found; using default endpoint",
# Ubuntu lxd storage
"thinpool by default on Ubuntu due to LP #1982780",
+ "WARNING]: Could not match supplied host pattern, ignoring:",
]
traceback_texts = []
if "oracle" in log:
diff --git a/tests/unittests/config/test_apt_source_v1.py b/tests/unittests/config/test_apt_source_v1.py
index 7c59d279..4ce412ce 100644
--- a/tests/unittests/config/test_apt_source_v1.py
+++ b/tests/unittests/config/test_apt_source_v1.py
@@ -43,13 +43,6 @@ class FakeDistro(object):
return
-class FakeDatasource:
- """Fake Datasource helper object"""
-
- def __init__(self):
- self.region = "region"
-
-
class TestAptSourceConfig(TestCase):
"""TestAptSourceConfig
Main Class to test apt_source configs
diff --git a/tests/unittests/config/test_apt_source_v3.py b/tests/unittests/config/test_apt_source_v3.py
index abb94340..5bb87385 100644
--- a/tests/unittests/config/test_apt_source_v3.py
+++ b/tests/unittests/config/test_apt_source_v3.py
@@ -45,13 +45,6 @@ MOCK_LSB_RELEASE_DATA = {
}
-class FakeDatasource:
- """Fake Datasource helper object"""
-
- def __init__(self):
- self.region = "region"
-
-
class TestAptSourceConfig(t_help.FilesystemMockingTestCase):
"""TestAptSourceConfig
Main Class to test apt configs
diff --git a/tests/unittests/config/test_schema.py b/tests/unittests/config/test_schema.py
index b556c4b5..a401ffd4 100644
--- a/tests/unittests/config/test_schema.py
+++ b/tests/unittests/config/test_schema.py
@@ -17,7 +17,9 @@ from types import ModuleType
from typing import List, Optional, Sequence, Set
import pytest
+import responses
+from cloudinit import stages
from cloudinit.config.schema import (
CLOUD_CONFIG_HEADER,
VERSIONED_USERDATA_SCHEMA_FILE,
@@ -50,6 +52,7 @@ from tests.unittests.helpers import (
skipUnlessHypothesisJsonSchema,
skipUnlessJsonSchema,
)
+from tests.unittests.util import FakeDataSource
M_PATH = "cloudinit.config.schema."
@@ -629,6 +632,7 @@ class TestCloudConfigExamples:
validate_cloudconfig_schema(config_load, schema, strict=True)
+@pytest.mark.usefixtures("fake_filesystem")
class TestValidateCloudConfigFile:
"""Tests for validate_cloudconfig_file."""
@@ -702,6 +706,78 @@ class TestValidateCloudConfigFile:
with pytest.raises(SchemaValidationError, match=error_msg):
validate_cloudconfig_file(config_file.strpath, schema, annotate)
+ @skipUnlessJsonSchema()
+ @responses.activate
+ @pytest.mark.parametrize("annotate", (True, False))
+ @mock.patch("cloudinit.url_helper.time.sleep")
+ @mock.patch(M_PATH + "os.getuid", return_value=0)
+ def test_validateconfig_file_include_validates_schema(
+ self, m_getuid, m_sleep, annotate, mocker
+ ):
+ """validate_cloudconfig_file raises errors on invalid schema
+ when user-data uses `#include`."""
+ schema = {"properties": {"p1": {"type": "string", "format": "string"}}}
+ included_data = "#cloud-config\np1: -1"
+ included_url = "http://asdf/user-data"
+ blob = f"#include {included_url}"
+ responses.add(responses.GET, included_url, included_data)
+
+ ci = stages.Init()
+ ci.datasource = FakeDataSource(blob)
+ mocker.patch(M_PATH + "Init", return_value=ci)
+
+ error_msg = (
+ "Cloud config schema errors: p1: -1 is not of type 'string'"
+ )
+ with pytest.raises(SchemaValidationError, match=error_msg):
+ validate_cloudconfig_file(None, schema, annotate)
+
+ @skipUnlessJsonSchema()
+ @responses.activate
+ @pytest.mark.parametrize("annotate", (True, False))
+ @mock.patch("cloudinit.url_helper.time.sleep")
+ @mock.patch(M_PATH + "os.getuid", return_value=0)
+ def test_validateconfig_file_include_success(
+ self, m_getuid, m_sleep, annotate, mocker
+ ):
+        """validate_cloudconfig_file does not raise errors on valid schema
+        when user-data uses `#include`."""
+ schema = {"properties": {"p1": {"type": "string", "format": "string"}}}
+ included_data = "#cloud-config\np1: asdf"
+ included_url = "http://asdf/user-data"
+ blob = f"#include {included_url}"
+ responses.add(responses.GET, included_url, included_data)
+
+ ci = stages.Init()
+ ci.datasource = FakeDataSource(blob)
+ mocker.patch(M_PATH + "Init", return_value=ci)
+
+ validate_cloudconfig_file(None, schema, annotate)
+
+ @skipUnlessJsonSchema()
+ @pytest.mark.parametrize("annotate", (True, False))
+ @mock.patch("cloudinit.url_helper.time.sleep")
+ @mock.patch(M_PATH + "os.getuid", return_value=0)
+ def test_validateconfig_file_no_cloud_cfg(
+ self, m_getuid, m_sleep, annotate, capsys, mocker
+ ):
+        """validate_cloudconfig_file raises SchemaValidationError on
+ schema = {"properties": {"p1": {"type": "string", "format": "string"}}}
+ blob = ""
+
+ ci = stages.Init()
+ ci.datasource = FakeDataSource(blob)
+ mocker.patch(M_PATH + "Init", return_value=ci)
+
+ with pytest.raises(
+ SchemaValidationError,
+ match=re.escape(
+ "Cloud config schema errors: format-l1.c1: File None needs"
+ ' to begin with "#cloud-config"'
+ ),
+ ):
+ validate_cloudconfig_file(None, schema, annotate)
+
class TestSchemaDocMarkdown:
"""Tests for get_meta_doc."""
@@ -1539,15 +1615,15 @@ class TestMain:
out, _err = capsys.readouterr()
assert "Valid cloud-config: {0}\n".format(myyaml) == out
- @mock.patch("cloudinit.config.schema.read_cfg_paths")
- @mock.patch("cloudinit.config.schema.os.getuid", return_value=0)
+ @mock.patch(M_PATH + "os.getuid", return_value=0)
def test_main_validates_system_userdata(
- self, m_getuid, m_read_cfg_paths, capsys, paths
+ self, m_getuid, capsys, mocker, paths
):
"""When --system is provided, main validates system userdata."""
- m_read_cfg_paths.return_value = paths
- ud_file = paths.get_ipath_cur("userdata_raw")
- write_file(ud_file, b"#cloud-config\nntp:")
+ m_init = mocker.patch(M_PATH + "Init")
+ m_init.return_value.paths.get_ipath = paths.get_ipath_cur
+ cloud_config_file = paths.get_ipath_cur("cloud_config")
+ write_file(cloud_config_file, b"#cloud-config\nntp:")
myargs = ["mycmd", "--system"]
with mock.patch("sys.argv", myargs):
assert 0 == main(), "Expected 0 exit code"
diff --git a/tests/unittests/reporting/test_webhook_handler.py b/tests/unittests/reporting/test_webhook_handler.py
index bef457c7..2df71d93 100644
--- a/tests/unittests/reporting/test_webhook_handler.py
+++ b/tests/unittests/reporting/test_webhook_handler.py
@@ -106,13 +106,14 @@ class TestWebHookHandler:
for all messages to be posted
"""
responses.add(responses.POST, "http://localhost", status=404)
- for _ in range(20):
+ for _ in range(10):
report_start_event("name", "description")
start_time = time.time()
while time.time() - start_time < 3:
with suppress(AssertionError):
- assert 20 == caplog.text.count("Failed posting event")
+ assert 10 == caplog.text.count("Failed posting event")
break
+ time.sleep(0.01) # Force context switch
else:
pytest.fail(
"Expected 20 failures, only got "
diff --git a/tests/unittests/sources/test_azure.py b/tests/unittests/sources/test_azure.py
index 1c6d1eb1..4eceecf1 100644
--- a/tests/unittests/sources/test_azure.py
+++ b/tests/unittests/sources/test_azure.py
@@ -4319,6 +4319,146 @@ class TestProvisioning:
mock.call.create_bound_netlink_socket().close(),
]
+ @pytest.mark.parametrize(
+ "fabric_side_effect",
+ [
+ [[], []],
+ [
+ [
+ url_helper.UrlError(
+ requests.ConnectionError(
+ "Failed to establish a new connection: "
+ "[Errno 101] Network is unreachable"
+ )
+ )
+ ],
+ [],
+ ],
+ [
+ [url_helper.UrlError(requests.ReadTimeout("Read timed out"))],
+ [],
+ ],
+ ],
+ )
+ def test_savable_pps_early_unplug(self, fabric_side_effect):
+ self.imds_md["extended"]["compute"]["ppsType"] = "Savable"
+
+ nl_sock = mock.MagicMock()
+ self.mock_netlink.create_bound_netlink_socket.return_value = nl_sock
+ self.mock_netlink.wait_for_nic_detach_event.return_value = "eth9"
+ self.mock_netlink.wait_for_nic_attach_event.return_value = (
+ "ethAttached1"
+ )
+ self.mock_readurl.side_effect = [
+ mock.MagicMock(contents=json.dumps(self.imds_md).encode()),
+ mock.MagicMock(
+ contents=json.dumps(self.imds_md["network"]).encode()
+ ),
+ mock.MagicMock(contents=construct_ovf_env().encode()),
+ mock.MagicMock(contents=json.dumps(self.imds_md).encode()),
+ ]
+ self.mock_azure_get_metadata_from_fabric.side_effect = (
+ fabric_side_effect
+ )
+
+ # Fake DHCP teardown failure.
+ ipv4_net = self.mock_net_dhcp_EphemeralIPv4Network
+ ipv4_net.return_value.__exit__.side_effect = [
+ subp.ProcessExecutionError(
+ cmd=["failed", "cmd"],
+ stdout="test_stdout",
+ stderr="test_stderr",
+ exit_code=4,
+ ),
+ None,
+ ]
+
+ self.azure_ds._get_data()
+
+ assert self.mock_readurl.mock_calls == [
+ mock.call(
+ "http://169.254.169.254/metadata/instance?"
+ "api-version=2021-08-01&extended=true",
+ timeout=2,
+ headers={"Metadata": "true"},
+ retries=10,
+ exception_cb=dsaz.imds_readurl_exception_callback,
+ infinite=False,
+ ),
+ mock.call(
+ "http://169.254.169.254/metadata/instance/network?"
+ "api-version=2021-08-01",
+ timeout=2,
+ headers={"Metadata": "true"},
+ retries=0,
+ exception_cb=mock.ANY,
+ infinite=True,
+ ),
+ mock.call(
+ "http://169.254.169.254/metadata/reprovisiondata?"
+ "api-version=2019-06-01",
+ timeout=2,
+ headers={"Metadata": "true"},
+ exception_cb=mock.ANY,
+ infinite=True,
+ log_req_resp=False,
+ ),
+ mock.call(
+ "http://169.254.169.254/metadata/instance?"
+ "api-version=2021-08-01&extended=true",
+ timeout=2,
+ headers={"Metadata": "true"},
+ retries=10,
+ exception_cb=dsaz.imds_readurl_exception_callback,
+ infinite=False,
+ ),
+ ]
+
+ # Verify DHCP is setup twice.
+ assert self.mock_wrapping_setup_ephemeral_networking.mock_calls == [
+ mock.call(timeout_minutes=20),
+ mock.call(iface="ethAttached1", timeout_minutes=20),
+ ]
+ assert self.mock_net_dhcp_maybe_perform_dhcp_discovery.mock_calls == [
+ mock.call(None, dsaz.dhcp_log_cb),
+ mock.call("ethAttached1", dsaz.dhcp_log_cb),
+ ]
+ assert self.azure_ds._wireserver_endpoint == "10.11.12.13"
+ assert self.azure_ds._is_ephemeral_networking_up() is False
+
+ # Verify DMI usage.
+ assert self.mock_dmi_read_dmi_data.mock_calls == [
+ mock.call("chassis-asset-tag"),
+ mock.call("system-uuid"),
+ ]
+ assert self.azure_ds.metadata["instance-id"] == "fake-system-uuid"
+
+ # Verify IMDS metadata.
+ assert self.azure_ds.metadata["imds"] == self.imds_md
+
+ # Verify reporting ready twice.
+ assert self.mock_azure_get_metadata_from_fabric.mock_calls == [
+ mock.call(
+ endpoint="10.11.12.13",
+ iso_dev="/dev/sr0",
+ pubkey_info=None,
+ ),
+ mock.call(
+ endpoint="10.11.12.13",
+ iso_dev=None,
+ pubkey_info=None,
+ ),
+ ]
+
+ # Verify netlink operations for Savable PPS.
+ assert self.mock_netlink.mock_calls == [
+ mock.call.create_bound_netlink_socket(),
+ mock.call.wait_for_nic_detach_event(nl_sock),
+ mock.call.wait_for_nic_attach_event(nl_sock, ["ethAttached1"]),
+ mock.call.create_bound_netlink_socket().__bool__(),
+ mock.call.create_bound_netlink_socket().close(),
+ ]
+
@pytest.mark.parametrize("pps_type", ["Savable", "Running", "None"])
def test_recovery_pps(self, pps_type):
self.patched_reported_ready_marker_path.write_text("")
diff --git a/tests/unittests/sources/test_azure_helper.py b/tests/unittests/sources/test_azure_helper.py
index 8107b114..ff912bef 100644
--- a/tests/unittests/sources/test_azure_helper.py
+++ b/tests/unittests/sources/test_azure_helper.py
@@ -8,6 +8,7 @@ from xml.etree import ElementTree
from xml.sax.saxutils import escape, unescape
import pytest
+import requests
from cloudinit import url_helper
from cloudinit.sources.helpers import azure as azure_helper
@@ -409,6 +410,34 @@ class TestHttpWithRetries:
mock.call(retry_sleep)
]
+ def test_network_unreachable(self, caplog):
+ error = url_helper.UrlError(
+ requests.ConnectionError(
+ "Failed to establish a new connection: "
+ "[Errno 101] Network is unreachable"
+ )
+ )
+ self.m_readurl.side_effect = error
+
+ with pytest.raises(url_helper.UrlError) as exc_info:
+ azure_helper.http_with_retries(
+ "testurl",
+ headers={},
+ )
+
+ assert exc_info.value == error
+ assert caplog.record_tuples[1] == (
+ "cloudinit.sources.helpers.azure",
+ 10,
+ "Failed HTTP request with Azure endpoint testurl during "
+ "attempt 1 with exception: Failed to establish a new "
+ "connection: [Errno 101] Network is unreachable "
+ "(code=None headers={})",
+ )
+ assert len(caplog.record_tuples) == 3
+ assert self.m_time.mock_calls == [mock.call(), mock.call()]
+ assert self.m_sleep.mock_calls == []
+
@pytest.mark.parametrize(
"times,try_count,retry_sleep,timeout_minutes",
[
diff --git a/tests/unittests/sources/test_init.py b/tests/unittests/sources/test_init.py
index a42c6a72..52f6cbfc 100644
--- a/tests/unittests/sources/test_init.py
+++ b/tests/unittests/sources/test_init.py
@@ -17,6 +17,7 @@ from cloudinit.sources import (
UNSET,
DataSource,
canonical_cloud_id,
+ pkl_load,
redact_sensitive_keys,
)
from cloudinit.user_data import UserDataProcessor
@@ -672,8 +673,12 @@ class TestDataSource(CiTestCase):
def test_persist_instance_data_writes_ec2_metadata_when_set(self):
"""When ec2_metadata class attribute is set, persist to json."""
tmp = self.tmp_dir()
+ cloud_dir = os.path.join(tmp, "cloud")
+ util.ensure_dir(cloud_dir)
datasource = DataSourceTestSubclassNet(
- self.sys_cfg, self.distro, Paths({"run_dir": tmp})
+ self.sys_cfg,
+ self.distro,
+ Paths({"run_dir": tmp, "cloud_dir": cloud_dir}),
)
datasource.ec2_metadata = UNSET
datasource.get_data()
@@ -690,8 +695,12 @@ class TestDataSource(CiTestCase):
def test_persist_instance_data_writes_canonical_cloud_id_and_symlink(self):
"""canonical-cloud-id class attribute is set, persist to json."""
tmp = self.tmp_dir()
+ cloud_dir = os.path.join(tmp, "cloud")
+ util.ensure_dir(cloud_dir)
datasource = DataSourceTestSubclassNet(
- self.sys_cfg, self.distro, Paths({"run_dir": tmp})
+ self.sys_cfg,
+ self.distro,
+ Paths({"run_dir": tmp, "cloud_dir": cloud_dir}),
)
cloud_id_link = os.path.join(tmp, "cloud-id")
cloud_id_file = os.path.join(tmp, "cloud-id-my-cloud")
@@ -722,8 +731,12 @@ class TestDataSource(CiTestCase):
def test_persist_instance_data_writes_network_json_when_set(self):
"""When network_data.json class attribute is set, persist to json."""
tmp = self.tmp_dir()
+ cloud_dir = os.path.join(tmp, "cloud")
+ util.ensure_dir(cloud_dir)
datasource = DataSourceTestSubclassNet(
- self.sys_cfg, self.distro, Paths({"run_dir": tmp})
+ self.sys_cfg,
+ self.distro,
+ Paths({"run_dir": tmp, "cloud_dir": cloud_dir}),
)
datasource.get_data()
json_file = self.tmp_path(INSTANCE_JSON_FILE, tmp)
@@ -736,6 +749,34 @@ class TestDataSource(CiTestCase):
{"network_json": "is good"}, instance_data["ds"]["network_json"]
)
+ def test_persist_instance_serializes_datasource_pickle(self):
+ """obj.pkl is written when instance link present and write_cache."""
+ tmp = self.tmp_dir()
+ cloud_dir = os.path.join(tmp, "cloud")
+ util.ensure_dir(cloud_dir)
+ datasource = DataSourceTestSubclassNet(
+ self.sys_cfg,
+ self.distro,
+ Paths({"run_dir": tmp, "cloud_dir": cloud_dir}),
+ )
+ pkl_cache_file = os.path.join(cloud_dir, "instance/obj.pkl")
+ self.assertFalse(os.path.exists(pkl_cache_file))
+ datasource.network_json = {"network_json": "is good"}
+ # No /var/lib/cloud/instance symlink
+ datasource.persist_instance_data(write_cache=True)
+ self.assertFalse(os.path.exists(pkl_cache_file))
+
+ # Symlink /var/lib/cloud/instance but write_cache=False
+ util.sym_link(cloud_dir, os.path.join(cloud_dir, "instance"))
+ datasource.persist_instance_data(write_cache=False)
+ self.assertFalse(os.path.exists(pkl_cache_file))
+
+ # Symlink /var/lib/cloud/instance and write_cache=True
+ datasource.persist_instance_data(write_cache=True)
+ self.assertTrue(os.path.exists(pkl_cache_file))
+ ds = pkl_load(pkl_cache_file)
+ self.assertEqual(datasource.network_json, ds.network_json)
+
def test_get_data_base64encodes_unserializable_bytes(self):
"""On py3, get_data base64encodes any unserializable content."""
tmp = self.tmp_dir()
diff --git a/tests/unittests/test_data.py b/tests/unittests/test_data.py
index 75c304a8..eda04093 100644
--- a/tests/unittests/test_data.py
+++ b/tests/unittests/test_data.py
@@ -16,23 +16,13 @@ import httpretty
from cloudinit import handlers
from cloudinit import helpers as c_helpers
-from cloudinit import log, safeyaml, sources, stages
+from cloudinit import log, safeyaml, stages
from cloudinit import user_data as ud
from cloudinit import util
from cloudinit.config.modules import Modules
from cloudinit.settings import PER_INSTANCE
from tests.unittests import helpers
-
-INSTANCE_ID = "i-testing"
-
-
-class FakeDataSource(sources.DataSource):
- def __init__(self, userdata=None, vendordata=None, vendordata2=None):
- sources.DataSource.__init__(self, {}, None, None)
- self.metadata = {"instance-id": INSTANCE_ID}
- self.userdata_raw = userdata
- self.vendordata_raw = vendordata
- self.vendordata2_raw = vendordata2
+from tests.unittests.util import FakeDataSource
def count_messages(root):
diff --git a/tests/unittests/test_stages.py b/tests/unittests/test_stages.py
index 9fa2e629..7fde2bac 100644
--- a/tests/unittests/test_stages.py
+++ b/tests/unittests/test_stages.py
@@ -11,31 +11,11 @@ from cloudinit.event import EventScope, EventType
from cloudinit.sources import NetworkConfigSource
from cloudinit.util import write_file
from tests.unittests.helpers import mock
+from tests.unittests.util import TEST_INSTANCE_ID, FakeDataSource
-TEST_INSTANCE_ID = "i-testing"
M_PATH = "cloudinit.stages."
-class FakeDataSource(sources.DataSource):
- def __init__(
- self, paths=None, userdata=None, vendordata=None, network_config=""
- ):
- super(FakeDataSource, self).__init__({}, None, paths=paths)
- self.metadata = {"instance-id": TEST_INSTANCE_ID}
- self.userdata_raw = userdata
- self.vendordata_raw = vendordata
- self._network_config = None
- if network_config: # Permit for None value to setup attribute
- self._network_config = network_config
-
- @property
- def network_config(self):
- return self._network_config
-
- def _get_data(self):
- return True
-
-
class TestInit:
@pytest.fixture(autouse=True)
def setup(self, tmpdir):
diff --git a/tests/unittests/test_upgrade.py b/tests/unittests/test_upgrade.py
index d7a721a2..ed3c7efb 100644
--- a/tests/unittests/test_upgrade.py
+++ b/tests/unittests/test_upgrade.py
@@ -18,7 +18,7 @@ import pathlib
import pytest
-from cloudinit.stages import _pkl_load
+from cloudinit.sources import pkl_load
from tests.unittests.helpers import resourceLocation
@@ -34,7 +34,7 @@ class TestUpgrade:
Test implementations _must not_ modify the ``previous_obj_pkl`` which
they are passed, as that will affect tests that run after them.
"""
- return _pkl_load(str(request.param))
+ return pkl_load(str(request.param))
def test_networking_set_on_distro(self, previous_obj_pkl):
"""We always expect to have ``.networking`` on ``Distro`` objects."""
diff --git a/tests/unittests/util.py b/tests/unittests/util.py
index c7dc73b9..4635ca3f 100644
--- a/tests/unittests/util.py
+++ b/tests/unittests/util.py
@@ -2,7 +2,7 @@
from unittest import mock
from cloudinit import cloud, distros, helpers
-from cloudinit.sources import DataSourceHostname
+from cloudinit.sources import DataSource, DataSourceHostname
from cloudinit.sources.DataSourceNone import DataSourceNone
@@ -147,3 +147,32 @@ class MockDistro(distros.Distro):
def update_package_sources(self):
return (True, "yay")
+
+
+TEST_INSTANCE_ID = "i-testing"
+
+
+class FakeDataSource(DataSource):
+ def __init__(
+ self,
+ userdata=None,
+ vendordata=None,
+ vendordata2=None,
+ network_config="",
+ paths=None,
+ ):
+ DataSource.__init__(self, {}, None, paths=paths)
+ self.metadata = {"instance-id": TEST_INSTANCE_ID}
+ self.userdata_raw = userdata
+ self.vendordata_raw = vendordata
+ self.vendordata2_raw = vendordata2
+ self._network_config = None
+ if network_config: # Permit for None value to setup attribute
+ self._network_config = network_config
+
+ @property
+ def network_config(self):
+ return self._network_config
+
+ def _get_data(self):
+ return True
diff --git a/tools/.github-cla-signers b/tools/.github-cla-signers
index 6d037c67..2a69fd57 100644
--- a/tools/.github-cla-signers
+++ b/tools/.github-cla-signers
@@ -89,6 +89,7 @@ rongz609
SadeghHayeri
sarahwzadara
scorpion44
+shaardie
shi2wei3
slingamn
slyon
diff --git a/tox.ini b/tox.ini
index 4765d3bd..055b1f07 100644
--- a/tox.ini
+++ b/tox.ini
@@ -263,7 +263,10 @@ deps = isort
commands = {[testenv:isort]commands}
[testenv:integration-tests]
-commands = {envpython} -m pytest --log-cli-level=INFO -vv {posargs:tests/integration_tests}
+commands = {envpython} -m pytest -vv \
+ --log-cli-level=INFO \
+ --durations 10 \
+ {posargs:tests/integration_tests}
deps =
-r{toxinidir}/integration-requirements.txt
passenv = CLOUD_INIT_* PYCLOUDLIB_* SSH_AUTH_SOCK OS_*