author     Chad Smith <chad.smith@canonical.com>    2022-08-17 22:30:57 -0600
committer  GitHub <noreply@github.com>              2022-08-17 22:30:57 -0600
commit     923e140d4443a3732fab1cc0229a13caed5d929a (patch)
tree       82053b07a542b9423292d2c043827004ecd5a647 /tests/unittests/sources/test_init.py
parent     66d4095c8260a73209a98a2cc9b52623b69f1fb7 (diff)
download   cloud-init-git-923e140d4443a3732fab1cc0229a13caed5d929a.tar.gz
sources: obj.pkl cache should be written anytime get_data is run (#1669)
When metadata update events trigger a new datasource.get_data run, ensure the cached obj.pkl is synced to disk so subsequent boot stages can leverage the updated metadata. Add a write_cache param to persist_instance_data to avoid persisting instance data when init.ds_restored from cache. This avoids a race on clouds where network config is updated per boot in the init-local timeframe but init-network uses stale network metadata from cache because the updated metadata was not persisted. Migrate _pkl_load and _pkl_store out of the stages module and into sources, as they are really only applicable to datasource serialization.
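For reference, the guard this change adds to persist_instance_data can be sketched roughly as below. This is a minimal, hypothetical outline inferred from the commit message and the new test in this diff, not the verbatim implementation; the helper name write_datasource_cache is invented, and attribute names such as paths.instance_link and get_ipath_cur("obj_pkl") are assumptions.

import os

from cloudinit.sources import pkl_store  # helper migrated out of cloudinit.stages by this commit

def write_datasource_cache(datasource, write_cache=True):
    """Hypothetical helper mirroring the new obj.pkl guard (sketch only)."""
    instance_link = datasource.paths.instance_link          # e.g. /var/lib/cloud/instance
    obj_pkl_path = datasource.paths.get_ipath_cur("obj_pkl")
    if write_cache and os.path.lexists(instance_link):
        # Only serialize once the per-instance symlink exists; later boot
        # stages then load this obj.pkl instead of reusing stale metadata.
        pkl_store(datasource, obj_pkl_path)
        return True
    return False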
Diffstat (limited to 'tests/unittests/sources/test_init.py')
-rw-r--r--   tests/unittests/sources/test_init.py   47
1 file changed, 44 insertions(+), 3 deletions(-)
diff --git a/tests/unittests/sources/test_init.py b/tests/unittests/sources/test_init.py
index a42c6a72..52f6cbfc 100644
--- a/tests/unittests/sources/test_init.py
+++ b/tests/unittests/sources/test_init.py
@@ -17,6 +17,7 @@ from cloudinit.sources import (
UNSET,
DataSource,
canonical_cloud_id,
+ pkl_load,
redact_sensitive_keys,
)
from cloudinit.user_data import UserDataProcessor
@@ -672,8 +673,12 @@ class TestDataSource(CiTestCase):
def test_persist_instance_data_writes_ec2_metadata_when_set(self):
"""When ec2_metadata class attribute is set, persist to json."""
tmp = self.tmp_dir()
+ cloud_dir = os.path.join(tmp, "cloud")
+ util.ensure_dir(cloud_dir)
datasource = DataSourceTestSubclassNet(
- self.sys_cfg, self.distro, Paths({"run_dir": tmp})
+ self.sys_cfg,
+ self.distro,
+ Paths({"run_dir": tmp, "cloud_dir": cloud_dir}),
)
datasource.ec2_metadata = UNSET
datasource.get_data()
@@ -690,8 +695,12 @@ class TestDataSource(CiTestCase):
def test_persist_instance_data_writes_canonical_cloud_id_and_symlink(self):
"""canonical-cloud-id class attribute is set, persist to json."""
tmp = self.tmp_dir()
+ cloud_dir = os.path.join(tmp, "cloud")
+ util.ensure_dir(cloud_dir)
datasource = DataSourceTestSubclassNet(
- self.sys_cfg, self.distro, Paths({"run_dir": tmp})
+ self.sys_cfg,
+ self.distro,
+ Paths({"run_dir": tmp, "cloud_dir": cloud_dir}),
)
cloud_id_link = os.path.join(tmp, "cloud-id")
cloud_id_file = os.path.join(tmp, "cloud-id-my-cloud")
@@ -722,8 +731,12 @@ class TestDataSource(CiTestCase):
def test_persist_instance_data_writes_network_json_when_set(self):
"""When network_data.json class attribute is set, persist to json."""
tmp = self.tmp_dir()
+ cloud_dir = os.path.join(tmp, "cloud")
+ util.ensure_dir(cloud_dir)
datasource = DataSourceTestSubclassNet(
- self.sys_cfg, self.distro, Paths({"run_dir": tmp})
+ self.sys_cfg,
+ self.distro,
+ Paths({"run_dir": tmp, "cloud_dir": cloud_dir}),
)
datasource.get_data()
json_file = self.tmp_path(INSTANCE_JSON_FILE, tmp)
@@ -736,6 +749,34 @@ class TestDataSource(CiTestCase):
{"network_json": "is good"}, instance_data["ds"]["network_json"]
)
+ def test_persist_instance_serializes_datasource_pickle(self):
+ """obj.pkl is written when instance link present and write_cache."""
+ tmp = self.tmp_dir()
+ cloud_dir = os.path.join(tmp, "cloud")
+ util.ensure_dir(cloud_dir)
+ datasource = DataSourceTestSubclassNet(
+ self.sys_cfg,
+ self.distro,
+ Paths({"run_dir": tmp, "cloud_dir": cloud_dir}),
+ )
+ pkl_cache_file = os.path.join(cloud_dir, "instance/obj.pkl")
+ self.assertFalse(os.path.exists(pkl_cache_file))
+ datasource.network_json = {"network_json": "is good"}
+ # No /var/lib/cloud/instance symlink
+ datasource.persist_instance_data(write_cache=True)
+ self.assertFalse(os.path.exists(pkl_cache_file))
+
+ # Symlink /var/lib/cloud/instance but write_cache=False
+ util.sym_link(cloud_dir, os.path.join(cloud_dir, "instance"))
+ datasource.persist_instance_data(write_cache=False)
+ self.assertFalse(os.path.exists(pkl_cache_file))
+
+ # Symlink /var/lib/cloud/instance and write_cache=True
+ datasource.persist_instance_data(write_cache=True)
+ self.assertTrue(os.path.exists(pkl_cache_file))
+ ds = pkl_load(pkl_cache_file)
+ self.assertEqual(datasource.network_json, ds.network_json)
+
def test_get_data_base64encodes_unserializable_bytes(self):
"""On py3, get_data base64encodes any unserializable content."""
tmp = self.tmp_dir()
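For completeness, the pkl_store/pkl_load helpers that this commit moves into cloudinit.sources might look roughly like the sketch below. The signatures and error handling here are assumptions simplified for illustration, not the actual module code.

import pickle

from cloudinit import util

def pkl_store(obj, fname):
    """Serialize a datasource object to an obj.pkl cache file (sketch)."""
    try:
        util.write_file(fname, pickle.dumps(obj), omode="wb")
    except Exception:
        return False
    return True

def pkl_load(fname):
    """Restore a previously cached datasource, or None if unreadable (sketch)."""
    try:
        return pickle.loads(util.load_file(fname, decode=False))
    except Exception:
        return None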