-rw-r--r--  debian/changelog                                        |  14
-rw-r--r--  debian/cloud-init.preinst                               |  75
-rw-r--r--  debian/patches/lp-1506187-azure_use_unique_vm_id.patch  | 449
-rw-r--r--  debian/patches/lp-1506244-azure-ssh-key-values.patch    | 150
-rw-r--r--  debian/patches/series                                   |   2
5 files changed, 688 insertions(+), 2 deletions(-)
diff --git a/debian/changelog b/debian/changelog
index 64279daa..6229bbdb 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,17 @@
+cloud-init (0.7.5-0ubuntu1.15) trusty; urgency=medium
+
+ * Microsoft Azure:
+ - d/patches/lp-1506244-azure-ssh-key-values.patch: AZURE: Add support
+ and preference for fabric-provided SSH public key values over
+ fingerprints (LP: #1506244).
+ - use a stable VM instance ID instead of SharedConfig.xml (LP: #1506187):
+ - d/patches/lp-1506187-azure_use_unique_vm_id.patch: use DMI data for
+ the stable VM instance ID
+ - d/cloud-init.preinst: migrate existing instances to stable VM instance
+ ID on upgrade from prior versions of cloud-init.
+
+ -- Ben Howard <ben.howard@ubuntu.com> Tue, 17 Nov 2015 10:02:24 -0700
+
cloud-init (0.7.5-0ubuntu1.14) trusty; urgency=medium
* d/patches/lp-1177432-same-archives-as-ubuntu-server.patch: use the
diff --git a/debian/cloud-init.preinst b/debian/cloud-init.preinst
index e5c7755f..43922665 100644
--- a/debian/cloud-init.preinst
+++ b/debian/cloud-init.preinst
@@ -105,6 +105,73 @@ convert_varlib_05x_06x() {
return 0
}
+azure_apply_new_instance_id_1506187() {
+ # With LP: #1506187, the Azure instance ID detection method was changed
+ # to use DMI data. To keep existing instances from being treated as new
+ # instances after upgrade, the recorded instance ID is migrated here.
+
+ if grep DataSourceAzure /var/lib/cloud/instance/datasource > /dev/null 2>&1; then
+
+ product_id_f="/sys/devices/virtual/dmi/id/product_uuid"
+ instance_id_f="/var/lib/cloud/data/instance-id"
+
+ if [ ! -e "${product_id_f}" -o ! -e "${instance_id_f}" ]; then
+ return 0
+ fi
+
+ # Get the current instance IDs (new and old)
+ new_instance_id="$(cat ${product_id_f})"
+ old_instance_id="$(cat ${instance_id_f})"
+
+ if [ "${new_instance_id}" = "${old_instance_id}" ]; then
+ # the migration may already have been applied by a prior version,
+ # e.g. when upgrading from 14.04 to 16.04
+ return 0
+
+ elif [ -z "${new_instance_id}" -o -z "${old_instance_id}" ]; then
+ cat <<EOM
+
+WARNING: Failed to migrate old instance ID to new instance ID.
+ Cloud-init may detect this instance as a new instance upon reboot.
+ Please see: https://bugs.launchpad.net/bug/1506187
+
+EOM
+
+ elif [ "${new_instance_id}" != "${old_instance_id}" ]; then
+ cat <<EOM
+
+AZURE: this instance uses an unstable instance ID. Cloud-init will
+ migrate the instance ID from:
+ ${old_instance_id}
+ to:
+ ${new_instance_id}
+ For more information about this change, please see:
+ https://bugs.launchpad.net/bug/1506187
+ https://azure.microsoft.com/en-us/blog/accessing-and-using-azure-vm-unique-id
+
+EOM
+
+ # Write out the new instance ID
+ echo "${new_instance_id}" > "${instance_id_f}"
+
+ # Remove the symlink for the instance
+ rm /var/lib/cloud/instance
+
+ # Rename the old instance id to the new one
+ mv /var/lib/cloud/instances/${old_instance_id} \
+ /var/lib/cloud/instances/${new_instance_id}
+
+ # Link the old ID to the new one, just in case
+ ln -s /var/lib/cloud/instances/${new_instance_id} \
+ /var/lib/cloud/instances/${old_instance_id}
+
+ # Make the active instance the new id
+ ln -s /var/lib/cloud/instances/${new_instance_id} \
+ /var/lib/cloud/instance
+ fi
+fi
+}
+
case "$1" in
install|upgrade)
# removing obsolete conffiles from the 'ec2-init' package
@@ -130,7 +197,7 @@ case "$1" in
for f in ${old_confs}; do
rm_conffile cloud-init "/etc/init/${f}.conf"
done
- fi
+ fi
if dpkg --compare-versions "$2" le "0.5.11-0ubuntu1"; then
# rename the config entries in sem/ so they're not run again
@@ -159,7 +226,11 @@ case "$1" in
if [ -e /var/lib/cloud/instance/sem/user-scripts ]; then
ln -sf user-scripts /var/lib/cloud/instance/sem/config-scripts-user
fi
-
+
+ # 0.7.7-bzr1556 introduced a new instance ID source for Azure
+ if dpkg --compare-versions "$2" le "0.7.5-0ubuntu1.15"; then
+ azure_apply_new_instance_id_1506187
+ fi
d=/etc/cloud/
if [ -f "$d/distro.cfg" ] && [ ! -f "$d/cloud.cfg.d/90_dpkg.cfg" ]; then
diff --git a/debian/patches/lp-1506187-azure_use_unique_vm_id.patch b/debian/patches/lp-1506187-azure_use_unique_vm_id.patch
new file mode 100644
index 00000000..1c181560
--- /dev/null
+++ b/debian/patches/lp-1506187-azure_use_unique_vm_id.patch
@@ -0,0 +1,449 @@
+--- a/cloudinit/sources/helpers/azure.py
++++ b/cloudinit/sources/helpers/azure.py
+@@ -79,12 +79,6 @@
+ './Container/RoleInstanceList/RoleInstance/InstanceId')
+
+ @property
+- def shared_config_xml(self):
+- url = self._text_from_xpath('./Container/RoleInstanceList/RoleInstance'
+- '/Configuration/SharedConfig')
+- return self.http_client.get(url).contents
+-
+- @property
+ def certificates_xml(self):
+ if self._certificates_xml is None:
+ url = self._text_from_xpath(
+@@ -172,19 +166,6 @@
+ return keys
+
+
+-def iid_from_shared_config_content(content):
+- """
+- find INSTANCE_ID in:
+- <?xml version="1.0" encoding="utf-8"?>
+- <SharedConfig version="1.0.0.0" goalStateIncarnation="1">
+- <Deployment name="INSTANCE_ID" guid="{...}" incarnation="0">
+- <Service name="..." guid="{00000000-0000-0000-0000-000000000000}"/>
+- """
+- root = ElementTree.fromstring(content)
+- depnode = root.find('Deployment')
+- return depnode.get('name')
+-
+-
+ class WALinuxAgentShim(object):
+
+ REPORT_READY_XML_TEMPLATE = '\n'.join([
+@@ -263,8 +244,6 @@
+ public_keys = self.openssl_manager.parse_certificates(
+ goal_state.certificates_xml)
+ data = {
+- 'instance-id': iid_from_shared_config_content(
+- goal_state.shared_config_xml),
+ 'public-keys': public_keys,
+ }
+ self._report_ready(goal_state, http_client)
+--- a/cloudinit/sources/DataSourceAzure.py
++++ b/cloudinit/sources/DataSourceAzure.py
+@@ -31,8 +31,7 @@
+ from cloudinit.settings import PER_ALWAYS
+ from cloudinit import sources
+ from cloudinit import util
+-from cloudinit.sources.helpers.azure import (
+- get_metadata_from_fabric, iid_from_shared_config_content)
++from cloudinit.sources.helpers.azure import get_metadata_from_fabric
+
+ LOG = logging.getLogger(__name__)
+
+@@ -41,7 +40,6 @@
+ AGENT_START = ['service', 'walinuxagent', 'start']
+ BOUNCE_COMMAND = ['sh', '-xc',
+ "i=$interface; x=0; ifdown $i || x=$?; ifup $i || x=$?; exit $x"]
+-DATA_DIR_CLEAN_LIST = ['SharedConfig.xml']
+
+ BUILTIN_DS_CONFIG = {
+ 'agent_command': AGENT_START,
+@@ -144,8 +142,6 @@
+ self.ds_cfg['agent_command'])
+
+ ddir = self.ds_cfg['data_dir']
+- shcfgxml = os.path.join(ddir, "SharedConfig.xml")
+- wait_for = [shcfgxml]
+
+ fp_files = []
+ key_value = None
+@@ -160,19 +156,11 @@
+
+ missing = util.log_time(logfunc=LOG.debug, msg="waiting for files",
+ func=wait_for_files,
+- args=(wait_for + fp_files,))
++ args=(fp_files,))
+ if len(missing):
+ LOG.warn("Did not find files, but going on: %s", missing)
+
+ metadata = {}
+- if shcfgxml in missing:
+- LOG.warn("SharedConfig.xml missing, using static instance-id")
+- else:
+- try:
+- metadata['instance-id'] = iid_from_shared_config(shcfgxml)
+- except ValueError as e:
+- LOG.warn("failed to get instance id in %s: %s", shcfgxml, e)
+-
+ metadata['public-keys'] = key_value or pubkeys_from_crt_files(fp_files)
+ return metadata
+
+@@ -228,21 +216,6 @@
+ user_ds_cfg = util.get_cfg_by_path(self.cfg, DS_CFG_PATH, {})
+ self.ds_cfg = util.mergemanydict([user_ds_cfg, self.ds_cfg])
+
+- if found != ddir:
+- cached_ovfenv = util.load_file(
+- os.path.join(ddir, 'ovf-env.xml'), quiet=True)
+- if cached_ovfenv != files['ovf-env.xml']:
+- # source was not walinux-agent's datadir, so we have to clean
+- # up so 'wait_for_files' doesn't return early due to stale data
+- cleaned = []
+- for f in [os.path.join(ddir, f) for f in DATA_DIR_CLEAN_LIST]:
+- if os.path.exists(f):
+- util.del_file(f)
+- cleaned.append(f)
+- if cleaned:
+- LOG.info("removed stale file(s) in '%s': %s",
+- ddir, str(cleaned))
+-
+ # walinux agent writes files world readable, but expects
+ # the directory to be protected.
+ write_files(ddir, files, dirmode=0o700)
+@@ -258,6 +231,7 @@
+ " on Azure.", exc_info=True)
+ return False
+
++ self.metadata['instance-id'] = get_instance_id()
+ self.metadata.update(fabric_data)
+
+ found_ephemeral = find_fabric_formatted_ephemeral_disk()
+@@ -282,6 +256,13 @@
+ return len(fnmatch.filter(os.listdir(mp), '*[!cdrom]*'))
+
+
++def get_instance_id():
++ """
++ Read the instance ID from dmi data
++ """
++ return util.read_dmi_data('system-uuid')
++
++
+ def find_fabric_formatted_ephemeral_part():
+ """
+ Locate the first fabric formatted ephemeral device.
+@@ -515,7 +496,7 @@
+ continue
+
+ if (len(child.childNodes) != 1 or
+- child.childNodes[0].nodeType != text_node):
++ child.childNodes[0].nodeType != text_node):
+ continue
+
+ cur[name] = child.childNodes[0].wholeText.strip()
+@@ -570,7 +551,7 @@
+ simple = False
+ value = ""
+ if (len(child.childNodes) == 1 and
+- child.childNodes[0].nodeType == dom.TEXT_NODE):
++ child.childNodes[0].nodeType == dom.TEXT_NODE):
+ simple = True
+ value = child.childNodes[0].wholeText
+
+@@ -648,12 +629,6 @@
+ return (md, ud, cfg, {'ovf-env.xml': contents})
+
+
+-def iid_from_shared_config(path):
+- with open(path, "rb") as fp:
+- content = fp.read()
+- return iid_from_shared_config_content(content)
+-
+-
+ class BrokenAzureDataSource(Exception):
+ pass
+
+--- a/tests/unittests/test_datasource/test_azure.py
++++ b/tests/unittests/test_datasource/test_azure.py
+@@ -120,15 +120,13 @@
+ data['pubkey_files'] = flist
+ return ["pubkey_from: %s" % f for f in flist]
+
+- def _iid_from_shared_config(path):
+- data['iid_from_shared_cfg'] = path
++ def _get_instance_id():
+ return 'i-my-azure-id'
+
+ if data.get('ovfcontent') is not None:
+ populate_dir(os.path.join(self.paths.seed_dir, "azure"),
+ {'ovf-env.xml': data['ovfcontent']})
+
+-
+ mod = DataSourceAzure
+ mod.BUILTIN_DS_CONFIG = OVERRIDE_BUILTIN_DS_CONFIG
+ mod.BUILTIN_DS_CONFIG['data_dir'] = self.waagent_d
+@@ -139,8 +137,7 @@
+ (mod, 'wait_for_files', _wait_for_files),
+ (mod, 'pubkeys_from_crt_files',
+ _pubkeys_from_crt_files),
+- (mod, 'iid_from_shared_config',
+- _iid_from_shared_config)])
++ (mod, 'get_instance_id', _get_instance_id)])
+
+ dsrc = mod.DataSourceAzureNet(
+ data.get('sys_cfg', {}), distro=None, paths=self.paths)
+@@ -209,7 +206,7 @@
+ yaml_cfg = "{agent_command: my_command}\n"
+ cfg = yaml.safe_load(yaml_cfg)
+ odata = {'HostName': "myhost", 'UserName': "myuser",
+- 'dscfg': {'text': yaml_cfg, 'encoding': 'plain'}}
++ 'dscfg': {'text': yaml_cfg, 'encoding': 'plain'}}
+ data = {'ovfcontent': construct_valid_ovf_env(data=odata)}
+
+ dsrc = self._get_ds(data)
+@@ -221,8 +218,8 @@
+ # set dscfg in via base64 encoded yaml
+ cfg = {'agent_command': "my_command"}
+ odata = {'HostName': "myhost", 'UserName': "myuser",
+- 'dscfg': {'text': base64.b64encode(yaml.dump(cfg)),
+- 'encoding': 'base64'}}
++ 'dscfg': {'text': base64.b64encode(yaml.dump(cfg)),
++ 'encoding': 'base64'}}
+ data = {'ovfcontent': construct_valid_ovf_env(data=odata)}
+
+ dsrc = self._get_ds(data)
+@@ -269,7 +266,8 @@
+ # should equal that after the '$'
+ pos = defuser['passwd'].rfind("$") + 1
+ self.assertEqual(defuser['passwd'],
+- crypt.crypt(odata['UserPassword'], defuser['passwd'][0:pos]))
++ crypt.crypt(odata['UserPassword'],
++ defuser['passwd'][0:pos]))
+
+ def test_userdata_found(self):
+ mydata = "FOOBAR"
+@@ -282,7 +280,7 @@
+ self.assertEqual(dsrc.userdata_raw, mydata)
+
+ def test_no_datasource_expected(self):
+- #no source should be found if no seed_dir and no devs
++ # no source should be found if no seed_dir and no devs
+ data = {}
+ dsrc = self._get_ds({})
+ ret = dsrc.get_data()
+@@ -334,7 +332,6 @@
+ for mypk in mypklist:
+ self.assertIn(mypk['value'], dsrc.metadata['public-keys'])
+
+-
+ def test_disabled_bounce(self):
+ pass
+
+@@ -360,8 +357,8 @@
+ # Make sure that user can affect disk aliases
+ dscfg = {'disk_aliases': {'ephemeral0': '/dev/sdc'}}
+ odata = {'HostName': "myhost", 'UserName': "myuser",
+- 'dscfg': {'text': base64.b64encode(yaml.dump(dscfg)),
+- 'encoding': 'base64'}}
++ 'dscfg': {'text': base64.b64encode(yaml.dump(dscfg)),
++ 'encoding': 'base64'}}
+ usercfg = {'disk_setup': {'/dev/sdc': {'something': '...'},
+ 'ephemeral0': False}}
+ userdata = '#cloud-config' + yaml.dump(usercfg) + "\n"
+@@ -420,83 +417,22 @@
+ self.assertTrue(os.path.exists(ovf_env_path))
+ self.xml_equals(xml, load_file(ovf_env_path))
+
+- def test_existing_ovf_same(self):
+- # waagent/SharedConfig left alone if found ovf-env.xml same as cached
+- odata = {'UserData': base64.b64encode("SOMEUSERDATA")}
+- data = {'ovfcontent': construct_valid_ovf_env(data=odata)}
+-
+- populate_dir(self.waagent_d,
+- {'ovf-env.xml': data['ovfcontent'],
+- 'otherfile': 'otherfile-content',
+- 'SharedConfig.xml': 'mysharedconfig'})
+-
+- dsrc = self._get_ds(data)
+- ret = dsrc.get_data()
+- self.assertTrue(ret)
+- self.assertTrue(os.path.exists(
+- os.path.join(self.waagent_d, 'ovf-env.xml')))
+- self.assertTrue(os.path.exists(
+- os.path.join(self.waagent_d, 'otherfile')))
+- self.assertTrue(os.path.exists(
+- os.path.join(self.waagent_d, 'SharedConfig.xml')))
+-
+- def test_existing_ovf_diff(self):
+- # waagent/SharedConfig must be removed if ovfenv is found elsewhere
+-
+- # 'get_data' should remove SharedConfig.xml in /var/lib/waagent
+- # if ovf-env.xml differs.
+- cached_ovfenv = construct_valid_ovf_env(
+- {'userdata': base64.b64encode("FOO_USERDATA")})
+- new_ovfenv = construct_valid_ovf_env(
+- {'userdata': base64.b64encode("NEW_USERDATA")})
+-
+- populate_dir(self.waagent_d,
+- {'ovf-env.xml': cached_ovfenv,
+- 'SharedConfig.xml': "mysharedconfigxml",
+- 'otherfile': 'otherfilecontent'})
+-
+- dsrc = self._get_ds({'ovfcontent': new_ovfenv})
+- ret = dsrc.get_data()
+- self.assertTrue(ret)
+- self.assertEqual(dsrc.userdata_raw, "NEW_USERDATA")
+- self.assertTrue(os.path.exists(
+- os.path.join(self.waagent_d, 'otherfile')))
+- self.assertFalse(
+- os.path.exists(os.path.join(self.waagent_d, 'SharedConfig.xml')))
+- self.assertTrue(
+- os.path.exists(os.path.join(self.waagent_d, 'ovf-env.xml')))
+- new_xml = load_file(os.path.join(self.waagent_d, 'ovf-env.xml'))
+- self.xml_equals(new_ovfenv, new_xml)
+
+ class TestReadAzureOvf(MockerTestCase):
+ def test_invalid_xml_raises_non_azure_ds(self):
+ invalid_xml = "<foo>" + construct_valid_ovf_env(data={})
+ self.assertRaises(DataSourceAzure.BrokenAzureDataSource,
+- DataSourceAzure.read_azure_ovf, invalid_xml)
++ DataSourceAzure.read_azure_ovf, invalid_xml)
+
+ def test_load_with_pubkeys(self):
+ mypklist = [{'fingerprint': 'fp1', 'path': 'path1', 'value': ''}]
+ pubkeys = [(x['fingerprint'], x['path'], x['value']) for x in mypklist]
+ content = construct_valid_ovf_env(pubkeys=pubkeys)
+- (_md, _ud, cfg) = DataSourceAzure.read_azure_ovf(content)
++ (_, _, cfg) = DataSourceAzure.read_azure_ovf(content)
+ for mypk in mypklist:
+ self.assertIn(mypk, cfg['_pubkeys'])
+
+
+-class TestReadAzureSharedConfig(MockerTestCase):
+- def test_valid_content(self):
+- xml = """<?xml version="1.0" encoding="utf-8"?>
+- <SharedConfig>
+- <Deployment name="MY_INSTANCE_ID">
+- <Service name="myservice"/>
+- <ServiceInstance name="INSTANCE_ID.0" guid="{abcd-uuid}" />
+- </Deployment>
+- <Incarnation number="1"/>
+- </SharedConfig>"""
+- ret = DataSourceAzure.iid_from_shared_config_content(xml)
+- self.assertEqual("MY_INSTANCE_ID", ret)
+-
+-
+ def apply_patches(patches):
+ ret = []
+ for (ref, name, replace) in patches:
+--- a/cloudinit/util.py
++++ b/cloudinit/util.py
+@@ -74,6 +74,31 @@
+ # Helper utils to see if running in a container
+ CONTAINER_TESTS = ['running-in-container', 'lxc-is-container']
+
++# Path for DMI Data
++DMI_SYS_PATH = "/sys/class/dmi/id"
++
++# dmidecode and /sys/class/dmi/id/* use different names for the same value,
++# this allows us to refer to them by one canonical name
++DMIDECODE_TO_DMI_SYS_MAPPING = {
++ 'baseboard-asset-tag': 'board_asset_tag',
++ 'baseboard-manufacturer': 'board_vendor',
++ 'baseboard-product-name': 'board_name',
++ 'baseboard-serial-number': 'board_serial',
++ 'baseboard-version': 'board_version',
++ 'bios-release-date': 'bios_date',
++ 'bios-vendor': 'bios_vendor',
++ 'bios-version': 'bios_version',
++ 'chassis-asset-tag': 'chassis_asset_tag',
++ 'chassis-manufacturer': 'chassis_vendor',
++ 'chassis-serial-number': 'chassis_serial',
++ 'chassis-version': 'chassis_version',
++ 'system-manufacturer': 'sys_vendor',
++ 'system-product-name': 'product_name',
++ 'system-serial-number': 'product_serial',
++ 'system-uuid': 'product_uuid',
++ 'system-version': 'product_version',
++}
++
+
+ class ProcessExecutionError(IOError):
+
+@@ -1926,3 +1951,72 @@
+ raise ValueError("Missing required files: %s", ','.join(missing))
+
+ return ret
++
++
++def _read_dmi_syspath(key):
++ """
++ Reads dmi data from /sys/class/dmi/id
++ """
++ if key not in DMIDECODE_TO_DMI_SYS_MAPPING:
++ return None
++ mapped_key = DMIDECODE_TO_DMI_SYS_MAPPING[key]
++ dmi_key_path = "{0}/{1}".format(DMI_SYS_PATH, mapped_key)
++ LOG.debug("querying dmi data %s", dmi_key_path)
++ try:
++ if not os.path.exists(dmi_key_path):
++ LOG.debug("did not find %s", dmi_key_path)
++ return None
++
++ key_data = load_file(dmi_key_path)
++ if not key_data:
++ LOG.debug("%s did not return any data", dmi_key_path)
++ return None
++
++ LOG.debug("dmi data %s returned %s", dmi_key_path, key_data)
++ return key_data.strip()
++
++ except Exception:
++ logexc(LOG, "failed read of %s", dmi_key_path)
++ return None
++
++
++def _call_dmidecode(key, dmidecode_path):
++ """
++ Calls out to dmidecode to get the data out. This is mostly for supporting
++ OSes without /sys/class/dmi/id support.
++ """
++ try:
++ cmd = [dmidecode_path, "--string", key]
++ (result, _err) = subp(cmd)
++ LOG.debug("dmidecode returned '%s' for '%s'", result, key)
++ return result
++ except (IOError, OSError) as _err:
++ LOG.debug('failed dmidecode cmd: %s\n%s', cmd, _err.message)
++ return None
++
++
++def read_dmi_data(key):
++ """
++ Wrapper for reading DMI data.
++
++ This will do the following (returning the first that produces a
++ result):
++ 1) Use a mapping to translate `key` from dmidecode naming to
++ sysfs naming and look in /sys/class/dmi/... for a value.
++ 2) Use `key` as a sysfs key directly and look in /sys/class/dmi/...
++ 3) Fall-back to passing `key` to `dmidecode --string`.
++
++ If all of the above fail to find a value, None will be returned.
++ """
++ syspath_value = _read_dmi_syspath(key)
++ if syspath_value is not None:
++ return syspath_value
++
++ dmidecode_path = which('dmidecode')
++ if dmidecode_path:
++ return _call_dmidecode(key, dmidecode_path)
++
++ LOG.warn("did not find either path %s or dmidecode command",
++ DMI_SYS_PATH)
++ return None
++
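A short usage sketch of the helper this patch adds (the printed value is illustrative): util.read_dmi_data('system-uuid') maps the dmidecode-style key to /sys/class/dmi/id/product_uuid and only falls back to the dmidecode binary when sysfs yields nothing.

    from cloudinit import util

    # 'system-uuid' maps to /sys/class/dmi/id/product_uuid via
    # DMIDECODE_TO_DMI_SYS_MAPPING; dmidecode is consulted only if the
    # sysfs read produces no value.
    iid = util.read_dmi_data('system-uuid')
    if iid is None:
        # neither sysfs nor dmidecode produced a value
        raise RuntimeError("no DMI system-uuid available")
    print(iid)  # on Azure, a UUID that is stable for the lifetime of the VM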
diff --git a/debian/patches/lp-1506244-azure-ssh-key-values.patch b/debian/patches/lp-1506244-azure-ssh-key-values.patch
new file mode 100644
index 00000000..b15f8d68
--- /dev/null
+++ b/debian/patches/lp-1506244-azure-ssh-key-values.patch
@@ -0,0 +1,150 @@
+Description: AZURE: add support/preference for SSH public key values
+ Azure has started to support SSH public key values in addition to SSH
+ public key fingerprints. Per Microsoft, this patch prefers fabric-provided
+ key values over fingerprints.
+Author: Ben Howard <ben.howard@ubuntu.com>
+Origin: upstream, http://bazaar.launchpad.net/~cloud-init-dev/cloud-init/trunk/revision/1149
+Bug: https://bugs.launchpad.net/cloud-init/+bug/1506244
+
+--- a/cloudinit/sources/DataSourceAzure.py
++++ b/cloudinit/sources/DataSourceAzure.py
+@@ -148,9 +148,15 @@
+ wait_for = [shcfgxml]
+
+ fp_files = []
++ key_value = None
+ for pk in self.cfg.get('_pubkeys', []):
+- bname = str(pk['fingerprint'] + ".crt")
+- fp_files += [os.path.join(ddir, bname)]
++ if pk.get('value', None):
++ key_value = pk['value']
++ LOG.debug("ssh authentication: using value from fabric")
++ else:
++ bname = str(pk['fingerprint'] + ".crt")
++ fp_files += [os.path.join(ddir, bname)]
++ LOG.debug("ssh authentication: using fingerprint from fabirc")
+
+ missing = util.log_time(logfunc=LOG.debug, msg="waiting for files",
+ func=wait_for_files,
+@@ -166,7 +172,8 @@
+ metadata['instance-id'] = iid_from_shared_config(shcfgxml)
+ except ValueError as e:
+ LOG.warn("failed to get instance id in %s: %s", shcfgxml, e)
+- metadata['public-keys'] = pubkeys_from_crt_files(fp_files)
++
++ metadata['public-keys'] = key_value or pubkeys_from_crt_files(fp_files)
+ return metadata
+
+ def get_data(self):
+@@ -432,7 +439,7 @@
+ elem.text != DEF_PASSWD_REDACTION):
+ elem.text = DEF_PASSWD_REDACTION
+ return ET.tostring(root)
+- except Exception as e:
++ except Exception:
+ LOG.critical("failed to redact userpassword in {}".format(fname))
+ return cnt
+
+@@ -496,7 +503,8 @@
+ for pk_node in pubkeys:
+ if not pk_node.hasChildNodes():
+ continue
+- cur = {'fingerprint': "", 'path': ""}
++
++ cur = {'fingerprint': "", 'path': "", 'value': ""}
+ for child in pk_node.childNodes:
+ if child.nodeType == text_node or not child.localName:
+ continue
+--- a/tests/unittests/test_datasource/test_azure.py
++++ b/tests/unittests/test_datasource/test_azure.py
+@@ -58,10 +58,13 @@
+
+ if pubkeys:
+ content += "<SSH><PublicKeys>\n"
+- for fp, path in pubkeys:
++ for fp, path, value in pubkeys:
+ content += " <PublicKey>"
+- content += ("<Fingerprint>%s</Fingerprint><Path>%s</Path>" %
+- (fp, path))
++ if fp and path:
++ content += ("<Fingerprint>%s</Fingerprint><Path>%s</Path>" %
++ (fp, path))
++ if value:
++ content += "<Value>%s</Value>" % value
+ content += "</PublicKey>\n"
+ content += "</PublicKeys></SSH>"
+ content += """
+@@ -173,7 +176,7 @@
+ def xml_notequals(self, oxml, nxml):
+ try:
+ self.xml_equals(oxml, nxml)
+- except AssertionError as e:
++ except AssertionError:
+ return
+ raise AssertionError("XML is the same")
+
+@@ -286,10 +289,10 @@
+ self.assertFalse(ret)
+ self.assertFalse('agent_invoked' in data)
+
+- def test_cfg_has_pubkeys(self):
++ def test_cfg_has_pubkeys_fingerprint(self):
+ odata = {'HostName': "myhost", 'UserName': "myuser"}
+- mypklist = [{'fingerprint': 'fp1', 'path': 'path1'}]
+- pubkeys = [(x['fingerprint'], x['path']) for x in mypklist]
++ mypklist = [{'fingerprint': 'fp1', 'path': 'path1', 'value': ''}]
++ pubkeys = [(x['fingerprint'], x['path'], x['value']) for x in mypklist]
+ data = {'ovfcontent': construct_valid_ovf_env(data=odata,
+ pubkeys=pubkeys)}
+
+@@ -298,6 +301,39 @@
+ self.assertTrue(ret)
+ for mypk in mypklist:
+ self.assertIn(mypk, dsrc.cfg['_pubkeys'])
++ self.assertIn('pubkey_from', dsrc.metadata['public-keys'][-1])
++
++ def test_cfg_has_pubkeys_value(self):
++ # make sure that provided key is used over fingerprint
++ odata = {'HostName': "myhost", 'UserName': "myuser"}
++ mypklist = [{'fingerprint': 'fp1', 'path': 'path1', 'value': 'value1'}]
++ pubkeys = [(x['fingerprint'], x['path'], x['value']) for x in mypklist]
++ data = {'ovfcontent': construct_valid_ovf_env(data=odata,
++ pubkeys=pubkeys)}
++
++ dsrc = self._get_ds(data)
++ ret = dsrc.get_data()
++ self.assertTrue(ret)
++
++ for mypk in mypklist:
++ self.assertIn(mypk, dsrc.cfg['_pubkeys'])
++ self.assertIn(mypk['value'], dsrc.metadata['public-keys'])
++
++ def test_cfg_has_no_fingerprint_has_value(self):
++ # test value is used when fingerprint not provided
++ odata = {'HostName': "myhost", 'UserName': "myuser"}
++ mypklist = [{'fingerprint': None, 'path': 'path1', 'value': 'value1'}]
++ pubkeys = [(x['fingerprint'], x['path'], x['value']) for x in mypklist]
++ data = {'ovfcontent': construct_valid_ovf_env(data=odata,
++ pubkeys=pubkeys)}
++
++ dsrc = self._get_ds(data)
++ ret = dsrc.get_data()
++ self.assertTrue(ret)
++
++ for mypk in mypklist:
++ self.assertIn(mypk['value'], dsrc.metadata['public-keys'])
++
+
+ def test_disabled_bounce(self):
+ pass
+@@ -439,8 +475,8 @@
+ DataSourceAzure.read_azure_ovf, invalid_xml)
+
+ def test_load_with_pubkeys(self):
+- mypklist = [{'fingerprint': 'fp1', 'path': 'path1'}]
+- pubkeys = [(x['fingerprint'], x['path']) for x in mypklist]
++ mypklist = [{'fingerprint': 'fp1', 'path': 'path1', 'value': ''}]
++ pubkeys = [(x['fingerprint'], x['path'], x['value']) for x in mypklist]
+ content = construct_valid_ovf_env(pubkeys=pubkeys)
+ (_md, _ud, cfg) = DataSourceAzure.read_azure_ovf(content)
+ for mypk in mypklist:
diff --git a/debian/patches/series b/debian/patches/series
index 7072f091..d323c22f 100644
--- a/debian/patches/series
+++ b/debian/patches/series
@@ -17,3 +17,5 @@ lp-1469260-fix-consumption-of-vendor-data.patch
lp-1461242-generate-ed25519-host-keys.patch
lp-1493453-nocloudds-vendor_data.patch
lp-1177432-same-archives-as-ubuntu-server.patch
+lp-1506244-azure-ssh-key-values.patch
+lp-1506187-azure_use_unique_vm_id.patch