From a23c886ea2cd301b6021eb03636beb5b92c429dc Mon Sep 17 00:00:00 2001
From: Alberto Contreras <alberto.contreras@canonical.com>
Date: Wed, 22 Jun 2022 17:38:17 +0200
Subject: [PATCH] cloud-config: honor cloud_dir setting (#1523)

Reference: https://github.com/canonical/cloud-init/commit/a23c886ea2cd301b6021eb03636beb5b92c429dc
Conflict: (1) only change logs.py and test_logs.py;
(2) do not add the TestParser class in test_logs.py;
(3) format the diffs.

Ensure the cloud_dir setting is respected rather than hardcoding
"/var/lib/cloud".

- Modules affected: cmd.main, apport, devel.logs (collect-logs),
  cc_snap, sources.DataSourceAzure, sources.DataSourceBigstep,
  util:fetch_ssl_details.
- testing: Extend and port to pytest unit tests, add integration test.

LP: #1976564
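A minimal sketch of what the new _get_user_data_file() helper resolves,
assuming a hypothetical /etc/cloud/cloud.cfg (or cloud.cfg.d drop-in) that
overrides cloud_dir through the system_info.paths.cloud_dir key:

    # Sketch only; /custom/cloud is a hypothetical override, e.g.:
    #   system_info:
    #     paths:
    #       cloud_dir: /custom/cloud
    from cloudinit.cmd.devel import read_cfg_paths

    paths = read_cfg_paths()  # Paths object built from the system config
    print(paths.get_ipath_cur("userdata_raw"))
    # With the override above this prints
    # /custom/cloud/instance/user-data.txt rather than the previously
    # hardcoded /var/lib/cloud/instance/user-data.txt.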
---
 cloudinit/cmd/devel/logs.py            | 15 ++-
 cloudinit/cmd/devel/tests/test_logs.py | 174 +++++++++++++------------
 2 files changed, 99 insertions(+), 90 deletions(-)

diff --git a/cloudinit/cmd/devel/logs.py b/cloudinit/cmd/devel/logs.py
index 31ade73..84c157d 100644
--- a/cloudinit/cmd/devel/logs.py
+++ b/cloudinit/cmd/devel/logs.py
@@ -10,6 +10,7 @@ import os
 import shutil
 import sys
 
+from cloudinit.cmd.devel import read_cfg_paths
 from cloudinit.sources import INSTANCE_JSON_SENSITIVE_FILE
 from cloudinit.temp_utils import tempdir
 from cloudinit.subp import (ProcessExecutionError, subp)
@@ -18,7 +19,11 @@ from cloudinit.util import (chdir, copy, ensure_dir, write_file)
 
 CLOUDINIT_LOGS = ['/var/log/cloud-init.log', '/var/log/cloud-init-output.log']
 CLOUDINIT_RUN_DIR = '/run/cloud-init'
-USER_DATA_FILE = '/var/lib/cloud/instance/user-data.txt'  # Optional
+
+
+def _get_user_data_file() -> str:
+    paths = read_cfg_paths()
+    return paths.get_ipath_cur("userdata_raw")
 
 
 def get_parser(parser=None):
@@ -40,11 +45,12 @@ def get_parser(parser=None):
         "--tarfile", '-t', default='cloud-init.tar.gz',
         help=('The tarfile to create containing all collected logs.'
               ' Default: cloud-init.tar.gz'))
+    user_data_file = _get_user_data_file()
     parser.add_argument(
         "--include-userdata", '-u', default=False, action='store_true',
         dest='userdata', help=(
             'Optionally include user-data from {0} which could contain'
-            ' sensitive information.'.format(USER_DATA_FILE)))
+            ' sensitive information.'.format(user_data_file)))
     return parser
 
 
@@ -85,7 +91,7 @@ def _collect_file(path, out_dir, verbosity):
         _debug("file %s did not exist\n" % path, 2, verbosity)
 
 
-def collect_logs(tarfile, include_userdata, verbosity=0):
+def collect_logs(tarfile, include_userdata: bool, verbosity=0):
     """Collect all cloud-init logs and tar them up into the provided tarfile.
 
     @param tarfile: The path of the tar-gzipped file to create.
@@ -123,7 +129,8 @@ def collect_logs(tarfile, include_userdata, verbosity=0):
         for log in CLOUDINIT_LOGS:
             _collect_file(log, log_dir, verbosity)
         if include_userdata:
-            _collect_file(USER_DATA_FILE, log_dir, verbosity)
+            user_data_file = _get_user_data_file()
+            _collect_file(user_data_file, log_dir, verbosity)
         run_dir = os.path.join(log_dir, 'run')
         ensure_dir(run_dir)
         if os.path.exists(CLOUDINIT_RUN_DIR):
diff --git a/cloudinit/cmd/devel/tests/test_logs.py b/cloudinit/cmd/devel/tests/test_logs.py
index ddfd58e..4ef6d5d 100644
--- a/cloudinit/cmd/devel/tests/test_logs.py
+++ b/cloudinit/cmd/devel/tests/test_logs.py
@@ -2,48 +2,46 @@
 
 from datetime import datetime
 import os
+import re
 from io import StringIO
 
 from cloudinit.cmd.devel import logs
 from cloudinit.sources import INSTANCE_JSON_SENSITIVE_FILE
-from cloudinit.tests.helpers import (
-    FilesystemMockingTestCase, mock, wrap_and_call)
 from cloudinit.subp import subp
-from cloudinit.util import ensure_dir, load_file, write_file
+from cloudinit.util import load_file, write_file
+from cloudinit.tests.helpers import mock
 
+M_PATH = "cloudinit.cmd.devel.logs."
 
-@mock.patch('cloudinit.cmd.devel.logs.os.getuid')
-class TestCollectLogs(FilesystemMockingTestCase):
-
-    def setUp(self):
-        super(TestCollectLogs, self).setUp()
-        self.new_root = self.tmp_dir()
-        self.run_dir = self.tmp_path('run', self.new_root)
-
-    def test_collect_logs_with_userdata_requires_root_user(self, m_getuid):
+@mock.patch("cloudinit.cmd.devel.logs.os.getuid")
+class TestCollectLogs:
+    def test_collect_logs_with_userdata_requires_root_user(
+        self, m_getuid, tmpdir
+    ):
         """collect-logs errors when non-root user collects userdata ."""
         m_getuid.return_value = 100  # non-root
-        output_tarfile = self.tmp_path('logs.tgz')
+        output_tarfile = tmpdir.join('logs.tgz')
         with mock.patch('sys.stderr', new_callable=StringIO) as m_stderr:
-            self.assertEqual(
-                1, logs.collect_logs(output_tarfile, include_userdata=True))
-            self.assertEqual(
+            assert 1 == logs.collect_logs(
+                output_tarfile, include_userdata=True
+            )
+            assert (
                 'To include userdata, root user is required.'
-                ' Try sudo cloud-init collect-logs\n',
-                m_stderr.getvalue())
+                " Try sudo cloud-init collect-logs\n" == m_stderr.getvalue()
+            )
 
-    def test_collect_logs_creates_tarfile(self, m_getuid):
+    def test_collect_logs_creates_tarfile(self, m_getuid, mocker, tmpdir):
         """collect-logs creates a tarfile with all related cloud-init info."""
         m_getuid.return_value = 100
-        log1 = self.tmp_path('cloud-init.log', self.new_root)
+        log1 = tmpdir.join("cloud-init.log")
         write_file(log1, 'cloud-init-log')
-        log2 = self.tmp_path('cloud-init-output.log', self.new_root)
+        log2 = tmpdir.join("cloud-init-output.log")
         write_file(log2, 'cloud-init-output-log')
-        ensure_dir(self.run_dir)
-        write_file(self.tmp_path('results.json', self.run_dir), 'results')
-        write_file(self.tmp_path(INSTANCE_JSON_SENSITIVE_FILE, self.run_dir),
+        run_dir = tmpdir.join("run")
+        write_file(run_dir.join("results.json"), "results")
+        write_file(run_dir.join(INSTANCE_JSON_SENSITIVE_FILE,),
                    'sensitive')
-        output_tarfile = self.tmp_path('logs.tgz')
+        output_tarfile = str(tmpdir.join("logs.tgz"))
 
         date = datetime.utcnow().date().strftime('%Y-%m-%d')
         date_logdir = 'cloud-init-logs-{0}'.format(date)
@@ -68,59 +66,61 @@ class TestCollectLogs(FilesystemMockingTestCase):
             return expected_subp[cmd_tuple], ''
 
         fake_stderr = mock.MagicMock()
-
-        wrap_and_call(
-            'cloudinit.cmd.devel.logs',
-            {'subp': {'side_effect': fake_subp},
-             'sys.stderr': {'new': fake_stderr},
-             'CLOUDINIT_LOGS': {'new': [log1, log2]},
-             'CLOUDINIT_RUN_DIR': {'new': self.run_dir}},
-            logs.collect_logs, output_tarfile, include_userdata=False)
+        mocker.patch(M_PATH + "subp", side_effect=fake_subp)
+        mocker.patch(M_PATH + "sys.stderr", fake_stderr)
+        mocker.patch(M_PATH + "CLOUDINIT_LOGS", [log1, log2])
+        mocker.patch(M_PATH + "CLOUDINIT_RUN_DIR", run_dir)
+        logs.collect_logs(output_tarfile, include_userdata=False)
         # unpack the tarfile and check file contents
-        subp(['tar', 'zxvf', output_tarfile, '-C', self.new_root])
-        out_logdir = self.tmp_path(date_logdir, self.new_root)
-        self.assertFalse(
-            os.path.exists(
-                os.path.join(out_logdir, 'run', 'cloud-init',
-                             INSTANCE_JSON_SENSITIVE_FILE)),
-            'Unexpected file found: %s' % INSTANCE_JSON_SENSITIVE_FILE)
-        self.assertEqual(
-            '0.7fake\n',
-            load_file(os.path.join(out_logdir, 'dpkg-version')))
-        self.assertEqual(version_out,
-                         load_file(os.path.join(out_logdir, 'version')))
-        self.assertEqual(
-            'cloud-init-log',
-            load_file(os.path.join(out_logdir, 'cloud-init.log')))
-        self.assertEqual(
-            'cloud-init-output-log',
-            load_file(os.path.join(out_logdir, 'cloud-init-output.log')))
-        self.assertEqual(
-            'dmesg-out\n',
-            load_file(os.path.join(out_logdir, 'dmesg.txt')))
-        self.assertEqual(
-            'journal-out\n',
-            load_file(os.path.join(out_logdir, 'journal.txt')))
-        self.assertEqual(
-            'results',
-            load_file(
-                os.path.join(out_logdir, 'run', 'cloud-init', 'results.json')))
+        subp(["tar", "zxvf", output_tarfile, "-C", str(tmpdir)])
+        out_logdir = tmpdir.join(date_logdir)
+        assert not os.path.exists(
+            os.path.join(
+                out_logdir,
+                "run",
+                "cloud-init",
+                INSTANCE_JSON_SENSITIVE_FILE,
+            )
+        ), (
+            "Unexpected file found: %s" % INSTANCE_JSON_SENSITIVE_FILE
+        )
+        assert "0.7fake\n" == load_file(
+            os.path.join(out_logdir, "dpkg-version")
+        )
+        assert version_out == load_file(os.path.join(out_logdir, "version"))
+        assert "cloud-init-log" == load_file(
+            os.path.join(out_logdir, "cloud-init.log")
+        )
+        assert "cloud-init-output-log" == load_file(
+            os.path.join(out_logdir, "cloud-init-output.log")
+        )
+        assert "dmesg-out\n" == load_file(
+            os.path.join(out_logdir, "dmesg.txt")
+        )
+        assert "journal-out\n" == load_file(
+            os.path.join(out_logdir, "journal.txt")
+        )
+        assert "results" == load_file(
+            os.path.join(out_logdir, "run", "cloud-init", "results.json")
+        )
         fake_stderr.write.assert_any_call('Wrote %s\n' % output_tarfile)
 
-    def test_collect_logs_includes_optional_userdata(self, m_getuid):
+    def test_collect_logs_includes_optional_userdata(
+        self, m_getuid, mocker, tmpdir
+    ):
         """collect-logs include userdata when --include-userdata is set."""
         m_getuid.return_value = 0
-        log1 = self.tmp_path('cloud-init.log', self.new_root)
+        log1 = tmpdir.join("cloud-init.log")
         write_file(log1, 'cloud-init-log')
-        log2 = self.tmp_path('cloud-init-output.log', self.new_root)
+        log2 = tmpdir.join("cloud-init-output.log")
         write_file(log2, 'cloud-init-output-log')
-        userdata = self.tmp_path('user-data.txt', self.new_root)
+        userdata = tmpdir.join("user-data.txt")
         write_file(userdata, 'user-data')
-        ensure_dir(self.run_dir)
-        write_file(self.tmp_path('results.json', self.run_dir), 'results')
-        write_file(self.tmp_path(INSTANCE_JSON_SENSITIVE_FILE, self.run_dir),
+        run_dir = tmpdir.join("run")
+        write_file(run_dir.join("results.json"), "results")
+        write_file(run_dir.join(INSTANCE_JSON_SENSITIVE_FILE),
                    'sensitive')
-        output_tarfile = self.tmp_path('logs.tgz')
+        output_tarfile = str(tmpdir.join("logs.tgz"))
 
         date = datetime.utcnow().date().strftime('%Y-%m-%d')
         date_logdir = 'cloud-init-logs-{0}'.format(date)
@@ -146,22 +146,24 @@ class TestCollectLogs(FilesystemMockingTestCase):
 
         fake_stderr = mock.MagicMock()
 
-        wrap_and_call(
-            'cloudinit.cmd.devel.logs',
-            {'subp': {'side_effect': fake_subp},
-             'sys.stderr': {'new': fake_stderr},
-             'CLOUDINIT_LOGS': {'new': [log1, log2]},
-             'CLOUDINIT_RUN_DIR': {'new': self.run_dir},
-             'USER_DATA_FILE': {'new': userdata}},
-            logs.collect_logs, output_tarfile, include_userdata=True)
+        mocker.patch(M_PATH + "subp", side_effect=fake_subp)
+        mocker.patch(M_PATH + "sys.stderr", fake_stderr)
+        mocker.patch(M_PATH + "CLOUDINIT_LOGS", [log1, log2])
+        mocker.patch(M_PATH + "CLOUDINIT_RUN_DIR", run_dir)
+        mocker.patch(M_PATH + "_get_user_data_file", return_value=userdata)
+        logs.collect_logs(output_tarfile, include_userdata=True)
         # unpack the tarfile and check file contents
-        subp(['tar', 'zxvf', output_tarfile, '-C', self.new_root])
-        out_logdir = self.tmp_path(date_logdir, self.new_root)
-        self.assertEqual(
-            'user-data',
-            load_file(os.path.join(out_logdir, 'user-data.txt')))
-        self.assertEqual(
-            'sensitive',
-            load_file(os.path.join(out_logdir, 'run', 'cloud-init',
-                                   INSTANCE_JSON_SENSITIVE_FILE)))
+        subp(["tar", "zxvf", output_tarfile, "-C", str(tmpdir)])
+        out_logdir = tmpdir.join(date_logdir)
+        assert "user-data" == load_file(
+            os.path.join(out_logdir, "user-data.txt")
+        )
+        assert "sensitive" == load_file(
+            os.path.join(
+                out_logdir,
+                "run",
+                "cloud-init",
+                INSTANCE_JSON_SENSITIVE_FILE,
+            )
+        )
         fake_stderr.write.assert_any_call('Wrote %s\n' % output_tarfile)
-- 
2.33.0