!164 backport upstream patches

From: @tong_1001
Reviewed-by: @gaoruoshu
Signed-off-by: @gaoruoshu

Commit: befe33d956

backport-Return-a-namedtuple-from-subp-1376.patch (new file, 40 lines)
@@ -0,0 +1,40 @@
From b7a04a4616a8b8d70aa49039a973e372d12c4083 Mon Sep 17 00:00:00 2001
From: Brett Holman <bholman.devel@gmail.com>
Date: Tue, 12 Apr 2022 15:53:23 -0500
Subject: [PATCH] Return a namedtuple from subp() (#1376)

Reference: https://github.com/canonical/cloud-init/commit/b7a04a4616a8b8d70aa49039a973e372d12c4083
Conflict: only add SubpResult

This provides a minor readability improvement.

subp.subp(cmd)[0] -> subp.subp(cmd).stdout
subp.subp(cmd)[1] -> subp.subp(cmd).stderr
---
 cloudinit/subp.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/cloudinit/subp.py b/cloudinit/subp.py
index 024e1a9..267142e 100644
--- a/cloudinit/subp.py
+++ b/cloudinit/subp.py
@@ -1,6 +1,7 @@
 # This file is part of cloud-init. See LICENSE file for license information.
 """Common utility functions for interacting with subprocess."""
 
+import collections
 import logging
 import os
 import subprocess
@@ -9,6 +10,7 @@ from errno import ENOEXEC
 
 LOG = logging.getLogger(__name__)
 
+SubpResult = collections.namedtuple("SubpResult", ["stdout", "stderr"])
 
 def prepend_base_command(base_command, commands):
     """Ensure user-provided commands start with base_command; warn otherwise.
--
2.33.0

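A quick illustration of the calling convention this backport enables. The sketch below is not cloud-init code; it only mirrors the SubpResult definition added above to show that tuple-style unpacking keeps working while attribute access becomes available:

import collections
import subprocess

# Same shape as the SubpResult added by the patch above.
SubpResult = collections.namedtuple("SubpResult", ["stdout", "stderr"])

def run(cmd):
    # Toy stand-in for subp.subp(): run cmd and capture decoded output.
    proc = subprocess.run(cmd, capture_output=True, text=True, check=True)
    return SubpResult(proc.stdout, proc.stderr)

result = run(["echo", "hello"])
out, err = result                   # old tuple-style unpacking still works
print(result.stdout.strip())        # new attribute access: "hello"
print(result[1] == result.stderr)   # True: index 1 and .stderr are the same field
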
backport-cc_mounts-Fix-swapfile-not-working-on-btrfs-4319.patch (new file, 54 lines)
@@ -0,0 +1,54 @@
From 11a4fd1f80e32fd306e9fb8006321f303f7c91ba Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E7=8E=8B=E7=85=8E=E9=A5=BC?= <bin456789@gmail.com>
Date: Tue, 15 Aug 2023 05:08:35 +0800
Subject: [PATCH] cc_mounts: Fix swapfile not working on btrfs (#4319)

Reference: https://github.com/canonical/cloud-init/commit/11a4fd1f80e32fd306e9fb8006321f303f7c91ba
Conflict: (1) do not change tools/.github-cla-signers
          (2) change test_handler_mounts.py, not test_cc_mounts.py
          (3) format diff

To make a swapfile work on btrfs, we need to create an empty file
and add the "no copy-on-write" attribute before making it a swapfile.

See https://btrfs.readthedocs.io/en/latest/Swapfile.html

Fixes GH-3713
LP: #1884127
---
 cloudinit/config/cc_mounts.py                       | 4 ++++
 tests/unittests/test_handler/test_handler_mounts.py | 2 ++
 2 files changed, 6 insertions(+)

diff --git a/cloudinit/config/cc_mounts.py b/cloudinit/config/cc_mounts.py
index 1c6b883..54ca3f3 100644
--- a/cloudinit/config/cc_mounts.py
+++ b/cloudinit/config/cc_mounts.py
@@ -253,6 +253,10 @@ def create_swapfile(fname: str, size: str) -> None:
 
     fstype = util.get_mount_info(swap_dir)[1]
 
+    if fstype == "btrfs":
+        subp.subp(["truncate", "-s", "0", fname])
+        subp.subp(["chattr", "+C", fname])
+
     if (fstype == "xfs" and
             util.kernel_version() < (4, 18)) or fstype == "btrfs":
         create_swap(fname, size, "dd")
diff --git a/tests/unittests/test_handler/test_handler_mounts.py b/tests/unittests/test_handler/test_handler_mounts.py
index 69e8b30..8a6f38c 100644
--- a/tests/unittests/test_handler/test_handler_mounts.py
+++ b/tests/unittests/test_handler/test_handler_mounts.py
@@ -230,6 +230,8 @@ class TestSwapFileCreation(test_helpers.FilesystemMockingTestCase):
 
         cc_mounts.handle(None, self.cc, self.mock_cloud, self.mock_log, [])
         self.m_subp_subp.assert_has_calls([
+            mock.call(["truncate", "-s", "0", self.swap_path]),
+            mock.call(["chattr", "+C", self.swap_path]),
             mock.call(['dd', 'if=/dev/zero',
                        'of=' + self.swap_path,
                        'bs=1M', 'count=0'], capture=True),
--
2.33.0

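For readers unfamiliar with the btrfs constraint, here is a minimal sketch of the preparation step this patch adds; the path is illustrative, and the real module routes these commands through subp.subp with its own error handling:

import subprocess

def prepare_btrfs_swapfile(fname):
    # chattr +C only takes effect reliably on an empty file, so truncate first.
    subprocess.run(["truncate", "-s", "0", fname], check=True)
    # +C marks the file NOCOW (no copy-on-write), which btrfs requires for swapfiles.
    subprocess.run(["chattr", "+C", fname], check=True)

# prepare_btrfs_swapfile("/swapfile")  # illustrative path
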
backport-cc_mounts-Use-fallocate-to-create-swapfile-on-btrfs-.patch (new file, 50 lines)
@@ -0,0 +1,50 @@
From 3277590399a8a71a320a6f681443580a82a8a3ff Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E7=8E=8B=E7=85=8E=E9=A5=BC?= <bin456789@gmail.com>
Date: Tue, 22 Aug 2023 04:17:05 +0800
Subject: [PATCH] cc_mounts: Use fallocate to create swapfile on btrfs (#4369)

Reference: https://github.com/canonical/cloud-init/commit/3277590399a8a71a320a6f681443580a82a8a3ff
Conflict: (1) format diff
          (2) change test_handler_mounts.py, not test_cc_mounts.py

A swapfile works fine with fallocate on btrfs; the official btrfs
documentation also uses fallocate instead of dd.

See https://btrfs.readthedocs.io/en/latest/Swapfile.html
---
 cloudinit/config/cc_mounts.py                       | 2 +-
 tests/unittests/test_handler/test_handler_mounts.py | 4 +---
 2 files changed, 2 insertions(+), 4 deletions(-)

diff --git a/cloudinit/config/cc_mounts.py b/cloudinit/config/cc_mounts.py
index 54ca3f3..2d645b3 100644
--- a/cloudinit/config/cc_mounts.py
+++ b/cloudinit/config/cc_mounts.py
@@ -258,7 +258,7 @@ def create_swapfile(fname: str, size: str) -> None:
         subp.subp(["chattr", "+C", fname])
 
     if (fstype == "xfs" and
-            util.kernel_version() < (4, 18)) or fstype == "btrfs":
+            util.kernel_version() < (4, 18)):
         create_swap(fname, size, "dd")
     else:
         try:
diff --git a/tests/unittests/test_handler/test_handler_mounts.py b/tests/unittests/test_handler/test_handler_mounts.py
index 8a6f38c..6799c1e 100644
--- a/tests/unittests/test_handler/test_handler_mounts.py
+++ b/tests/unittests/test_handler/test_handler_mounts.py
@@ -232,9 +232,7 @@ class TestSwapFileCreation(test_helpers.FilesystemMockingTestCase):
         self.m_subp_subp.assert_has_calls([
             mock.call(["truncate", "-s", "0", self.swap_path]),
             mock.call(["chattr", "+C", self.swap_path]),
-            mock.call(['dd', 'if=/dev/zero',
-                       'of=' + self.swap_path,
-                       'bs=1M', 'count=0'], capture=True),
+            mock.call(["fallocate", "-l", "0M", self.swap_path], capture=True),
             mock.call(['mkswap', self.swap_path]),
             mock.call(['swapon', '-a'])])
 
--
2.33.0

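Taking the two cc_mounts backports together, the flow on btrfs is roughly the sequence below. This is a sketch with an illustrative size argument, not the actual create_swapfile/setup_swapfile code; the module itself runs "swapon -a" after updating fstab rather than activating the file directly:

import subprocess

def create_btrfs_swapfile(fname, size_mb):
    subprocess.run(["truncate", "-s", "0", fname], check=True)              # empty file
    subprocess.run(["chattr", "+C", fname], check=True)                     # disable copy-on-write
    subprocess.run(["fallocate", "-l", "%dM" % size_mb, fname], check=True) # allocate space
    subprocess.run(["mkswap", fname], check=True)                           # format as swap
    subprocess.run(["swapon", fname], check=True)                           # activate it

# create_btrfs_swapfile("/swapfile", 1024)  # illustrative call
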
backport-cloud-config-honor-cloud_dir-setting-1523.patch (new file, 310 lines)
@@ -0,0 +1,310 @@
From a23c886ea2cd301b6021eb03636beb5b92c429dc Mon Sep 17 00:00:00 2001
From: Alberto Contreras <alberto.contreras@canonical.com>
Date: Wed, 22 Jun 2022 17:38:17 +0200
Subject: [PATCH] cloud-config: honor cloud_dir setting (#1523)

Reference: https://github.com/canonical/cloud-init/commit/a23c886ea2cd301b6021eb03636beb5b92c429dc
Conflict: (1) only change logs.py and test_logs.py
          (2) do not add TestParser class in test_logs.py
          (3) format diffs

Ensure the cloud_dir setting is respected rather than hardcoding
"/var/lib/cloud".

- Modules affected: cmd.main, apport, devel.logs (collect-logs),
  cc_snap, sources.DataSourceAzure, sources.DataSourceBigstep,
  util:fetch_ssl_details.
- testing: Extend and port to pytest unit tests, add integration test.

LP: #1976564
---
 cloudinit/cmd/devel/logs.py            |  15 ++-
 cloudinit/cmd/devel/tests/test_logs.py | 174 +++++++++++++------------
 2 files changed, 99 insertions(+), 90 deletions(-)

diff --git a/cloudinit/cmd/devel/logs.py b/cloudinit/cmd/devel/logs.py
index 31ade73..84c157d 100644
--- a/cloudinit/cmd/devel/logs.py
+++ b/cloudinit/cmd/devel/logs.py
@@ -10,6 +10,7 @@ import os
 import shutil
 import sys
 
+from cloudinit.cmd.devel import read_cfg_paths
 from cloudinit.sources import INSTANCE_JSON_SENSITIVE_FILE
 from cloudinit.temp_utils import tempdir
 from cloudinit.subp import (ProcessExecutionError, subp)
@@ -18,7 +19,11 @@ from cloudinit.util import (chdir, copy, ensure_dir, write_file)
 
 CLOUDINIT_LOGS = ['/var/log/cloud-init.log', '/var/log/cloud-init-output.log']
 CLOUDINIT_RUN_DIR = '/run/cloud-init'
-USER_DATA_FILE = '/var/lib/cloud/instance/user-data.txt'  # Optional
+
+
+def _get_user_data_file() -> str:
+    paths = read_cfg_paths()
+    return paths.get_ipath_cur("userdata_raw")
 
 
 def get_parser(parser=None):
@@ -40,11 +45,12 @@ def get_parser(parser=None):
         "--tarfile", '-t', default='cloud-init.tar.gz',
         help=('The tarfile to create containing all collected logs.'
               ' Default: cloud-init.tar.gz'))
+    user_data_file = _get_user_data_file()
     parser.add_argument(
         "--include-userdata", '-u', default=False, action='store_true',
         dest='userdata', help=(
            'Optionally include user-data from {0} which could contain'
-            ' sensitive information.'.format(USER_DATA_FILE)))
+            ' sensitive information.'.format(user_data_file)))
     return parser
 
 
@@ -85,7 +91,7 @@ def _collect_file(path, out_dir, verbosity):
         _debug("file %s did not exist\n" % path, 2, verbosity)
 
 
-def collect_logs(tarfile, include_userdata, verbosity=0):
+def collect_logs(tarfile, include_userdata: bool, verbosity=0):
     """Collect all cloud-init logs and tar them up into the provided tarfile.
 
     @param tarfile: The path of the tar-gzipped file to create.
@@ -123,7 +129,8 @@ def collect_logs(tarfile, include_userdata, verbosity=0):
         for log in CLOUDINIT_LOGS:
             _collect_file(log, log_dir, verbosity)
         if include_userdata:
-            _collect_file(USER_DATA_FILE, log_dir, verbosity)
+            user_data_file = _get_user_data_file()
+            _collect_file(user_data_file, log_dir, verbosity)
         run_dir = os.path.join(log_dir, 'run')
         ensure_dir(run_dir)
         if os.path.exists(CLOUDINIT_RUN_DIR):
diff --git a/cloudinit/cmd/devel/tests/test_logs.py b/cloudinit/cmd/devel/tests/test_logs.py
index ddfd58e..4ef6d5d 100644
--- a/cloudinit/cmd/devel/tests/test_logs.py
+++ b/cloudinit/cmd/devel/tests/test_logs.py
@@ -2,48 +2,46 @@
 
 from datetime import datetime
 import os
+import re
 from io import StringIO
 
 from cloudinit.cmd.devel import logs
 from cloudinit.sources import INSTANCE_JSON_SENSITIVE_FILE
-from cloudinit.tests.helpers import (
-    FilesystemMockingTestCase, mock, wrap_and_call)
 from cloudinit.subp import subp
-from cloudinit.util import ensure_dir, load_file, write_file
+from cloudinit.util import load_file, write_file
+from cloudinit.tests.helpers import mock
 
+M_PATH = "cloudinit.cmd.devel.logs."
 
-@mock.patch('cloudinit.cmd.devel.logs.os.getuid')
-class TestCollectLogs(FilesystemMockingTestCase):
-
-    def setUp(self):
-        super(TestCollectLogs, self).setUp()
-        self.new_root = self.tmp_dir()
-        self.run_dir = self.tmp_path('run', self.new_root)
-
-    def test_collect_logs_with_userdata_requires_root_user(self, m_getuid):
+@mock.patch("cloudinit.cmd.devel.logs.os.getuid")
+class TestCollectLogs:
+    def test_collect_logs_with_userdata_requires_root_user(
+        self, m_getuid, tmpdir
+    ):
         """collect-logs errors when non-root user collects userdata ."""
         m_getuid.return_value = 100  # non-root
-        output_tarfile = self.tmp_path('logs.tgz')
+        output_tarfile = tmpdir.join('logs.tgz')
         with mock.patch('sys.stderr', new_callable=StringIO) as m_stderr:
-            self.assertEqual(
-                1, logs.collect_logs(output_tarfile, include_userdata=True))
-            self.assertEqual(
+            assert 1 == logs.collect_logs(
+                output_tarfile, include_userdata=True
+            )
+            assert (
                 'To include userdata, root user is required.'
-                ' Try sudo cloud-init collect-logs\n',
-                m_stderr.getvalue())
+                " Try sudo cloud-init collect-logs\n" == m_stderr.getvalue()
+            )
 
-    def test_collect_logs_creates_tarfile(self, m_getuid):
+    def test_collect_logs_creates_tarfile(self, m_getuid, mocker, tmpdir):
         """collect-logs creates a tarfile with all related cloud-init info."""
         m_getuid.return_value = 100
-        log1 = self.tmp_path('cloud-init.log', self.new_root)
+        log1 = tmpdir.join("cloud-init.log")
         write_file(log1, 'cloud-init-log')
-        log2 = self.tmp_path('cloud-init-output.log', self.new_root)
+        log2 = tmpdir.join("cloud-init-output.log")
         write_file(log2, 'cloud-init-output-log')
-        ensure_dir(self.run_dir)
-        write_file(self.tmp_path('results.json', self.run_dir), 'results')
-        write_file(self.tmp_path(INSTANCE_JSON_SENSITIVE_FILE, self.run_dir),
+        run_dir = tmpdir.join("run")
+        write_file(run_dir.join("results.json"), "results")
+        write_file(run_dir.join(INSTANCE_JSON_SENSITIVE_FILE,),
                    'sensitive')
-        output_tarfile = self.tmp_path('logs.tgz')
+        output_tarfile = str(tmpdir.join("logs.tgz"))
 
         date = datetime.utcnow().date().strftime('%Y-%m-%d')
         date_logdir = 'cloud-init-logs-{0}'.format(date)
@@ -68,59 +66,61 @@ class TestCollectLogs(FilesystemMockingTestCase):
             return expected_subp[cmd_tuple], ''
 
         fake_stderr = mock.MagicMock()
-
-        wrap_and_call(
-            'cloudinit.cmd.devel.logs',
-            {'subp': {'side_effect': fake_subp},
-             'sys.stderr': {'new': fake_stderr},
-             'CLOUDINIT_LOGS': {'new': [log1, log2]},
-             'CLOUDINIT_RUN_DIR': {'new': self.run_dir}},
-            logs.collect_logs, output_tarfile, include_userdata=False)
+        mocker.patch(M_PATH + "subp", side_effect=fake_subp)
+        mocker.patch(M_PATH + "sys.stderr", fake_stderr)
+        mocker.patch(M_PATH + "CLOUDINIT_LOGS", [log1, log2])
+        mocker.patch(M_PATH + "CLOUDINIT_RUN_DIR", run_dir)
+        logs.collect_logs(output_tarfile, include_userdata=False)
         # unpack the tarfile and check file contents
-        subp(['tar', 'zxvf', output_tarfile, '-C', self.new_root])
-        out_logdir = self.tmp_path(date_logdir, self.new_root)
-        self.assertFalse(
-            os.path.exists(
-                os.path.join(out_logdir, 'run', 'cloud-init',
-                             INSTANCE_JSON_SENSITIVE_FILE)),
-            'Unexpected file found: %s' % INSTANCE_JSON_SENSITIVE_FILE)
-        self.assertEqual(
-            '0.7fake\n',
-            load_file(os.path.join(out_logdir, 'dpkg-version')))
-        self.assertEqual(version_out,
-                         load_file(os.path.join(out_logdir, 'version')))
-        self.assertEqual(
-            'cloud-init-log',
-            load_file(os.path.join(out_logdir, 'cloud-init.log')))
-        self.assertEqual(
-            'cloud-init-output-log',
-            load_file(os.path.join(out_logdir, 'cloud-init-output.log')))
-        self.assertEqual(
-            'dmesg-out\n',
-            load_file(os.path.join(out_logdir, 'dmesg.txt')))
-        self.assertEqual(
-            'journal-out\n',
-            load_file(os.path.join(out_logdir, 'journal.txt')))
-        self.assertEqual(
-            'results',
-            load_file(
-                os.path.join(out_logdir, 'run', 'cloud-init', 'results.json')))
+        subp(["tar", "zxvf", output_tarfile, "-C", str(tmpdir)])
+        out_logdir = tmpdir.join(date_logdir)
+        assert not os.path.exists(
+            os.path.join(
+                out_logdir,
+                "run",
+                "cloud-init",
+                INSTANCE_JSON_SENSITIVE_FILE,
+            )
+        ), (
+            "Unexpected file found: %s" % INSTANCE_JSON_SENSITIVE_FILE
+        )
+        assert "0.7fake\n" == load_file(
+            os.path.join(out_logdir, "dpkg-version")
+        )
+        assert version_out == load_file(os.path.join(out_logdir, "version"))
+        assert "cloud-init-log" == load_file(
+            os.path.join(out_logdir, "cloud-init.log")
+        )
+        assert "cloud-init-output-log" == load_file(
+            os.path.join(out_logdir, "cloud-init-output.log")
+        )
+        assert "dmesg-out\n" == load_file(
+            os.path.join(out_logdir, "dmesg.txt")
+        )
+        assert "journal-out\n" == load_file(
+            os.path.join(out_logdir, "journal.txt")
+        )
+        assert "results" == load_file(
+            os.path.join(out_logdir, "run", "cloud-init", "results.json")
+        )
         fake_stderr.write.assert_any_call('Wrote %s\n' % output_tarfile)
 
-    def test_collect_logs_includes_optional_userdata(self, m_getuid):
+    def test_collect_logs_includes_optional_userdata(
+        self, m_getuid, mocker, tmpdir
+    ):
         """collect-logs include userdata when --include-userdata is set."""
         m_getuid.return_value = 0
-        log1 = self.tmp_path('cloud-init.log', self.new_root)
+        log1 = tmpdir.join("cloud-init.log")
         write_file(log1, 'cloud-init-log')
-        log2 = self.tmp_path('cloud-init-output.log', self.new_root)
+        log2 = tmpdir.join("cloud-init-output.log")
         write_file(log2, 'cloud-init-output-log')
-        userdata = self.tmp_path('user-data.txt', self.new_root)
+        userdata = tmpdir.join("user-data.txt")
         write_file(userdata, 'user-data')
-        ensure_dir(self.run_dir)
-        write_file(self.tmp_path('results.json', self.run_dir), 'results')
-        write_file(self.tmp_path(INSTANCE_JSON_SENSITIVE_FILE, self.run_dir),
+        run_dir = tmpdir.join("run")
+        write_file(run_dir.join("results.json"), "results")
+        write_file(run_dir.join(INSTANCE_JSON_SENSITIVE_FILE),
                    'sensitive')
-        output_tarfile = self.tmp_path('logs.tgz')
+        output_tarfile = str(tmpdir.join("logs.tgz"))
 
         date = datetime.utcnow().date().strftime('%Y-%m-%d')
         date_logdir = 'cloud-init-logs-{0}'.format(date)
@@ -146,22 +146,24 @@ class TestCollectLogs(FilesystemMockingTestCase):
 
         fake_stderr = mock.MagicMock()
 
-        wrap_and_call(
-            'cloudinit.cmd.devel.logs',
-            {'subp': {'side_effect': fake_subp},
-             'sys.stderr': {'new': fake_stderr},
-             'CLOUDINIT_LOGS': {'new': [log1, log2]},
-             'CLOUDINIT_RUN_DIR': {'new': self.run_dir},
-             'USER_DATA_FILE': {'new': userdata}},
-            logs.collect_logs, output_tarfile, include_userdata=True)
+        mocker.patch(M_PATH + "subp", side_effect=fake_subp)
+        mocker.patch(M_PATH + "sys.stderr", fake_stderr)
+        mocker.patch(M_PATH + "CLOUDINIT_LOGS", [log1, log2])
+        mocker.patch(M_PATH + "CLOUDINIT_RUN_DIR", run_dir)
+        mocker.patch(M_PATH + "_get_user_data_file", return_value=userdata)
+        logs.collect_logs(output_tarfile, include_userdata=True)
         # unpack the tarfile and check file contents
-        subp(['tar', 'zxvf', output_tarfile, '-C', self.new_root])
-        out_logdir = self.tmp_path(date_logdir, self.new_root)
-        self.assertEqual(
-            'user-data',
-            load_file(os.path.join(out_logdir, 'user-data.txt')))
-        self.assertEqual(
-            'sensitive',
-            load_file(os.path.join(out_logdir, 'run', 'cloud-init',
-                                   INSTANCE_JSON_SENSITIVE_FILE)))
+        subp(["tar", "zxvf", output_tarfile, "-C", str(tmpdir)])
+        out_logdir = tmpdir.join(date_logdir)
+        assert "user-data" == load_file(
+            os.path.join(out_logdir, "user-data.txt")
+        )
+        assert "sensitive" == load_file(
+            os.path.join(
+                out_logdir,
+                "run",
+                "cloud-init",
+                INSTANCE_JSON_SENSITIVE_FILE,
+            )
+        )
         fake_stderr.write.assert_any_call('Wrote %s\n' % output_tarfile)
--
2.33.0

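To make the intent of this backport concrete: the user-data location is now derived from the configured cloud_dir instead of a hardcoded /var/lib/cloud path. A simplified stand-alone sketch, not the real code, which actually calls read_cfg_paths() and Paths.get_ipath_cur("userdata_raw"):

import os

DEFAULT_CLOUD_DIR = "/var/lib/cloud"  # the value that used to be hardcoded

def user_data_file(cloud_dir=DEFAULT_CLOUD_DIR):
    # Mirrors the old constant '/var/lib/cloud/instance/user-data.txt',
    # but follows whatever cloud_dir the system configuration sets.
    return os.path.join(cloud_dir, "instance", "user-data.txt")

print(user_data_file())                 # /var/lib/cloud/instance/user-data.txt
print(user_data_file("/custom/cloud"))  # /custom/cloud/instance/user-data.txt
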
backport-collect-logs-fix-memory-usage-SC-1590-4289.patch (new file, 257 lines)
@@ -0,0 +1,257 @@
From 9436569e2accb88b4ac3898c665d860697472f89 Mon Sep 17 00:00:00 2001
From: Alec Warren <37227576+a-dubs@users.noreply.github.com>
Date: Mon, 21 Aug 2023 18:40:11 -0400
Subject: [PATCH] collect-logs fix memory usage (SC-1590) (#4289)

Reference: https://github.com/canonical/cloud-init/commit/9436569e2accb88b4ac3898c665d860697472f89
Conflict: (1) change "output = subp(cmd).stdout" to "output = subp(cmd)[0]"
          (2) add "import pytest" in test_logs.py
          (3) format diffs

collect-logs now streams certain outputs to avoid large memory usage.

_stream_command_output_to_file() was added to mimic the functionality of
_write_command_output_to_file(), except that instead of reading the output
of subprocess calls into memory, it streams the output directly to
the target file. This new function is used when the output of a
subprocess call does not need to be saved into a variable.

As far as usage goes, the main difference between the two functions
is that the stream function does not return the result of the subprocess
call, while the write function does.

Fixes GH-3994
LP: #1980150
---
 cloudinit/cmd/devel/logs.py            | 57 +++++++++++-----
 cloudinit/cmd/devel/tests/test_logs.py | 93 +++++++++++++++++++++++++-
 2 files changed, 132 insertions(+), 18 deletions(-)

diff --git a/cloudinit/cmd/devel/logs.py b/cloudinit/cmd/devel/logs.py
index 84c157d..ef52cd0 100644
--- a/cloudinit/cmd/devel/logs.py
+++ b/cloudinit/cmd/devel/logs.py
@@ -8,6 +8,7 @@ import argparse
 from datetime import datetime
 import os
 import shutil
+import subprocess
 import sys
 
 from cloudinit.cmd.devel import read_cfg_paths
@@ -67,15 +68,29 @@ def _copytree_rundir_ignore_files(curdir, files):
 
 def _write_command_output_to_file(cmd, filename, msg, verbosity):
     """Helper which runs a command and writes output or error to filename."""
+    ensure_dir(os.path.dirname(filename))
     try:
-        out, _ = subp(cmd)
+        output = subp(cmd)[0]
     except ProcessExecutionError as e:
         write_file(filename, str(e))
         _debug("collecting %s failed.\n" % msg, 1, verbosity)
     else:
-        write_file(filename, out)
+        write_file(filename, output)
+        _debug("collected %s\n" % msg, 1, verbosity)
+        return output
+
+
+def _stream_command_output_to_file(cmd, filename, msg, verbosity):
+    """Helper which runs a command and writes output or error to filename."""
+    ensure_dir(os.path.dirname(filename))
+    try:
+        with open(filename, "w") as f:
+            subprocess.call(cmd, stdout=f, stderr=f)
+    except OSError as e:
+        write_file(filename, str(e))
+        _debug("collecting %s failed.\n" % msg, 1, verbosity)
+    else:
         _debug("collected %s\n" % msg, 1, verbosity)
-        return out
 
 
 def _debug(msg, level, verbosity):
@@ -108,23 +123,33 @@ def collect_logs(tarfile, include_userdata: bool, verbosity=0):
     with tempdir(dir='/tmp') as tmp_dir:
         log_dir = os.path.join(tmp_dir, log_dir)
         version = _write_command_output_to_file(
-            ['cloud-init', '--version'],
-            os.path.join(log_dir, 'version'),
-            "cloud-init --version", verbosity)
+            cmd=["cloud-init", "--version"],
+            filename=os.path.join(log_dir, "version"),
+            msg="cloud-init --version",
+            verbosity=verbosity,
+        )
         dpkg_ver = _write_command_output_to_file(
-            ['dpkg-query', '--show', "-f=${Version}\n", 'cloud-init'],
-            os.path.join(log_dir, 'dpkg-version'),
-            "dpkg version", verbosity)
+            cmd=["dpkg-query", "--show", "-f=${Version}\n", "cloud-init"],
+            filename=os.path.join(log_dir, "dpkg-version"),
+            msg="dpkg version",
+            verbosity=verbosity,
+        )
         if not version:
             version = dpkg_ver if dpkg_ver else "not-available"
+        print("version: ", version)
         _debug("collected cloud-init version: %s\n" % version, 1, verbosity)
-        _write_command_output_to_file(
-            ['dmesg'], os.path.join(log_dir, 'dmesg.txt'),
-            "dmesg output", verbosity)
-        _write_command_output_to_file(
-            ['journalctl', '--boot=0', '-o', 'short-precise'],
-            os.path.join(log_dir, 'journal.txt'),
-            "systemd journal of current boot", verbosity)
+        _stream_command_output_to_file(
+            cmd=["dmesg"],
+            filename=os.path.join(log_dir, "dmesg.txt"),
+            msg="dmesg output",
+            verbosity=verbosity,
+        )
+        _stream_command_output_to_file(
+            cmd=["journalctl", "--boot=0", "-o", "short-precise"],
+            filename=os.path.join(log_dir, "journal.txt"),
+            msg="systemd journal of current boot",
+            verbosity=verbosity,
+        )
 
         for log in CLOUDINIT_LOGS:
             _collect_file(log, log_dir, verbosity)
diff --git a/cloudinit/cmd/devel/tests/test_logs.py b/cloudinit/cmd/devel/tests/test_logs.py
index 4ef6d5d..ed10b50 100644
--- a/cloudinit/cmd/devel/tests/test_logs.py
+++ b/cloudinit/cmd/devel/tests/test_logs.py
@@ -1,6 +1,7 @@
 # This file is part of cloud-init. See LICENSE file for license information.
 
 from datetime import datetime
+import pytest
 import os
 import re
 from io import StringIO
@@ -8,6 +9,7 @@ from io import StringIO
 
 from cloudinit.cmd.devel import logs
 from cloudinit.sources import INSTANCE_JSON_SENSITIVE_FILE
 from cloudinit.subp import subp
+from cloudinit.subp import SubpResult, subp
 from cloudinit.util import load_file, write_file
 from cloudinit.tests.helpers import mock
@@ -63,10 +65,25 @@ class TestCollectLogs:
                     'Unexpected command provided to subp: {0}'.format(cmd))
             if cmd == ['tar', 'czvf', output_tarfile, date_logdir]:
                 subp(cmd)  # Pass through tar cmd so we can check output
-            return expected_subp[cmd_tuple], ''
+            return SubpResult(expected_subp[cmd_tuple], "")
+
+        # the new _stream_command_output_to_file function uses subprocess.call
+        # instead of subp, so we need to mock that as well
+        def fake_subprocess_call(cmd, stdout=None, stderr=None):
+            cmd_tuple = tuple(cmd)
+            if cmd_tuple not in expected_subp:
+                raise AssertionError(
+                    "Unexpected command provided to subprocess: {0}".format(
+                        cmd
+                    )
+                )
+            stdout.write(expected_subp[cmd_tuple])
 
         fake_stderr = mock.MagicMock()
         mocker.patch(M_PATH + "subp", side_effect=fake_subp)
+        mocker.patch(
+            M_PATH + "subprocess.call", side_effect=fake_subprocess_call
+        )
         mocker.patch(M_PATH + "sys.stderr", fake_stderr)
         mocker.patch(M_PATH + "CLOUDINIT_LOGS", [log1, log2])
         mocker.patch(M_PATH + "CLOUDINIT_RUN_DIR", run_dir)
@@ -142,7 +159,7 @@ class TestCollectLogs:
                     'Unexpected command provided to subp: {0}'.format(cmd))
             if cmd == ['tar', 'czvf', output_tarfile, date_logdir]:
                 subp(cmd)  # Pass through tar cmd so we can check output
-            return expected_subp[cmd_tuple], ''
+            return SubpResult(expected_subp[cmd_tuple], "")
 
         fake_stderr = mock.MagicMock()
 
@@ -167,3 +184,75 @@ class TestCollectLogs:
             )
         )
         fake_stderr.write.assert_any_call('Wrote %s\n' % output_tarfile)
+
+    @pytest.mark.parametrize(
+        "cmd, expected_file_contents, expected_return_value",
+        [
+            (
+                ["echo", "cloud-init? more like cloud-innit!"],
+                "cloud-init? more like cloud-innit!\n",
+                "cloud-init? more like cloud-innit!\n",
+            ),
+            (
+                ["ls", "/nonexistent-directory"],
+                (
+                    "Unexpected error while running command.\n"
+                    "Command: ['ls', '/nonexistent-directory']\n"
+                    "Exit code: 2\n"
+                    "Reason: -\n"
+                    "Stdout: \n"
+                    "Stderr: ls: cannot access '/nonexistent-directory': "
+                    "No such file or directory"
+                ),
+                None,
+            ),
+        ],
+    )
+    def test_write_command_output_to_file(
+        self,
+        m_getuid,
+        tmpdir,
+        cmd,
+        expected_file_contents,
+        expected_return_value,
+    ):
+        m_getuid.return_value = 100
+        output_file = tmpdir.join("test-output-file.txt")
+
+        return_output = logs._write_command_output_to_file(
+            filename=output_file,
+            cmd=cmd,
+            msg="",
+            verbosity=1,
+        )
+
+        assert expected_return_value == return_output
+        assert expected_file_contents == load_file(output_file)
+
+    @pytest.mark.parametrize(
+        "cmd, expected_file_contents",
+        [
+            (["echo", "cloud-init, shmoud-init"], "cloud-init, shmoud-init\n"),
+            (
+                ["ls", "/nonexistent-directory"],
+                (
+                    "ls: cannot access '/nonexistent-directory': "
+                    "No such file or directory\n"
+                ),
+            ),
+        ],
+    )
+    def test_stream_command_output_to_file(
+        self, m_getuid, tmpdir, cmd, expected_file_contents
+    ):
+        m_getuid.return_value = 100
+        output_file = tmpdir.join("test-output-file.txt")
+
+        logs._stream_command_output_to_file(
+            filename=output_file,
+            cmd=cmd,
+            msg="",
+            verbosity=1,
+        )
+
+        assert expected_file_contents == load_file(output_file)
--
2.33.0

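A short sketch, independent of cloud-init, of the memory difference described in the commit message above: the buffered helper reads the whole output into a Python string before writing it, while the streaming helper hands the open file straight to the child process, so large outputs such as a full journal never pass through Python memory:

import subprocess

def write_command_output_to_file(cmd, filename):
    # Buffered: output is captured in memory, written out, and returned.
    output = subprocess.run(cmd, capture_output=True, text=True).stdout
    with open(filename, "w") as f:
        f.write(output)
    return output

def stream_command_output_to_file(cmd, filename):
    # Streamed: stdout/stderr go directly to the file; nothing is returned.
    with open(filename, "w") as f:
        subprocess.call(cmd, stdout=f, stderr=f)

# stream_command_output_to_file(["dmesg"], "/tmp/dmesg.txt")  # illustrative
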
cloud-init.spec (modified)
@@ -1,6 +1,6 @@
 Name: cloud-init
 Version: 21.4
-Release: 20
+Release: 21
 Summary: the defacto multi-distribution package that handles early initialization of a cloud instance.
 License: ASL 2.0 or GPLv3
 URL: http://launchpad.net/cloud-init
@@ -65,6 +65,11 @@ Patch6031: backport-Fix-KeyError-when-rendering-sysconfig-IPv6-routes.patch
 Patch6032: backport-Fix-default-route-rendering-on-v2-ipv6-1973.patch
 Patch6033: backport-Drop-support-of-sk-keys-in-cc_ssh-1451.patch
 Patch6034: backport-Do-not-generate-dsa-and-ed25519-key-types-when-crypt.patch
+Patch6035: backport-cc_mounts-Fix-swapfile-not-working-on-btrfs-4319.patch
+Patch6036: backport-cc_mounts-Use-fallocate-to-create-swapfile-on-btrfs-.patch
+Patch6037: backport-cloud-config-honor-cloud_dir-setting-1523.patch
+Patch6038: backport-collect-logs-fix-memory-usage-SC-1590-4289.patch
+Patch6039: backport-Return-a-namedtuple-from-subp-1376.patch
 
 BuildRequires: pkgconfig(systemd) python3-devel python3-setuptools systemd
 BuildRequires: iproute python3-configobj python3-httpretty >= 0.8.14-2
@@ -174,6 +179,14 @@ fi
 %exclude /usr/share/doc/*
 
 %changelog
+* Mon Sep 18 2023 shixuantong <shixuantong1@huawei.com> - 21.4-21
+- Type:bugfix
+- CVE:NA
+- SUG:NA
+- DESC:cc_mounts: Fix swapfile not working on btrfs
+       cc_mounts: Use fallocate to create swapfile on btrfs
+       collect-logs fix memory usage
+
 * Mon Aug 21 2023 shixuantong <shixuantong1@huawei.com> - 21.4-20
 - Type:bugfix
 - CVE:NA