diff --git a/0002-deal-request-connection-error.patch b/0002-deal-request-connection-error.patch
new file mode 100644
index 0000000..ffa2c59
--- /dev/null
+++ b/0002-deal-request-connection-error.patch
@@ -0,0 +1,247 @@
+From 8a9ec891ef23eef79766e2936a8ce5b1636b3f53 Mon Sep 17 00:00:00 2001
+From: smjiao
+Date: Thu, 9 Nov 2023 09:33:23 +0800
+Subject: [PATCH] deal request connection error
+
+---
+ ragdoll/controllers/format.py                | 100 +++++++++++--------
+ ragdoll/controllers/management_controller.py |  13 ++-
+ ragdoll/utils/object_parse.py                |  13 ++-
+ 3 files changed, 78 insertions(+), 48 deletions(-)
+
+diff --git a/ragdoll/controllers/format.py b/ragdoll/controllers/format.py
+index f0d2864..9676296 100644
+--- a/ragdoll/controllers/format.py
++++ b/ragdoll/controllers/format.py
+@@ -28,7 +28,7 @@ class Format(object):
+     def domainCheck(domainName):
+         res = True
+         if not re.match(r"^[A-Za-z0-9_\.-]*$", domainName) or domainName == "" or len(domainName) > 255:
+-            res = False
++            res = False
+         return res
+ 
+     @staticmethod
+@@ -329,7 +329,14 @@ class Format(object):
+         get_real_conf_body["infos"] = get_real_conf_body_info
+         url = conf_tools.load_url_by_conf().get("collect_url")
+         headers = {"Content-Type": "application/json"}
+-        response = requests.post(url, data=json.dumps(get_real_conf_body), headers=headers) # post request
++        try:
++            response = requests.post(url, data=json.dumps(get_real_conf_body), headers=headers) # post request
++        except requests.exceptions.RequestException as connect_ex:
++            LOGGER.error(f"An error occurred: {connect_ex}")
++            codeNum = 500
++            codeString = "Failed to obtain the actual configuration, please check the interface of config/collect."
++            base_rsp = BaseResponse(codeNum, codeString)
++            return base_rsp, codeNum
+         resp = json.loads(response.text).get("data")
+         resp_code = json.loads(response.text).get("code")
+         if (resp_code != "200") and (resp_code != "206"):
+@@ -483,7 +490,6 @@ class Format(object):
+         sync_status = SyncStatus(domain_name=domain,
+                                  host_status=[])
+         from ragdoll.utils.object_parse import ObjectParse
+-        directory_conf_is_synced = ConfIsSynced(file_path="", is_synced="", single_conf=[])
+ 
+         for d_real_conf in real_conf_res_text:
+             host_id = d_real_conf.host_id
+@@ -491,6 +497,7 @@
+                                               sync_status=[])
+             d_real_conf_base = d_real_conf.conf_base_infos
+             for d_conf in d_real_conf_base:
++                directory_conf_is_synced = ConfIsSynced(file_path="", is_synced="", single_conf=[])
+                 d_conf_path = d_conf.file_path
+ 
+                 object_parse = ObjectParse()
+@@ -500,15 +507,15 @@
+                 Format.deal_conf_sync_status(conf_model, d_conf, d_conf_path, directory_conf_is_synced,
+                                              host_sync_status, manage_confs)
+ 
+-            if len(directory_conf_is_synced.single_conf) > 0:
+-                synced_flag = SYNCHRONIZED
+-                for single_config in directory_conf_is_synced.single_conf:
+-                    if single_config.single_is_synced == SYNCHRONIZED:
+-                        continue
+-                    else:
+-                        synced_flag = NOT_SYNCHRONIZE
+-                directory_conf_is_synced.is_synced = synced_flag
+-                host_sync_status.sync_status.append(directory_conf_is_synced)
++                if len(directory_conf_is_synced.single_conf) > 0:
++                    synced_flag = SYNCHRONIZED
++                    for single_config in directory_conf_is_synced.single_conf:
++                        if single_config.single_is_synced == SYNCHRONIZED:
++                            continue
++                        else:
++                            synced_flag = NOT_SYNCHRONIZE
++                    directory_conf_is_synced.is_synced = synced_flag
++                    host_sync_status.sync_status.append(directory_conf_is_synced)
+             sync_status.host_status.append(host_sync_status)
+         return sync_status
+ 
+@@ -516,37 +523,38 @@ class Format(object):
+     def deal_conf_sync_status(conf_model, d_conf, d_conf_path, directory_conf_is_synced, host_sync_status,
+                               manage_confs):
+         comp_res = ""
+-        for dir_path in DIRECTORY_FILE_PATH_LIST:
+-            if str(d_conf_path).find(dir_path) == -1:
+-                for d_man_conf in manage_confs:
+-                    if d_man_conf.get("file_path").split(":")[-1] != d_conf_path:
+-                        continue
+-                    comp_res = conf_model.conf_compare(d_man_conf.get("contents"), d_conf.conf_contens)
+-                    conf_is_synced = ConfIsSynced(file_path=d_conf_path,
+-                                                  is_synced=comp_res)
+-                    host_sync_status.sync_status.append(conf_is_synced)
+-            else:
+-                directory_conf_is_synced.file_path = dir_path
+-                confContents = json.loads(d_conf.conf_contens)
+-                pam_conf_contents = ""
+-                for d_man_conf in manage_confs:
+-                    d_man_conf_path = d_man_conf.get("file_path")
+-                    if d_man_conf_path not in DIRECTORY_FILE_PATH_LIST:
+-                        continue
+-                    pam_conf_contents = d_man_conf.get("contents")
++        if d_conf_path in DIRECTORY_FILE_PATH_LIST:
++            confContents = json.loads(d_conf.conf_contens)
++            directory_conf_contents = ""
++            for d_man_conf in manage_confs:
++                d_man_conf_path = d_man_conf.get("file_path")
++                if d_man_conf_path != d_conf_path:
++                    # if d_man_conf_path not in DIRECTORY_FILE_PATH_LIST:
++                    continue
++                else:
++                    directory_conf_is_synced.file_path = d_conf_path
++                    directory_conf_contents = d_man_conf.get("contents")
+ 
+-                pam_conf_contents_dict = json.loads(pam_conf_contents)
++            directory_conf_contents_dict = json.loads(directory_conf_contents)
+ 
+-                for pam_conf_content_key, pam_conf_content_value in pam_conf_contents_dict.items():
+-                    if pam_conf_content_key not in confContents.keys():
+-                        single_conf = SingleConfig(single_file_path=pam_conf_content_key,
+-                                                   single_is_synced=NOT_SYNCHRONIZE)
+-                        directory_conf_is_synced.single_conf.append(single_conf)
+-                    else:
+-                        dst_conf = confContents.get(pam_conf_content_key)
+-                        comp_res = conf_model.conf_compare(pam_conf_content_value, dst_conf)
+-                        single_conf = SingleConfig(single_file_path=d_conf_path, single_is_synced=comp_res)
+-                        directory_conf_is_synced.single_conf.append(single_conf)
++            for dir_conf_content_key, dir_conf_content_value in directory_conf_contents_dict.items():
++                if dir_conf_content_key not in confContents.keys():
++                    single_conf = SingleConfig(single_file_path=dir_conf_content_key,
++                                               single_is_synced=NOT_SYNCHRONIZE)
++                    directory_conf_is_synced.single_conf.append(single_conf)
++                else:
++                    dst_conf = confContents.get(dir_conf_content_key)
++                    comp_res = conf_model.conf_compare(dir_conf_content_value, dst_conf)
++                    single_conf = SingleConfig(single_file_path=dir_conf_content_key, single_is_synced=comp_res)
++                    directory_conf_is_synced.single_conf.append(single_conf)
++        else:
++            for d_man_conf in manage_confs:
++                if d_man_conf.get("file_path").split(":")[-1] != d_conf_path:
++                    continue
++                comp_res = conf_model.conf_compare(d_man_conf.get("contents"), d_conf.conf_contens)
++                conf_is_synced = ConfIsSynced(file_path=d_conf_path,
++                                              is_synced=comp_res)
++                host_sync_status.sync_status.append(conf_is_synced)
+ 
+     @staticmethod
+     def get_conf_type_model(d_conf_path, object_parse):
+@@ -569,8 +577,14 @@ class Format(object):
+         content = object_parse.parse_json_to_conf(directory_file_path, directory_content)
+         # Configuration to the host
+         data = {"host_id": host_id, "file_path": directory_file_path, "content": content}
+-        sync_response = requests.put(sync_conf_url, data=json.dumps(data), headers=headers)
+-
++        try:
++            sync_response = requests.put(sync_conf_url, data=json.dumps(data), headers=headers)
++        except requests.exceptions.RequestException as connect_ex:
++            LOGGER.error(f"An error occurred: {connect_ex}")
++            codeNum = 500
++            codeString = "Failed to sync configuration, please check the interface of config/sync."
++            base_rsp = BaseResponse(codeNum, codeString)
++            return base_rsp, codeNum
+         resp_code = json.loads(sync_response.text).get('code')
+         resp = json.loads(sync_response.text).get('data').get('resp')
+ 
+diff --git a/ragdoll/controllers/management_controller.py b/ragdoll/controllers/management_controller.py
+index 7ec6e50..101802a 100644
+--- a/ragdoll/controllers/management_controller.py
++++ b/ragdoll/controllers/management_controller.py
+@@ -116,7 +116,7 @@ def add_management_confs_in_domain(body=None): # noqa: E501
+             if d_conf.file_path not in DIRECTORY_FILE_PATH_LIST:
+                 exist_host[host_id].append(d_conf.file_path)
+             else:
+-                codeNum, codeString, file_paths = object_parse.get_pam_files(d_conf, host_id)
++                codeNum, codeString, file_paths = object_parse.get_directory_files(d_conf, host_id)
+                 if len(file_paths) == 0:
+                     base_rsp = BaseResponse(codeNum, codeString)
+                     return base_rsp, codeNum
+@@ -129,7 +129,7 @@
+                 conf_list.append(d_conf.file_path)
+                 exist_host[host_id] = conf_list
+             else:
+-                codeNum, codeString, file_paths = object_parse.get_pam_files(d_conf, host_id)
++                codeNum, codeString, file_paths = object_parse.get_directory_files(d_conf, host_id)
+                 if len(file_paths) == 0:
+                     base_rsp = BaseResponse(codeNum, codeString)
+                     return base_rsp, codeNum
+@@ -146,7 +146,14 @@
+ 
+     url = conf_tools.load_url_by_conf().get("collect_url")
+     headers = {"Content-Type": "application/json"}
+-    response = requests.post(url, data=json.dumps(get_real_conf_body), headers=headers) # post request
++    try:
++        response = requests.post(url, data=json.dumps(get_real_conf_body), headers=headers) # post request
++    except requests.exceptions.RequestException as connect_ex:
++        LOGGER.error(f"An error occurred: {connect_ex}")
++        codeNum = 500
++        codeString = "Failed to obtain the actual configuration, please check the interface of config/collect."
++        base_rsp = BaseResponse(codeNum, codeString)
++        return base_rsp, codeNum
+ 
+     response_code = json.loads(response.text).get("code")
+     if response_code == None:
+diff --git a/ragdoll/utils/object_parse.py b/ragdoll/utils/object_parse.py
+index d3c31ae..6cc4564 100644
+--- a/ragdoll/utils/object_parse.py
++++ b/ragdoll/utils/object_parse.py
+@@ -14,8 +14,10 @@ import importlib
+ import requests
+ 
+ from ragdoll.const.conf_handler_const import DIRECTORY_FILE_PATH_LIST
++from ragdoll.models import BaseResponse
+ from ragdoll.utils.conf_tools import ConfTools
+ from ragdoll.utils.yang_module import YangModule
++from ragdoll.log.log import LOGGER
+ 
+ BASE_PATH = "ragdoll.config_model."
+ CONFIG_MODEL_NAME = "Config"
+@@ -172,7 +174,7 @@
+ 
+         return conf_info
+ 
+-    def get_pam_files(self, d_conf, host_id):
++    def get_directory_files(self, d_conf, host_id):
+         file_paths = list()
+         conf_tools = ConfTools()
+         file_directory = dict()
+@@ -180,7 +182,14 @@
+         file_directory['host_id'] = host_id
+         url = conf_tools.load_url_by_conf().get("object_file_url")
+         headers = {"Content-Type": "application/json"}
+-        response = requests.post(url, data=json.dumps(file_directory), headers=headers)
++        try:
++            response = requests.post(url, data=json.dumps(file_directory), headers=headers)
++        except requests.exceptions.RequestException as connect_ex:
++            LOGGER.error(f"An error occurred: {connect_ex}")
++            codeNum = 500
++            codeString = "Failed to sync configuration, please check the interface of config/objectfile."
++            base_rsp = BaseResponse(codeNum, codeString)
++            return base_rsp, codeNum
+         response_code = json.loads(response.text).get("code")
+         if response_code == None:
+             codeNum = 500
+-- 
+2.38.1.windows.1
+
diff --git a/gala-ragdoll.spec b/gala-ragdoll.spec
index 0fecf87..5959e27 100644
--- a/gala-ragdoll.spec
+++ b/gala-ragdoll.spec
@@ -1,11 +1,12 @@
 Name: gala-ragdoll
 Version: v1.4.1
-Release: 2
+Release: 3
 Summary: Configuration traceability
 License: MulanPSL2
 URL: https://gitee.com/openeuler/%{name}
 Source0: %{name}-%{version}.tar.gz
 Patch0001: 0001-fix-text-file-sync-bug.patch
+Patch0002: 0002-deal-request-connection-error.patch
 
 %global debug_package %{nil}
 BuildRequires: python3-setuptools python3-connexion python3-werkzeug python3-libyang
@@ -78,6 +79,9 @@ fi
 
 %changelog
+* Thu Nov 9 2023 smjiao - v1.4.1-3
+- deal request connection error
+
 * Wed Nov 8 2023 smjiao - v1.4.1-2
 - fix text file sync bug