diff --git a/0001-solve-installation-dependency-query-error.patch b/0001-solve-installation-dependency-query-error.patch new file mode 100644 index 0000000..42851bf --- /dev/null +++ b/0001-solve-installation-dependency-query-error.patch @@ -0,0 +1,1632 @@ +diff --git a/packageship/application/apps/package/function/build_depend.py b/packageship/application/apps/package/function/build_depend.py +index 92351e7..b68eb91 100644 +--- a/packageship/application/apps/package/function/build_depend.py ++++ b/packageship/application/apps/package/function/build_depend.py +@@ -20,8 +20,10 @@ class BuildDepend(): + result_dict:A dictionary to store the data that needs to be echoed + source_dict:A dictionary to store the searched source code package name + not_found_components: Contain the package not found components ++ __already_pk_val:List of pkgKey found + """ + ++ # pylint: disable = R0902 + def __init__(self, pkg_name_list, db_list, self_build=0, history_dict=None): + """ + init class +@@ -38,6 +40,8 @@ class BuildDepend(): + self.history_dicts = history_dict if history_dict else {} + self.not_found_components = set() + ++ self.__already_pk_val = [] ++ + def build_depend_main(self): + """ + Description: Entry function +@@ -67,7 +71,8 @@ class BuildDepend(): + # Here, a place holder is needed to prevent unpacking errors during call + # 2, This function is an auxiliary function of other modules. 
+ # The status code is not the final display status code +- return ResponseCode.SUCCESS, self.result_dict, self.source_dict, self.not_found_components ++ return (ResponseCode.SUCCESS, self.result_dict, ++ self.source_dict, self.not_found_components) + + return ResponseCode.PARAM_ERROR, None, None, set() + +@@ -80,7 +85,13 @@ class BuildDepend(): + ResponseCode: response code + Raises: + """ +- res_status, build_list, not_fd_com_build = self.search_db.get_build_depend(pkg_list) ++ (res_status, ++ build_list, ++ not_fd_com_build, ++ pk_v ++ ) = self.search_db.get_build_depend(pkg_list, self.__already_pk_val) ++ ++ self.__already_pk_val += pk_v + self.not_found_components.update(not_fd_com_build) + if not build_list: + return res_status if res_status == ResponseCode.DIS_CONNECTION_DB else \ +@@ -91,7 +102,8 @@ class BuildDepend(): + + code, res_dict, not_fd_com_install = \ + InstallDepend(self.db_list).query_install_depend(search_list, +- self.history_dicts) ++ self.history_dicts, ++ self.__already_pk_val) + self.not_found_components.update(not_fd_com_install) + if not res_dict: + return code +@@ -189,7 +201,13 @@ class BuildDepend(): + return + + next_src_set = set() +- _, bin_info_lis, not_fd_com = self.search_db.get_build_depend(pkg_name_li) ++ (_, ++ bin_info_lis, ++ not_fd_com, ++ pk_v ++ ) = self.search_db.get_build_depend(pkg_name_li, ++ self.__already_pk_val) ++ self.__already_pk_val += pk_v + self.not_found_components.update(not_fd_com) + if not bin_info_lis: + return +diff --git a/packageship/application/apps/package/function/install_depend.py b/packageship/application/apps/package/function/install_depend.py +index f3cf05e..c4afe2e 100644 +--- a/packageship/application/apps/package/function/install_depend.py ++++ b/packageship/application/apps/package/function/install_depend.py +@@ -5,9 +5,8 @@ Description: Querying for install dependencies + class: InstallDepend, DictionaryOperations + """ + from packageship.libs.log import Log +-from .searchdb import 
SearchDB +-from .constants import ResponseCode +-from .constants import ListNode ++from packageship.application.apps.package.function.searchdb import SearchDB ++from packageship.application.apps.package.function.constants import ResponseCode, ListNode + + LOGGER = Log(__name__) + +@@ -21,9 +20,11 @@ class InstallDepend(): + binary_dict: Contain all the binary packages info and operation + __search_db: A object of database which would be connected + not_found_components: Contain the package not found components ++ __already_pk_value: List of pkgKey found + changeLog: + """ +- #pylint: disable = too-few-public-methods ++ ++ # pylint: disable = too-few-public-methods + def __init__(self, db_list): + """ + Initialization class +@@ -34,14 +35,16 @@ class InstallDepend(): + self.db_list = db_list + self.__search_db = SearchDB(db_list) + self.not_found_components = set() ++ self.__already_pk_value = [] + +- def query_install_depend(self, binary_list, history_dicts=None): ++ def query_install_depend(self, binary_list, history_pk_val=None, history_dicts=None): + """ + Description: init result dict and determint the loop end point + Args: + binary_list: A list of binary rpm package name + history_dicts: record the searching install depend history, + defualt is None ++ history_pk_val:List of pkgKey found + Returns: + binary_dict.dictionary: + {binary_name: [ +@@ -64,7 +67,8 @@ class InstallDepend(): + if binary: + self.__search_list.append(binary) + else: +- LOGGER.logger.warning("There is a NONE in input value:" + str(binary_list)) ++ LOGGER.logger.warning("There is a NONE in input value: %s", str(binary_list)) ++ self.__already_pk_value += history_pk_val if history_pk_val else [] + while self.__search_list: + self.__query_single_install_dep(history_dicts) + return ResponseCode.SUCCESS, self.binary_dict.dictionary, self.not_found_components +@@ -78,8 +82,14 @@ class InstallDepend(): + response_code: response code + Raises: + """ +- result_list, not_found_components = 
map(set, self.__search_db.get_install_depend(self.__search_list)) ++ result_list, not_found_components, pk_val = map( ++ set, ++ self.__search_db.get_install_depend(self.__search_list, ++ self.__already_pk_value) ++ ) ++ + self.not_found_components.update(not_found_components) ++ self.__already_pk_value += pk_val + for search in self.__search_list: + if search not in self.binary_dict.dictionary: + self.binary_dict.init_key(key=search, parent_node=[]) +@@ -108,7 +118,7 @@ class InstallDepend(): + version=history_dicts[result.depend_name][ListNode.VERSION], + dbname=None, + parent_node=[[result.search_name, 'install']] +- ) ++ ) + else: + self.binary_dict.init_key(key=result.depend_name, + parent_node=[[result.search_name, 'install']]) +@@ -129,6 +139,7 @@ class DictionaryOperations(): + """ + self.dictionary = dict() + ++ # pylint: disable=R0913 + def init_key(self, key, src=None, version=None, dbname=None, parent_node=None): + """ + Description: Creating dictionary +@@ -146,6 +157,7 @@ class DictionaryOperations(): + else: + self.dictionary[key] = [src, version, dbname, parent_node] + ++ # pylint: disable=R0913 + def update_value(self, key, src=None, version=None, dbname=None, parent_node=None): + """ + Description: append dictionary +diff --git a/packageship/application/apps/package/function/searchdb.py b/packageship/application/apps/package/function/searchdb.py +index 400d422..1624e0d 100644 +--- a/packageship/application/apps/package/function/searchdb.py ++++ b/packageship/application/apps/package/function/searchdb.py +@@ -4,7 +4,7 @@ Description: A set for all query databases function + class: SearchDB + functions: db_priority + """ +-from collections import namedtuple ++from collections import namedtuple, Counter + + import yaml + from flask import current_app +@@ -15,10 +15,10 @@ from sqlalchemy import exists + + from packageship.libs.dbutils import DBHelper + from packageship.libs.log import Log +-from packageship.application.models.package import 
BinPack,SrcPack ++from packageship.application.models.package import BinPack, SrcPack + from packageship.libs.exception import ContentNoneException, Error + from packageship.system_config import DATABASE_FILE_INFO +-from .constants import ResponseCode ++from packageship.application.apps.package.function.constants import ResponseCode + + LOGGER = Log(__name__) + +@@ -50,343 +50,231 @@ class SearchDB(): + except DisconnectionError as connection_error: + current_app.logger.error(connection_error) + +- def get_install_depend(self, binary_list): ++ # Related methods of install ++ # pylint: disable=R0914 ++ def get_install_depend(self, binary_list, pk_value=None): + """ + Description: get a package install depend from database: + binary_name -> binary_id -> requires_set -> requires_id_set -> provides_set + -> install_depend_binary_id_key_list -> install_depend_binary_name_list + Args: + binary_list: a list of binary package name ++ pk_value:List of pkgKey found + Returns: + list:install depend list +- set:package not found components ++ set:package not found components, ++ pk_val:The pkgkey corresponding to the required components + Raises: + """ ++ pk_val = pk_value if pk_value else [] + result_list = [] +- get_list = [] + provides_not_found = dict() ++ + if not self.db_object_dict: +- LOGGER.logger.warning("Unable to connect to the database, \ +- check the database configuration") +- return result_list ++ LOGGER.logger.warning("Unable to connect to the database," ++ "check the database configuration") ++ return result_list, set(), pk_val ++ + if None in binary_list: + binary_list.remove(None) + search_set = set(binary_list) ++ + if not search_set: +- LOGGER.logger.warning( +- "The input is None, please check the input value.") +- return result_list +- return_tuple = namedtuple('return_tuple', +- 'depend_name depend_version depend_src_name \ +- search_name search_src_name search_version') ++ LOGGER.logger.warning("The input is None, please check the input value.") ++ 
return result_list, set(), pk_val ++ ++ return_tuple = namedtuple('return_tuple', [ ++ 'depend_name', ++ 'depend_version', ++ 'depend_src_name', ++ 'search_name', ++ 'search_src_name', ++ 'search_version' ++ ]) ++ + for db_name, data_base in self.db_object_dict.items(): + try: +- name_in = literal_column('name').in_(search_set) +- sql_com = text(""" +- SELECT DISTINCT +- bin_pack.NAME AS depend_name, +- bin_pack.version AS depend_version, +- s2.name AS depend_src_name, +- bin_requires.NAME AS req_name, +- bin.NAME AS search_name, +- s1.name AS search_src_name, +- bin.version AS search_version +- FROM +- ( SELECT pkgKey, NAME, version, rpm_sourcerpm FROM bin_pack WHERE {} ) bin +- LEFT JOIN src_pack s1 ON bin.rpm_sourcerpm = s1.src_name +- LEFT JOIN bin_requires ON bin.pkgKey = bin_requires.pkgKey +- LEFT JOIN bin_provides ON bin_provides.name = bin_requires.name +- LEFT JOIN bin_pack ON bin_pack.pkgKey = bin_provides.pkgKey +- LEFT JOIN src_pack s2 ON bin_pack.rpm_sourcerpm = s2.src_name; +- """.format(name_in)) +- install_set = data_base.session. 
\ +- execute(sql_com, {'name_{}'.format(i): v +- for i, v in enumerate(search_set, 1)}).fetchall() +- if install_set: +- # find search_name in db_name +- # depend_name's db_name will be found in next loop +- for result in install_set: +- get_list.append(result.search_name) +- if not result.depend_name and result.req_name: +- if result.req_name in provides_not_found: +- provides_not_found[result.req_name].append( +- [result.search_name, result.search_src_name, result.search_version, db_name]) +- else: +- provides_not_found[result.req_name] = [ +- [result.search_name, result.search_src_name, result.search_version, db_name]] +- else: +- obj = return_tuple( +- result.depend_name, +- result.depend_src_name, +- result.depend_version, +- result.search_name, +- result.search_src_name, +- result.search_version, +- ) +- result_list.append((obj, db_name)) +- get_set = set(get_list) +- get_list.clear() +- search_set.symmetric_difference_update(get_set) +- if not search_set: +- install_result = self._get_install_pro_in_other_database( +- provides_not_found) +- result_list.extend(install_result) +- return result_list, set(provides_not_found.keys()) +- else: ++ req_set = self._get_requires(search_set, data_base, _tp='install') ++ ++ if not req_set: + continue +- except AttributeError as error_msg: +- LOGGER.logger.error(error_msg) +- except SQLAlchemyError as error_msg: +- LOGGER.logger.error(error_msg) +- install_result = self._get_install_pro_in_other_database( +- provides_not_found) +- result_list.extend(install_result) +- for binary_name in search_set: +- result_list.append((return_tuple(None, None, None, +- binary_name, None, None), 'NOT FOUND')) +- return result_list, set(provides_not_found.keys()) + +- def get_src_name(self, binary_name): +- """ +- Description: get a package source name from database: +- bianry_name ->binary_source_name -> source_name +- Args: +- binary_name: search package's name, database preority list +- Returns: +- db_name: database name +- 
source_name: source name +- source_version: source version +- Raises: +- AttributeError: The object does not have this property +- SQLAlchemyError: sqlalchemy error +- """ +- for db_name, data_base in self.db_object_dict.items(): +- sql_str = """ +- SELECT DISTINCT +- src_pack.name AS source_name, +- src_pack.version AS source_version +- FROM +- bin_pack, +- src_pack +- WHERE +- src_pack.src_name = bin_pack.rpm_sourcerpm +- AND bin_pack.name = :binary_name; +- """ +- try: +- bin_obj = data_base.session.execute(text(sql_str), {"binary_name": binary_name}).fetchone() +- source_name = bin_obj.source_name +- source_version = bin_obj.source_version +- if source_name is not None: +- return ResponseCode.SUCCESS, db_name, \ +- source_name, source_version ++ (depend_set, ++ req_pk_dict, ++ pk_v, ++ not_fd_com) = self._get_provides_req_info(req_set, ++ data_base, ++ pk_val) ++ pk_val += pk_v ++ res_list, get_list = self._comb_install_list(depend_set, ++ req_pk_dict, ++ not_fd_com, ++ return_tuple, ++ db_name, ++ provides_not_found, ++ req_set) ++ ++ result_list += res_list ++ ++ search_set.symmetric_difference_update(set(get_list)) ++ ++ if not search_set: ++ result_list.extend( ++ self._get_install_pro_in_other_database(provides_not_found, ++ db_name) ++ ) ++ return result_list, set(provides_not_found.keys()), pk_val ++ + except AttributeError as error_msg: + LOGGER.logger.error(error_msg) + except SQLAlchemyError as error_msg: + LOGGER.logger.error(error_msg) +- return ResponseCode.DIS_CONNECTION_DB, None, None, None +- return ResponseCode.PACK_NAME_NOT_FOUND, None, None, None +- +- def get_sub_pack(self, source_name_list): ++ if search_set: ++ result_list.extend( ++ self._get_install_pro_in_other_database(provides_not_found) ++ ) ++ ++ for binary_name in search_set: ++ result_list.append((return_tuple(None, None, None, ++ binary_name, None, None), 'NOT FOUND')) ++ return result_list, set(provides_not_found.keys()), pk_val ++ ++ # pylint: disable=R0913 ++ @staticmethod ++ 
def _comb_install_list(depend_set, ++ req_pk_dict, ++ not_fd_com, ++ return_tuple, ++ db_name, ++ provides_not_found, ++ req_set): + """ +- Description: get a subpack list based on source name list: +- source_name ->source_name_id -> binary_name ++ Description: Query the corresponding installation dependency list ++ through the components of the requirements + Args: +- source_name_list: search package's name, database preority list ++ depend_set: List binary package information corresponding to the components ++ req_pk_dict:Mapping of components and binary pkgKey ++ not_fd_com: List of pkgKey found, ++ return_tuple: Named tuple format for saving information ++ db_name:current database name ++ provides_not_found:Component mapping not found in the current database ++ req_set:Package information and corresponding component information + Returns: +- response code +- result_list: subpack tuple ++ ret_list:install depend list ++ get_list:Packages that have found results + Raises: +- AttributeError: The object does not have this property +- SQLAlchemyError: sqlalchemy error + """ +- if not self.db_object_dict: +- return ResponseCode.DIS_CONNECTION_DB, None +- search_set = set([ +- source_name for source_name in source_name_list if source_name]) +- result_list = [] + get_list = [] +- if not search_set: +- return ResponseCode.INPUT_NONE, None +- for db_name, data_base in self.db_object_dict.items(): +- try: +- name_in = literal_column('name').in_(search_set) +- sql_com = text(''' +- SELECT +- bin_pack.name AS subpack_name, +- bin_pack.version AS sub_pack_version, +- src.name AS search_name, +- src.version AS search_version +- FROM +- (SELECT name,version,src_name FROM src_pack WHERE {}) src +- LEFT JOIN bin_pack on src.src_name = bin_pack.rpm_sourcerpm'''.format(name_in)) +- subpack_tuple = data_base.session. 
\ +- execute(sql_com, {'name_{}'.format(i): v +- for i, v in enumerate(search_set, 1)}).fetchall() +- if subpack_tuple: +- for result in subpack_tuple: +- result_list.append((result, db_name)) +- get_list.append(result.search_name) +- search_set.symmetric_difference_update(set(get_list)) +- get_list.clear() +- if not search_set: +- return ResponseCode.SUCCESS, result_list +- else: +- continue +- except AttributeError as attr_error: +- current_app.logger.error(attr_error) +- except SQLAlchemyError as sql_error: +- current_app.logger.error(sql_error) +- return_tuple = namedtuple( +- 'return_tuple', 'subpack_name sub_pack_version search_version search_name') +- for search_name in search_set: +- result_list.append( +- (return_tuple(None, None, None, search_name), 'NOT_FOUND')) +- return ResponseCode.SUCCESS, result_list ++ ret_list = [] ++ depend_info_tuple = namedtuple('depend_info', [ ++ 'depend_name', ++ 'depend_version', ++ 'depend_src_name' ++ ]) ++ depend_info_dict = { ++ info.pk: depend_info_tuple(info.depend_name, ++ info.depend_version, ++ info.depend_src_name) ++ for info in depend_set ++ } ++ ++ for req_name, search_name, search_src_name, search_version in req_set: ++ get_list.append(search_name) ++ ++ if not req_name: ++ obj = return_tuple( ++ None, ++ None, ++ None, ++ search_name, ++ search_src_name, ++ search_version, ++ ) ++ ret_list.append((obj, db_name)) ++ ++ elif req_name in req_pk_dict: ++ depend_info_t = depend_info_dict.get(req_pk_dict[req_name]) ++ obj = return_tuple( ++ depend_info_t.depend_name, ++ depend_info_t.depend_version, ++ depend_info_t.depend_src_name, ++ search_name, ++ search_src_name, ++ search_version, ++ ) ++ ret_list.append((obj, db_name)) ++ ++ else: ++ if req_name in not_fd_com: ++ if req_name not in provides_not_found: ++ provides_not_found[req_name] = [[search_name, search_src_name, ++ search_version, db_name]] ++ else: ++ provides_not_found[req_name].append([search_name, search_src_name, ++ search_version, db_name]) ++ ++ 
return ret_list, get_list + +- def _get_binary_in_other_database(self, not_found_binary): ++ def _get_install_pro_in_other_database(self, not_found_binary, _db_name=None): + """ + Description: Binary package name data not found in + the current database, go to other databases to try + Args: + not_found_binary: not_found_build These data cannot be found in the current database +- db_:current database name ++ _db_name:current database name + Returns: +- a list :[(search_name,source_name,bin_name, +- bin_version,db_name,search_version,req_name), +- (search_name,source_name,bin_name, +- bin_version,db_name,search_version,req_name),] ++ result_list :[return_tuple1,return_tuple2] package information + Raises: +- AttributeError: The object does not have this property +- SQLAlchemyError: sqlalchemy error + """ + if not not_found_binary: + return [] + +- return_tuple = namedtuple("return_tuple", [ +- "search_name", +- "source_name", +- "bin_name", +- "version", +- "db_name", +- "search_version", ++ return_tuple = namedtuple('return_tuple', [ ++ 'depend_name', ++ 'depend_version', ++ 'depend_src_name', ++ 'search_name', ++ 'search_src_name', ++ 'search_version' + ]) +- search_list = [] ++ + result_list = [] ++ search_set = {k for k, _ in not_found_binary.items()} ++ + for db_name, data_base in self.db_object_dict.items(): +- for key, _ in not_found_binary.items(): +- search_list.append(key) ++ if db_name == _db_name: ++ continue + +- search_set = set(search_list) +- search_list.clear() +- try: +- sql_string = text(""" +- SELECT DISTINCT +- s1.name AS source_name, +- t1.NAME AS bin_name, +- t1.version, +- t2.NAME AS req_name +- FROM +- src_pack s1, +- bin_pack t1, +- bin_provides t2 +- WHERE +- t2.{} +- AND t1.pkgKey = t2.pkgKey +- AND t1.rpm_sourcerpm = s1.src_name; +- """.format(literal_column('name').in_(search_set))) +- bin_set = data_base.session. 
\ +- execute(sql_string, {'name_{}'.format(i): v +- for i, v in enumerate(search_set, 1)}).fetchall() +- if bin_set: +- for result in bin_set: +- if result.req_name not in not_found_binary: +- LOGGER.logger.warning( +- result.req_name + " contains in two rpm packages!!!") +- else: +- for source_info in not_found_binary[result.req_name]: +- obj = return_tuple( +- source_info[0], +- result.source_name, +- result.bin_name, +- result.version, +- db_name, +- source_info[1] +- ) +- result_list.append(obj) +- del not_found_binary[result.req_name] +- if not not_found_binary: +- return result_list +- except AttributeError as attr_err: +- current_app.logger.error(attr_err) +- except SQLAlchemyError as sql_err: +- current_app.logger.error(sql_err) ++ parm_tuple = namedtuple("in_tuple", 'req_name') ++ in_tuple_list = [parm_tuple(k) for k, _ in not_found_binary.items()] ++ ++ depend_set, req_pk_dict, *_ = self._get_provides_req_info( ++ in_tuple_list, ++ data_base ++ ) ++ ++ depend_info_tuple = namedtuple('depend_info', [ ++ 'depend_name', ++ 'depend_version', ++ 'depend_src_name' ++ ]) ++ depend_info_dict = { ++ info.pk: depend_info_tuple(info.depend_name, ++ info.depend_version, ++ info.depend_src_name) ++ for info in depend_set ++ } ++ result_list += self._comb_install_info(search_set, ++ req_pk_dict, ++ depend_info_dict, ++ not_found_binary, ++ return_tuple, ++ db_name) ++ if not not_found_binary: ++ return result_list + + if not_found_binary: +- for key, values in not_found_binary.items(): +- for info in values: +- obj = return_tuple( +- info[0], +- None, +- None, +- None, +- 'NOT FOUND', +- info[2] +- ) +- result_list.append(obj) +- return result_list +- +- def _get_install_pro_in_other_database(self, not_found_binary): +- if not not_found_binary: +- return [] +- return_tuple = namedtuple('return_tuple', +- 'depend_name depend_version depend_src_name \ +- search_name search_src_name search_version') +- search_list = [] +- result_list = [] +- for db_name, data_base in 
self.db_object_dict.items(): +- for key, values in not_found_binary.items(): +- search_list.append(key) +- search_set = set(search_list) +- search_list.clear() +- sql_string = text(""" +- SELECT DISTINCT +- s1.name AS source_name, +- t1.NAME AS bin_name, +- t1.version, +- t2.NAME AS req_name +- FROM +- src_pack s1, +- bin_pack t1, +- bin_provides t2 +- WHERE +- t2.{} +- AND t1.pkgKey = t2.pkgKey +- AND t1.rpm_sourcerpm = s1.src_name; +- """.format(literal_column('name').in_(search_set))) +- bin_set = data_base.session. \ +- execute(sql_string, {'name_{}'.format(i): v +- for i, v in enumerate(search_set, 1)}).fetchall() +- if bin_set: +- for result in bin_set: +- if result.req_name not in not_found_binary: +- LOGGER.logger.warning( +- result.req_name + " contains in two rpm packages!!!") +- else: +- for binary_info in not_found_binary[result.req_name]: +- obj = return_tuple( +- result.bin_name, +- result.version, +- result.source_name, +- binary_info[0], +- binary_info[1], +- binary_info[2] +- ) +- result_list.append((obj, binary_info[3])) +- del not_found_binary[result.req_name] +- if not not_found_binary: +- return result_list +- if not_found_binary: +- for key, values in not_found_binary.items(): ++ for _, values in not_found_binary.items(): + for info in values: + obj = return_tuple( + None, +@@ -399,11 +287,52 @@ class SearchDB(): + result_list.append((obj, info[3])) + return result_list + +- def get_build_depend(self, source_name_li): ++ @staticmethod ++ def _comb_install_info(search_set, ++ req_pk_dict, ++ depend_info_dict, ++ not_found_binary, ++ return_tuple, ++ db_name): ++ """ ++ Description: Binary package name data not found in ++ the current database, go to other databases to try ++ Args: ++ search_set: The name of the component to be queried ++ req_pk_dict:Mapping of components and binary pkgKey ++ depend_info_dict:The mapping of binary pkgKey and binary information ++ not_found_binary:not_found_build These data cannot be found in the current database 
++ return_tuple:Named tuple format for saving information ++ db_name:current database name ++ Returns: ++ ret_list :[return_tuple1,return_tuple2] package information ++ Raises: ++ """ ++ ret_list = [] ++ for req_name in search_set: ++ if req_name in req_pk_dict: ++ pk_ = req_pk_dict[req_name] ++ if pk_ in depend_info_dict: ++ for binary_info in not_found_binary[req_name]: ++ obj = return_tuple( ++ depend_info_dict[pk_].depend_name, ++ depend_info_dict[pk_].depend_version, ++ depend_info_dict[pk_].depend_src_name, ++ binary_info[0], ++ binary_info[1], ++ binary_info[2] ++ ) ++ ret_list.append((obj, db_name)) ++ del not_found_binary[req_name] ++ return ret_list ++ ++ # Related methods of build ++ def get_build_depend(self, source_name_li, pk_value=None): + """ + Description: get a package build depend from database + Args: + source_name_li: search package's name list ++ pk_value:List of pkgKey found + Returns: + all source pkg build depend list + structure :[(search_name,source_name,bin_name,bin_version,db_name,search_version), +@@ -422,93 +351,428 @@ class SearchDB(): + "db_name", + "search_version" + ]) +- ++ pk_val = pk_value if pk_value else [] + s_name_set = set(source_name_li) + if not s_name_set: +- return ResponseCode.PARAM_ERROR, set() ++ return ResponseCode.PARAM_ERROR, list(), set(), pk_val + + provides_not_found = dict() + build_list = [] + + for db_name, data_base in self.db_object_dict.items(): + +- build_set = [] + try: +- temp_list = list(s_name_set) +- for input_name_li in [temp_list[i:i + 900] for i in range(0, len(temp_list), 900)]: +- sql_com = text(""" +- SELECT DISTINCT +- src.NAME AS search_name, +- src.version AS search_version, +- s1.name AS source_name, +- bin_provides.pkgKey AS bin_id, +- src_requires.NAME AS req_name, +- bin_pack.version AS version, +- bin_pack.NAME AS bin_name +- FROM +- ( SELECT pkgKey, NAME, version FROM src_pack WHERE {}) src +- LEFT JOIN src_requires ON src.pkgKey = src_requires.pkgKey +- LEFT JOIN bin_provides ON 
bin_provides.NAME = src_requires.NAME +- LEFT JOIN bin_pack ON bin_pack.pkgKey = bin_provides.pkgKey +- LEFT JOIN src_pack s1 on bin_pack.rpm_sourcerpm=s1.src_name; +- """.format(literal_column("name").in_(input_name_li))) +- res = data_base.session.execute( +- sql_com, +- {'name_{}'.format(i): v +- for i, v in enumerate(input_name_li, 1)} +- ).fetchall() +- +- build_set.extend(res) ++ req_set = self._get_requires(s_name_set, data_base, _tp='build') ++ ++ if not req_set: ++ continue ++ ++ (depend_set, ++ req_pk_dict, ++ pk_v, ++ not_fd_req) = self._get_provides_req_info(req_set, data_base) ++ ++ pk_val += pk_v ++ ret_list, get_list = self._comb_build_list(depend_set, ++ req_pk_dict, ++ not_fd_req, ++ return_tuple, ++ db_name, ++ provides_not_found, ++ req_set) ++ build_list += ret_list ++ s_name_set.symmetric_difference_update(set(get_list)) ++ if not s_name_set: ++ build_list.extend( ++ self._get_binary_in_other_database(provides_not_found, _db_name=db_name) ++ ) ++ return ResponseCode.SUCCESS, build_list, set(provides_not_found.keys()), pk_val ++ + except AttributeError as attr_err: + current_app.logger.error(attr_err) + except SQLAlchemyError as sql_err: + current_app.logger.error(sql_err) + +- if not build_set: ++ if s_name_set: ++ build_list.extend( ++ self._get_binary_in_other_database(provides_not_found) ++ ) ++ for source in s_name_set: ++ LOGGER.logger.warning( ++ "CANNOT FOUND THE SOURCE %s in all database", source) ++ ++ return ResponseCode.SUCCESS, build_list, set(provides_not_found.keys()), pk_val ++ ++ @staticmethod ++ def _comb_build_list(depend_set, ++ req_pk_dict, ++ not_fd_com, ++ return_tuple, ++ db_name, ++ provides_not_found, ++ req_set): ++ """ ++ Description: Query the corresponding build dependency list ++ through the components of the requirements ++ Args: ++ depend_set: List binary package information corresponding to the components ++ req_pk_dict:Mapping of components and binary pkgKey ++ not_fd_com: List of pkgKey found, ++ return_tuple: 
Named tuple format for saving information ++ db_name:current database name ++ provides_not_found:Component mapping not found in the current database ++ req_set:Package information and corresponding component information ++ Returns: ++ ret_list:install depend list ++ get_list:Packages that have found results ++ Raises: ++ """ ++ get_list = [] ++ ret_list = [] ++ depend_info_tuple = namedtuple('depend_info', [ ++ 'depend_name', ++ 'depend_version', ++ 'depend_src_name' ++ ]) ++ depend_info_dict = { ++ info.pk: depend_info_tuple(info.depend_name, ++ info.depend_version, ++ info.depend_src_name) ++ for info in depend_set ++ } ++ ++ for req_name, search_name, search_version in req_set: ++ ++ get_list.append(search_name) ++ ++ if not req_name: ++ obj = return_tuple( ++ search_name, ++ None, ++ None, ++ None, ++ db_name, ++ search_version, ++ ) ++ ret_list.append(obj) ++ ++ elif req_name in req_pk_dict: ++ depend_info_t = depend_info_dict.get(req_pk_dict[req_name]) ++ obj = return_tuple( ++ search_name, ++ depend_info_t.depend_src_name, ++ depend_info_t.depend_name, ++ depend_info_t.depend_version, ++ db_name, ++ search_version ++ ) ++ ret_list.append(obj) ++ ++ else: ++ if req_name in not_fd_com: ++ if req_name not in provides_not_found: ++ provides_not_found[req_name] = [ ++ [search_name, ++ search_version, ++ db_name] ++ ] ++ else: ++ provides_not_found[req_name].append([search_name, ++ search_version, ++ db_name]) ++ ++ return ret_list, get_list ++ ++ def _get_binary_in_other_database(self, not_found_binary, _db_name=None): ++ """ ++ Description: Binary package name data not found in ++ the current database, go to other databases to try ++ Args: ++ not_found_binary: not_found_build These data cannot be found in the current database ++ _db_name:current database name ++ Returns: ++ result_list :[return_tuple1,return_tuple2] package information ++ Raises: ++ AttributeError: The object does not have this property ++ SQLAlchemyError: sqlalchemy error ++ """ ++ if not 
not_found_binary: ++ return [] ++ ++ return_tuple = namedtuple("return_tuple", [ ++ "search_name", ++ "source_name", ++ "bin_name", ++ "version", ++ "db_name", ++ "search_version", ++ ]) ++ ++ result_list = [] ++ search_set = {k for k, _ in not_found_binary.items()} ++ ++ for db_name, data_base in self.db_object_dict.items(): ++ ++ if db_name == _db_name: + continue + +- # When processing source package without compilation dependency +- get_list = [] +- for result in build_set: +- get_list.append(result.search_name) +- if not result.bin_name and result.req_name: +- if result.req_name in provides_not_found: +- provides_not_found[result.req_name].append( +- [result.search_name, result.search_version, db_name] +- ) +- else: +- provides_not_found[result.req_name] = [ +- [result.search_name, result.search_version, db_name] +- ] +- else: ++ in_tuple = namedtuple("in_tuple", 'req_name') ++ in_tuple_list = [in_tuple(k) for k, _ in not_found_binary.items()] ++ ++ depend_set, req_pk_dict, *_ = self._get_provides_req_info( ++ in_tuple_list, ++ data_base ++ ) ++ ++ depend_info_tuple = namedtuple('depend_info', [ ++ 'depend_name', ++ 'depend_version', ++ 'depend_src_name' ++ ]) ++ depend_info_dict = { ++ info.pk: depend_info_tuple(info.depend_name, ++ info.depend_version, ++ info.depend_src_name) ++ for info in depend_set ++ } ++ ++ result_list += self._comb_build_info(search_set, ++ req_pk_dict, ++ depend_info_dict, ++ not_found_binary, ++ return_tuple, ++ db_name) ++ if not not_found_binary: ++ return result_list ++ ++ if not_found_binary: ++ for _, values in not_found_binary.items(): ++ for info in values: + obj = return_tuple( +- result.search_name, +- result.source_name, +- result.bin_name, +- result.version, +- db_name, +- result.search_version ++ info[0], ++ None, ++ None, ++ None, ++ 'NOT FOUND', ++ info[2] + ) +- build_list.append(obj) ++ result_list.append(obj) ++ return result_list + +- get_set = set(get_list) +- get_list.clear() +- 
s_name_set.symmetric_difference_update(get_set) +- if not s_name_set: +- build_result = self._get_binary_in_other_database( +- provides_not_found) +- build_list.extend(build_result) +- return ResponseCode.SUCCESS, build_list, set(provides_not_found.keys()) ++ @staticmethod ++ def _comb_build_info(search_set, ++ req_pk_dict, ++ depend_info_dict, ++ not_found_binary, ++ return_tuple, ++ db_name): ++ """ ++ Description: Binary package name data not found in ++ the current database, go to other databases to try ++ Args: ++ search_set: The name of the component to be queried ++ req_pk_dict:Mapping of components and binary pkgKey ++ depend_info_dict:The mapping of binary pkgKey and binary information ++ not_found_binary:not_found_build These data cannot be found in the current database ++ return_tuple:Named tuple format for saving information, ++ db_name:current data base name ++ Returns: ++ ret_list :[return_tuple1,return_tuple2] package information ++ Raises: ++ """ ++ ret_list = [] ++ for req_name in search_set: ++ if req_name in req_pk_dict: ++ pk_ = req_pk_dict[req_name] ++ if pk_ in depend_info_dict: ++ for binary_info in not_found_binary[req_name]: ++ obj = return_tuple( ++ binary_info[0], ++ depend_info_dict[pk_].depend_src_name, ++ depend_info_dict[pk_].depend_name, ++ depend_info_dict[pk_].depend_version, ++ db_name, ++ binary_info[1] ++ ) ++ ret_list.append(obj) ++ del not_found_binary[req_name] ++ return ret_list + +- if s_name_set: +- build_result = self._get_binary_in_other_database( +- provides_not_found) +- build_list.extend(build_result) +- for source in s_name_set: +- LOGGER.logger.warning( +- "CANNOT FOUND THE source " + source + " in all database") +- return ResponseCode.SUCCESS, build_list, set(provides_not_found.keys()) ++ # Common methods for install and build ++ @staticmethod ++ def _get_requires(search_set, data_base, _tp=None): ++ """ ++ Description: Query the dependent components of the current package ++ Args: ++ search_set: The package name 
to be queried ++ data_base:current database object ++ _tp:type options build or install ++ Returns: ++ req_set:List Package information and corresponding component information ++ Raises: ++ AttributeError: The object does not have this property ++ SQLAlchemyError: sqlalchemy error ++ """ ++ if _tp == 'build': ++ sql_com = text(""" ++ SELECT DISTINCT ++ src_requires.NAME AS req_name, ++ src.NAME AS search_name, ++ src.version AS search_version ++ FROM ++ ( SELECT pkgKey, NAME, version, src_name FROM src_pack WHERE {} ) src ++ LEFT JOIN src_requires ON src.pkgKey = src_requires.pkgKey; ++ """.format(literal_column('name').in_(search_set))) ++ elif _tp == 'install': ++ sql_com = text(""" ++ SELECT DISTINCT ++ bin_requires.NAME AS req_name, ++ bin.NAME AS search_name, ++ s1.name as search_src_name, ++ bin.version AS search_version ++ FROM ++ ( SELECT pkgKey, NAME, version, rpm_sourcerpm FROM bin_pack WHERE {} ) bin ++ LEFT JOIN src_pack s1 ON bin.rpm_sourcerpm = s1.src_name ++ LEFT JOIN bin_requires ON bin.pkgKey = bin_requires.pkgKey; ++ """.format(literal_column('name').in_(search_set))) ++ else: ++ return [] + ++ req_set = [] ++ try: ++ req_set = data_base.session. \ ++ execute(sql_com, {'name_{}'.format(i): v ++ for i, v in enumerate(search_set, 1)}).fetchall() ++ except AttributeError as error_msg: ++ LOGGER.logger.error(error_msg) ++ except SQLAlchemyError as error_msg: ++ LOGGER.logger.error(error_msg) ++ return req_set ++ ++ def _get_provides_req_info(self, req_info, data_base, pk_value=None): ++ """ ++ Description: Get the name of the binary package ++ that provides the dependent component, ++ Filter redundant queries ++ when the same binary package is provided to multiple components ++ Args: ++ req_info: List of sqlalchemy objects with component names. 
++ data_base: The database currently being queried ++ pk_value:Binary pkgKey that has been found ++ Returns: ++ depend_set: List of related dependent sqlalchemy objects ++ req_pk_dict: Mapping dictionary of component name and pkgKey ++ pk_val:update Binary pkgKey that has been found ++ not_fd_req: Components not found ++ Raises: ++ AttributeError: The object does not have this property ++ SQLAlchemyError: sqlalchemy error ++ """ ++ pk_val = pk_value if pk_value else [] ++ depend_set = [] ++ req_pk_dict = {} ++ not_fd_req = set() ++ try: ++ req_names = {req_.req_name ++ for req_ in req_info ++ if req_.req_name is not None} ++ req_name_in = literal_column('name').in_(req_names) ++ ++ sql_com_pro = text(""" ++ SELECT DISTINCT ++ NAME as req_name, ++ pkgKey ++ FROM ++ ( SELECT name, pkgKey FROM bin_provides ++ UNION ALL ++ SELECT name, pkgKey FROM bin_files ) ++ WHERE ++ {}; ++ """.format(req_name_in)) ++ ++ pkg_key_set = data_base.session.execute( ++ sql_com_pro, { ++ 'name_{}'.format(i): v ++ for i, v in enumerate(req_names, 1) ++ } ++ ).fetchall() ++ ++ req_pk_dict = dict() ++ pk_v = list() ++ ++ for req_name, pk_ in pkg_key_set: ++ if not req_name: ++ continue ++ pk_v.append(pk_) ++ if req_name not in req_pk_dict: ++ req_pk_dict[req_name] = [pk_] ++ else: ++ req_pk_dict[req_name].append(pk_) ++ ++ pk_val += pk_v ++ ++ pk_count_dic = Counter(pk_val) ++ ++ for key, values in req_pk_dict.items(): ++ count_values = list(map( ++ lambda x: pk_count_dic[x] if x in pk_count_dic else 0, values ++ )) ++ max_index = count_values.index(max(count_values)) ++ req_pk_dict[key] = values[max_index] ++ ++ not_fd_req = req_names - set(req_pk_dict.keys()) ++ depend_set = self._get_depend_info(req_pk_dict, data_base) ++ ++ except SQLAlchemyError as sql_err: ++ LOGGER.logger.error(sql_err) ++ except AttributeError as error_msg: ++ LOGGER.logger.error(error_msg) ++ ++ return depend_set, req_pk_dict, pk_val, not_fd_req ++ ++ @staticmethod ++ def _get_depend_info(req_pk_dict, data_base): 
++ """ ++ Description: Obtain binary related information through binary pkgKey ++ Args: ++ req_pk_dict: Mapping dictionary of component name and pkgKey ++ data_base: The database currently being queried ++ Returns: ++ depend_set: List of related dependent sqlalchemy objects ++ Raises: ++ AttributeError: The object does not have this property ++ SQLAlchemyError: sqlalchemy error ++ """ ++ depend_set = [] ++ try: ++ bin_src_pkg_key = req_pk_dict.values() ++ pk_in = literal_column('pkgKey').in_(bin_src_pkg_key) ++ sql_bin_src = text(""" ++ SELECT DISTINCT ++ bin.pkgKey as pk, ++ bin.name AS depend_name, ++ bin.version AS depend_version, ++ src_pack.name AS depend_src_name ++ FROM ++ ( SELECT name, pkgKey,version, rpm_sourcerpm FROM bin_pack WHERE {} ) bin ++ LEFT JOIN src_pack ON src_pack.src_name = bin.rpm_sourcerpm; ++ """.format(pk_in)) ++ ++ depend_set = data_base.session.execute( ++ sql_bin_src, { ++ 'pkgKey_{}'.format(i): v ++ for i, v in enumerate(bin_src_pkg_key, 1) ++ } ++ ).fetchall() ++ ++ except SQLAlchemyError as sql_err: ++ LOGGER.logger.error(sql_err) ++ except AttributeError as error_msg: ++ LOGGER.logger.error(error_msg) ++ ++ return depend_set ++ ++ # Other methods + def binary_search_database_for_first_time(self, binary_name): + """ + Args: +@@ -553,6 +817,105 @@ class SearchDB(): + + return None, None + ++ def get_src_name(self, binary_name): ++ """ ++ Description: get a package source name from database: ++ bianry_name ->binary_source_name -> source_name ++ Args: ++ binary_name: search package's name, database preority list ++ Returns: ++ db_name: database name ++ source_name: source name ++ source_version: source version ++ Raises: ++ AttributeError: The object does not have this property ++ SQLAlchemyError: sqlalchemy error ++ """ ++ for db_name, data_base in self.db_object_dict.items(): ++ sql_str = """ ++ SELECT DISTINCT ++ src_pack.name AS source_name, ++ src_pack.version AS source_version ++ FROM ++ bin_pack, ++ src_pack ++ WHERE ++ 
src_pack.src_name = bin_pack.rpm_sourcerpm ++ AND bin_pack.name = :binary_name; ++ """ ++ try: ++ bin_obj = data_base.session.execute(text(sql_str), ++ {"binary_name": binary_name} ++ ).fetchone() ++ source_name = bin_obj.source_name ++ source_version = bin_obj.source_version ++ if source_name is not None: ++ return ResponseCode.SUCCESS, db_name, \ ++ source_name, source_version ++ except AttributeError as error_msg: ++ LOGGER.logger.error(error_msg) ++ except SQLAlchemyError as error_msg: ++ LOGGER.logger.error(error_msg) ++ return ResponseCode.DIS_CONNECTION_DB, None, None, None ++ return ResponseCode.PACK_NAME_NOT_FOUND, None, None, None ++ ++ def get_sub_pack(self, source_name_list): ++ """ ++ Description: get a subpack list based on source name list: ++ source_name ->source_name_id -> binary_name ++ Args: ++ source_name_list: search package's name, database preority list ++ Returns: ++ response code ++ result_list: subpack tuple ++ Raises: ++ AttributeError: The object does not have this property ++ SQLAlchemyError: sqlalchemy error ++ """ ++ if not self.db_object_dict: ++ return ResponseCode.DIS_CONNECTION_DB, None ++ search_set = {source_name for source_name in source_name_list if source_name} ++ result_list = [] ++ get_list = [] ++ if not search_set: ++ return ResponseCode.INPUT_NONE, None ++ for db_name, data_base in self.db_object_dict.items(): ++ try: ++ name_in = literal_column('name').in_(search_set) ++ sql_com = text(''' ++ SELECT ++ bin_pack.name AS subpack_name, ++ bin_pack.version AS sub_pack_version, ++ src.name AS search_name, ++ src.version AS search_version ++ FROM ++ (SELECT name,version,src_name FROM src_pack WHERE {}) src ++ LEFT JOIN bin_pack on src.src_name = bin_pack.rpm_sourcerpm ++ '''.format(name_in)) ++ subpack_tuple = data_base.session. 
\ ++ execute(sql_com, {'name_{}'.format(i): v ++ for i, v in enumerate(search_set, 1)}).fetchall() ++ if subpack_tuple: ++ for result in subpack_tuple: ++ result_list.append((result, db_name)) ++ get_list.append(result.search_name) ++ search_set.symmetric_difference_update(set(get_list)) ++ get_list.clear() ++ if not search_set: ++ return ResponseCode.SUCCESS, result_list ++ else: ++ continue ++ except AttributeError as attr_error: ++ current_app.logger.error(attr_error) ++ except SQLAlchemyError as sql_error: ++ current_app.logger.error(sql_error) ++ return_tuple = namedtuple( ++ 'return_tuple', 'subpack_name sub_pack_version search_version search_name') ++ for search_name in search_set: ++ result_list.append( ++ (return_tuple(None, None, None, search_name), 'NOT FOUND')) ++ return ResponseCode.SUCCESS, result_list ++ + + def db_priority(): + """ +diff --git a/packageship/application/apps/package/function/self_depend.py b/packageship/application/apps/package/function/self_depend.py +index dd72bed..1ec4c28 100644 +--- a/packageship/application/apps/package/function/self_depend.py ++++ b/packageship/application/apps/package/function/self_depend.py +@@ -8,11 +8,11 @@ class: SelfDepend, DictionaryOperations + + import copy + from packageship.libs.log import Log +-from .searchdb import SearchDB +-from .constants import ResponseCode +-from .constants import ListNode +-from .install_depend import InstallDepend as install_depend +-from .build_depend import BuildDepend as build_depend ++from packageship.application.apps.package.function.searchdb import SearchDB ++from packageship.application.apps.package.function.constants import ResponseCode, ListNode ++from packageship.application.apps.package.function.install_depend import InstallDepend \ ++ as install_depend ++from packageship.application.apps.package.function.build_depend import BuildDepend as build_depend + + LOGGER = Log(__name__) + +@@ -35,6 +35,8 @@ class SelfDepend(): + search_db: A object of database which would 
be connected + not_found_components: Contain the package not found components + """ ++ ++ # pylint: disable = R0902 + def __init__(self, db_list): + """ + init class +@@ -72,7 +74,8 @@ class SelfDepend(): + self.withsubpack = withsubpack + response_code = self.init_dict(packname, packtype) + if response_code != ResponseCode.SUCCESS: +- return response_code, self.binary_dict.dictionary, self.source_dicts.dictionary, self.not_found_components ++ return (response_code, self.binary_dict.dictionary, ++ self.source_dicts.dictionary, self.not_found_components) + + for key, _ in self.binary_dict.dictionary.items(): + self.search_install_list.append(key) +@@ -88,7 +91,8 @@ class SelfDepend(): + self.with_subpack() + if self.search_build_list: + self.query_build(selfbuild) +- return response_code, self.binary_dict.dictionary, self.source_dicts.dictionary, self.not_found_components ++ return (response_code, self.binary_dict.dictionary, ++ self.source_dicts.dictionary, self.not_found_components) + + def init_dict(self, packname, packtype): + """ +@@ -105,7 +109,7 @@ class SelfDepend(): + if subpack_list: + for subpack_tuple, dbname in subpack_list: + self.source_dicts.append_src(packname, dbname, subpack_tuple.search_version) +- if dbname != 'NOT_FOUND': ++ if dbname != 'NOT FOUND': + self.binary_dict.append_bin(key=subpack_tuple.subpack_name, + src=packname, + version=subpack_tuple.search_version, +@@ -155,7 +159,8 @@ class SelfDepend(): + db_, src_version_ = self.search_db.get_version_and_db(source_name) + self.source_dicts.append_src(key=source_name, + dbname=db_ if db_ else values[ListNode.DBNAME], +- version=src_version_ if src_version_ else values[ListNode.VERSION]) ++ version=src_version_ ++ if src_version_ else values[ListNode.VERSION]) + self.search_build_list.append(source_name) + if self.withsubpack == 1: + self.search_subpack_list.append(source_name) +@@ -168,13 +173,14 @@ class SelfDepend(): + Raises: + """ + if None in self.search_subpack_list: +- 
LOGGER.logger.warning("There is a NONE in input value:" + \ +- str(self.search_subpack_list)) ++ LOGGER.logger.warning("There is a NONE in input value: %s", ++ str(self.search_subpack_list)) + self.search_subpack_list.remove(None) + _, result_list = self.search_db.get_sub_pack(self.search_subpack_list) + for subpack_tuple, dbname in result_list: +- if dbname != 'NOT_FOUND': +- if subpack_tuple.subpack_name and subpack_tuple.subpack_name not in self.binary_dict.dictionary: ++ if dbname != 'NOT FOUND': ++ if subpack_tuple.subpack_name and subpack_tuple.subpack_name \ ++ not in self.binary_dict.dictionary: + self.binary_dict.append_bin(key=subpack_tuple.subpack_name, + src=subpack_tuple.search_name, + version=subpack_tuple.sub_pack_version, +@@ -214,7 +220,7 @@ class SelfDepend(): + self.search_build_list.clear() + for key, values in self.result_tmp.items(): + if not key: +- LOGGER.logger.warning("key is NONE for value = " + str(values)) ++ LOGGER.logger.warning("key is NONE for value = %s", str(values)) + continue + if key not in self.binary_dict.dictionary and values[0] != 'source': + self.binary_dict.dictionary[key] = copy.deepcopy(values) +@@ -225,11 +231,13 @@ class SelfDepend(): + db_, src_version_ = self.search_db.get_version_and_db(source_name) + self.source_dicts.append_src(key=source_name, + dbname=db_ if db_ else values[ListNode.DBNAME], +- version=src_version_ if src_version_ else values[ListNode.VERSION]) ++ version=src_version_ ++ if src_version_ else values[ListNode.VERSION]) + if self.withsubpack == 1: + self.search_subpack_list.append(source_name) + elif key in self.binary_dict.dictionary: +- self.binary_dict.update_value(key=key, parent_list=values[ListNode.PARENT_LIST]) ++ self.binary_dict.update_value(key=key, ++ parent_list=values[ListNode.PARENT_LIST]) + + def query_selfbuild(self): + """ +@@ -246,7 +254,7 @@ class SelfDepend(): + self.not_found_components.update(not_fd_com) + for key, values in self.result_tmp.items(): + if not key: +- 
LOGGER.logger.warning("key is NONE for value = " + str(values)) ++ LOGGER.logger.warning("key is NONE for value = %s", str(values)) + continue + if key in self.binary_dict.dictionary: + self.binary_dict.update_value(key=key, parent_list=values[ListNode.PARENT_LIST]) +@@ -255,11 +263,11 @@ class SelfDepend(): + self.search_install_list.append(key) + for key, values in source_dicts_tmp.items(): + if not key: +- LOGGER.logger.warning("key is NONE for value = " + str(values)) ++ LOGGER.logger.warning("key is NONE for value = %s", str(values)) + continue + if key not in self.source_dicts.dictionary: + self.source_dicts.dictionary[key] = copy.deepcopy(values) +- if self.with_subpack == 1: ++ if self.withsubpack == 1: + self.search_subpack_list.append(key) + self.search_build_list.clear() + +@@ -289,6 +297,7 @@ class DictionaryOperations(): + """ + self.dictionary[key] = [dbname, version] + ++ # pylint: disable=R0913 + def append_bin(self, key, src=None, version=None, dbname=None, parent_node=None): + """ + Description: Appending binary dictionary +diff --git a/packageship/pkgship b/packageship/pkgship +index e19ddc4..9210bd2 100644 +--- a/packageship/pkgship ++++ b/packageship/pkgship +@@ -20,4 +20,4 @@ if __name__ == '__main__': + main() + except Exception as error: + print('Command execution error please try again ') +- print(e.message) ++ print(str(error)) +diff --git a/packageship/pkgshipd b/packageship/pkgshipd +index fef39e3..2035b75 100755 +--- a/packageship/pkgshipd ++++ b/packageship/pkgshipd +@@ -12,23 +12,35 @@ fi + + user=$(id | awk '{print $2}' | cut -d = -f 2) + if [ "$user" == "0(root)" ]; then +- echo "[INFO] Current user is root" ++ echo "[INFO] Current user is root." + else +- echo "[ERROR] Current user is not root, the service don't support common user." ++ echo "[ERROR] Current user is not root." + exit 1 + fi + + function check_config_file(){ + echo "[INFO] Check validation of config file." 
+ check_null +- ++ + echo "[INFO] Check validation of ip addresses." + write_port=$(get_config "$service" "write_port") + query_port=$(get_config "$service" "query_port") + write_ip_addr=$(get_config "$service" "write_ip_addr") + query_ip_addr=$(get_config "$service" "query_ip_addr") +- check_addr $write_ip_addr $write_port +- check_addr $query_ip_addr $query_port ++ if [[ -z $write_ip_addr ]]; then ++ echo "[ERROR] The value of below config names is None in: $SYS_PATH/package.ini, Please check these parameters: write_ip_addr" ++ exit 1 ++ else ++ check_addr $write_ip_addr $write_port ++ fi ++ ++ if [[ -z $query_ip_addr ]]; then ++ echo "[ERROR] The value of below config names is None in: $SYS_PATH/package.ini, Please check these parameters: query_ip_addr" ++ exit 1 ++ else ++ check_addr $query_ip_addr $query_port ++ fi ++ + echo "[INFO] IP addresses are all valid." + + echo "[INFO] Check validation of numbers." +@@ -47,8 +59,8 @@ function check_config_file(){ + echo "[INFO] Check validation of words." + log_level=$(get_config "$service" "log_level") + open=$(get_config "$service" "open") +- check_word $log_level "INFO|DEBUG|WARNING|ERROR|CRITICAL" "log_level" +- check_word $open "True|False" "open" ++ check_word "log_level" "INFO|DEBUG|WARNING|ERROR|CRITICAL" $log_level ++ check_word "open" "True|False" $open + echo "[INFO] All words are valid." + + echo "[INFO] Config file checked valid." +@@ -67,7 +79,7 @@ function check_addr(){ + echo "[ERROR] Invalid ip of $1" + exit 1 + fi +- check_num $2 "port" ++ check_num ${2-"port"} "port" + if [[ $2 -gt 65534 || $2 -lt 1025 ]]; then + echo "[ERROR] Invalid port of $2" + exit 1 +@@ -100,16 +112,21 @@ function check_num(){ + } + + function check_word(){ +- result=`echo $1 | grep -wE "$2"` ++ if [ -z $3 ]; then ++ echo "[ERROR] The value of below config names is None in: $SYS_PATH/package.ini, Please check these parameters: $1" ++ exit 1 ++ fi ++ ++ result=`echo $3 | grep -wE "$2"` + if [ $? 
-ne 0 ]; then +- echo "[ERROR] $3 should be $2." ++ echo "[ERROR] $1 should be $2." + exit 1 + fi + } + + + function get_config(){ +- cat $SYS_PATH/package.ini | grep -E ^$2 | sed s/[[:space:]]//g | awk 'BEGIN{FS="="}{print $2}' ++ cat $SYS_PATH/package.ini | grep -E ^$2 | sed 's/[[:space:]]//g' | awk 'BEGIN{FS="="}{print $2}' + } + + function create_config_file(){ +@@ -120,12 +137,12 @@ function create_config_file(){ + harakiri=$(get_config "$service" "harakiri") + uwsgi_file_path=$(find /usr/lib/ -name "packageship" | head -n 1) + echo "[INFO] run packageship under path: $uwsgi_file_path" +- if [ $service = "manage" -o $service = "all" ];then ++ if [ $service = "manage" -o $service = "all" ]; then + write_port=$(get_config "$service" "write_port") + write_ip_addr=$(get_config "$service" "write_ip_addr") + if [[ -z "$daemonize" ]] || [[ -z "$buffer_size" ]] || [[ -z "$write_ip_addr" ]] || [[ -z "$http_timeout" ]] || [[ -z "$harakiri" ]] || [[ -z "$write_port" ]]; + then +- echo "[ERROR] CAN NOT find all config name in: $SYS_PATH/package.ini, Please check the file" ++ echo "[ERROR] CAN NOT find all config name in: $SYS_PATH/package.ini, Please check the file" + echo "[ERROR] The following config name is needed: daemonize, buffer-size, write_port, write_ip_addr, harakiri and http-timeout" + exit 1 + fi diff --git a/pkgship-1.1.0.tar.gz b/pkgship-1.1.0.tar.gz index acc9a1a..9e45007 100644 Binary files a/pkgship-1.1.0.tar.gz and b/pkgship-1.1.0.tar.gz differ diff --git a/pkgship.spec b/pkgship.spec old mode 100755 new mode 100644 index 606ce5c..edc14f7 --- a/pkgship.spec +++ b/pkgship.spec @@ -1,11 +1,15 @@ Name: pkgship Version: 1.1.0 -Release: 3 +Release: 4 Summary: Pkgship implements rpm package dependence ,maintainer, patch query and so no. 
License: Mulan 2.0 URL: https://gitee.com/openeuler/openEuler-Advisor Source0: https://gitee.com/openeuler/openEuler-Advisor/pkgship-%{version}.tar.gz +# Modify the query logic of package information, reduce redundant queries and align dnf query results, +# extract multiplexing functions, add corresponding docString, and clear pylint +Patch0: 0001-solve-installation-dependency-query-error.patch + BuildArch: noarch BuildRequires: python3-flask-restful python3-flask python3 python3-pyyaml python3-sqlalchemy @@ -21,7 +25,7 @@ Requires: python3-pandas python3-dateutil python3-XlsxWriter python3-xlrd python Pkgship implements rpm package dependence ,maintainer, patch query and so no. %prep -%autosetup -n pkgship-%{version} +%autosetup -n pkgship-%{version} -p1 %build %py3_build @@ -54,6 +58,9 @@ rm -rf $log_path %attr(0755,root,root) %{_bindir}/pkgship %changelog +* Thu Sep 17 2020 Shenmei Tu - 1.1.0-4 +- Modify the query logic of package information, reduce redundant queries and align dnf query results, extract multiplexing functions, add corresponding docString, and clear pylint + * Fri Sep 11 2020 Yiru Wang - 1.1.0-3 - #I1UCM8, #I1UC8G: Modify some config files' permission issue; - #I1TIYQ: Add concurrent-log-handler module to fix log resource conflict issue