diff --git a/0001-solve-installation-dependency-query-error.patch b/0001-solve-installation-dependency-query-error.patch deleted file mode 100644 index 42851bf..0000000 --- a/0001-solve-installation-dependency-query-error.patch +++ /dev/null @@ -1,1632 +0,0 @@ -diff --git a/packageship/application/apps/package/function/build_depend.py b/packageship/application/apps/package/function/build_depend.py -index 92351e7..b68eb91 100644 ---- a/packageship/application/apps/package/function/build_depend.py -+++ b/packageship/application/apps/package/function/build_depend.py -@@ -20,8 +20,10 @@ class BuildDepend(): - result_dict:A dictionary to store the data that needs to be echoed - source_dict:A dictionary to store the searched source code package name - not_found_components: Contain the package not found components -+ __already_pk_val:List of pkgKey found - """ - -+ # pylint: disable = R0902 - def __init__(self, pkg_name_list, db_list, self_build=0, history_dict=None): - """ - init class -@@ -38,6 +40,8 @@ class BuildDepend(): - self.history_dicts = history_dict if history_dict else {} - self.not_found_components = set() - -+ self.__already_pk_val = [] -+ - def build_depend_main(self): - """ - Description: Entry function -@@ -67,7 +71,8 @@ class BuildDepend(): - # Here, a place holder is needed to prevent unpacking errors during call - # 2, This function is an auxiliary function of other modules. - # The status code is not the final display status code -- return ResponseCode.SUCCESS, self.result_dict, self.source_dict, self.not_found_components -+ return (ResponseCode.SUCCESS, self.result_dict, -+ self.source_dict, self.not_found_components) - - return ResponseCode.PARAM_ERROR, None, None, set() - -@@ -80,7 +85,13 @@ class BuildDepend(): - ResponseCode: response code - Raises: - """ -- res_status, build_list, not_fd_com_build = self.search_db.get_build_depend(pkg_list) -+ (res_status, -+ build_list, -+ not_fd_com_build, -+ pk_v -+ ) = self.search_db.get_build_depend(pkg_list, self.__already_pk_val) -+ -+ self.__already_pk_val += pk_v - self.not_found_components.update(not_fd_com_build) - if not build_list: - return res_status if res_status == ResponseCode.DIS_CONNECTION_DB else \ -@@ -91,7 +102,8 @@ class BuildDepend(): - - code, res_dict, not_fd_com_install = \ - InstallDepend(self.db_list).query_install_depend(search_list, -- self.history_dicts) -+ self.history_dicts, -+ self.__already_pk_val) - self.not_found_components.update(not_fd_com_install) - if not res_dict: - return code -@@ -189,7 +201,13 @@ class BuildDepend(): - return - - next_src_set = set() -- _, bin_info_lis, not_fd_com = self.search_db.get_build_depend(pkg_name_li) -+ (_, -+ bin_info_lis, -+ not_fd_com, -+ pk_v -+ ) = self.search_db.get_build_depend(pkg_name_li, -+ self.__already_pk_val) -+ self.__already_pk_val += pk_v - self.not_found_components.update(not_fd_com) - if not bin_info_lis: - return -diff --git a/packageship/application/apps/package/function/install_depend.py b/packageship/application/apps/package/function/install_depend.py -index f3cf05e..c4afe2e 100644 ---- a/packageship/application/apps/package/function/install_depend.py -+++ b/packageship/application/apps/package/function/install_depend.py -@@ -5,9 +5,8 @@ Description: Querying for install dependencies - class: InstallDepend, DictionaryOperations - """ - from packageship.libs.log import Log --from .searchdb import SearchDB --from .constants import ResponseCode --from .constants import ListNode -+from packageship.application.apps.package.function.searchdb 
import SearchDB -+from packageship.application.apps.package.function.constants import ResponseCode, ListNode - - LOGGER = Log(__name__) - -@@ -21,9 +20,11 @@ class InstallDepend(): - binary_dict: Contain all the binary packages info and operation - __search_db: A object of database which would be connected - not_found_components: Contain the package not found components -+ __already_pk_value: List of pkgKey found - changeLog: - """ -- #pylint: disable = too-few-public-methods -+ -+ # pylint: disable = too-few-public-methods - def __init__(self, db_list): - """ - Initialization class -@@ -34,14 +35,16 @@ class InstallDepend(): - self.db_list = db_list - self.__search_db = SearchDB(db_list) - self.not_found_components = set() -+ self.__already_pk_value = [] - -- def query_install_depend(self, binary_list, history_dicts=None): -+ def query_install_depend(self, binary_list, history_pk_val=None, history_dicts=None): - """ - Description: init result dict and determint the loop end point - Args: - binary_list: A list of binary rpm package name - history_dicts: record the searching install depend history, - defualt is None -+ history_pk_val:List of pkgKey found - Returns: - binary_dict.dictionary: - {binary_name: [ -@@ -64,7 +67,8 @@ class InstallDepend(): - if binary: - self.__search_list.append(binary) - else: -- LOGGER.logger.warning("There is a NONE in input value:" + str(binary_list)) -+ LOGGER.logger.warning("There is a NONE in input value: %s", str(binary_list)) -+ self.__already_pk_value += history_pk_val if history_pk_val else [] - while self.__search_list: - self.__query_single_install_dep(history_dicts) - return ResponseCode.SUCCESS, self.binary_dict.dictionary, self.not_found_components -@@ -78,8 +82,14 @@ class InstallDepend(): - response_code: response code - Raises: - """ -- result_list, not_found_components = map(set, self.__search_db.get_install_depend(self.__search_list)) -+ result_list, not_found_components, pk_val = map( -+ set, -+ self.__search_db.get_install_depend(self.__search_list, -+ self.__already_pk_value) -+ ) -+ - self.not_found_components.update(not_found_components) -+ self.__already_pk_value += pk_val - for search in self.__search_list: - if search not in self.binary_dict.dictionary: - self.binary_dict.init_key(key=search, parent_node=[]) -@@ -108,7 +118,7 @@ class InstallDepend(): - version=history_dicts[result.depend_name][ListNode.VERSION], - dbname=None, - parent_node=[[result.search_name, 'install']] -- ) -+ ) - else: - self.binary_dict.init_key(key=result.depend_name, - parent_node=[[result.search_name, 'install']]) -@@ -129,6 +139,7 @@ class DictionaryOperations(): - """ - self.dictionary = dict() - -+ # pylint: disable=R0913 - def init_key(self, key, src=None, version=None, dbname=None, parent_node=None): - """ - Description: Creating dictionary -@@ -146,6 +157,7 @@ class DictionaryOperations(): - else: - self.dictionary[key] = [src, version, dbname, parent_node] - -+ # pylint: disable=R0913 - def update_value(self, key, src=None, version=None, dbname=None, parent_node=None): - """ - Description: append dictionary -diff --git a/packageship/application/apps/package/function/searchdb.py b/packageship/application/apps/package/function/searchdb.py -index 400d422..1624e0d 100644 ---- a/packageship/application/apps/package/function/searchdb.py -+++ b/packageship/application/apps/package/function/searchdb.py -@@ -4,7 +4,7 @@ Description: A set for all query databases function - class: SearchDB - functions: db_priority - """ --from collections import namedtuple 
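Counter, added on the line below, backs the provider-selection rule introduced much further down in `_get_provides_req_info`: when several binary packages provide the same component, the patch picks the candidate pkgKey that already appears most often in the accumulated history, so successive queries keep resolving to the same provider instead of flip-flopping between equivalent ones. A minimal sketch of that rule, with illustrative names and dict shapes rather than the shipped API:

    from collections import Counter

    def pick_providers(req_pk_dict, pk_history):
        # req_pk_dict: component name -> candidate provider pkgKeys
        # pk_history:  every pkgKey accumulated across earlier queries
        counts = Counter(pk_history)
        # max() keeps the first candidate on ties, matching the patch's
        # count_values.index(max(count_values)) behaviour
        return {comp: max(keys, key=lambda pk: counts[pk])
                for comp, keys in req_pk_dict.items()}

    # With pk_history = [11, 11, 42], a component provided by [42, 11]
    # resolves to pkgKey 11, staying consistent with earlier picks.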
-+from collections import namedtuple, Counter - - import yaml - from flask import current_app -@@ -15,10 +15,10 @@ from sqlalchemy import exists - - from packageship.libs.dbutils import DBHelper - from packageship.libs.log import Log --from packageship.application.models.package import BinPack,SrcPack -+from packageship.application.models.package import BinPack, SrcPack - from packageship.libs.exception import ContentNoneException, Error - from packageship.system_config import DATABASE_FILE_INFO --from .constants import ResponseCode -+from packageship.application.apps.package.function.constants import ResponseCode - - LOGGER = Log(__name__) - -@@ -50,343 +50,231 @@ class SearchDB(): - except DisconnectionError as connection_error: - current_app.logger.error(connection_error) - -- def get_install_depend(self, binary_list): -+ # Related methods of install -+ # pylint: disable=R0914 -+ def get_install_depend(self, binary_list, pk_value=None): - """ - Description: get a package install depend from database: - binary_name -> binary_id -> requires_set -> requires_id_set -> provides_set - -> install_depend_binary_id_key_list -> install_depend_binary_name_list - Args: - binary_list: a list of binary package name -+ pk_value:List of pkgKey found - Returns: - list:install depend list -- set:package not found components -+ set:package not found components, -+ pk_val:The pkgkey corresponding to the required components - Raises: - """ -+ pk_val = pk_value if pk_value else [] - result_list = [] -- get_list = [] - provides_not_found = dict() -+ - if not self.db_object_dict: -- LOGGER.logger.warning("Unable to connect to the database, \ -- check the database configuration") -- return result_list -+ LOGGER.logger.warning("Unable to connect to the database," -+ "check the database configuration") -+ return result_list, set(), pk_val -+ - if None in binary_list: - binary_list.remove(None) - search_set = set(binary_list) -+ - if not search_set: -- LOGGER.logger.warning( -- "The input is None, please check the input value.") -- return result_list -- return_tuple = namedtuple('return_tuple', -- 'depend_name depend_version depend_src_name \ -- search_name search_src_name search_version') -+ LOGGER.logger.warning("The input is None, please check the input value.") -+ return result_list, set(), pk_val -+ -+ return_tuple = namedtuple('return_tuple', [ -+ 'depend_name', -+ 'depend_version', -+ 'depend_src_name', -+ 'search_name', -+ 'search_src_name', -+ 'search_version' -+ ]) -+ - for db_name, data_base in self.db_object_dict.items(): - try: -- name_in = literal_column('name').in_(search_set) -- sql_com = text(""" -- SELECT DISTINCT -- bin_pack.NAME AS depend_name, -- bin_pack.version AS depend_version, -- s2.name AS depend_src_name, -- bin_requires.NAME AS req_name, -- bin.NAME AS search_name, -- s1.name AS search_src_name, -- bin.version AS search_version -- FROM -- ( SELECT pkgKey, NAME, version, rpm_sourcerpm FROM bin_pack WHERE {} ) bin -- LEFT JOIN src_pack s1 ON bin.rpm_sourcerpm = s1.src_name -- LEFT JOIN bin_requires ON bin.pkgKey = bin_requires.pkgKey -- LEFT JOIN bin_provides ON bin_provides.name = bin_requires.name -- LEFT JOIN bin_pack ON bin_pack.pkgKey = bin_provides.pkgKey -- LEFT JOIN src_pack s2 ON bin_pack.rpm_sourcerpm = s2.src_name; -- """.format(name_in)) -- install_set = data_base.session. 
\ -- execute(sql_com, {'name_{}'.format(i): v -- for i, v in enumerate(search_set, 1)}).fetchall() -- if install_set: -- # find search_name in db_name -- # depend_name's db_name will be found in next loop -- for result in install_set: -- get_list.append(result.search_name) -- if not result.depend_name and result.req_name: -- if result.req_name in provides_not_found: -- provides_not_found[result.req_name].append( -- [result.search_name, result.search_src_name, result.search_version, db_name]) -- else: -- provides_not_found[result.req_name] = [ -- [result.search_name, result.search_src_name, result.search_version, db_name]] -- else: -- obj = return_tuple( -- result.depend_name, -- result.depend_src_name, -- result.depend_version, -- result.search_name, -- result.search_src_name, -- result.search_version, -- ) -- result_list.append((obj, db_name)) -- get_set = set(get_list) -- get_list.clear() -- search_set.symmetric_difference_update(get_set) -- if not search_set: -- install_result = self._get_install_pro_in_other_database( -- provides_not_found) -- result_list.extend(install_result) -- return result_list, set(provides_not_found.keys()) -- else: -+ req_set = self._get_requires(search_set, data_base, _tp='install') -+ -+ if not req_set: - continue -- except AttributeError as error_msg: -- LOGGER.logger.error(error_msg) -- except SQLAlchemyError as error_msg: -- LOGGER.logger.error(error_msg) -- install_result = self._get_install_pro_in_other_database( -- provides_not_found) -- result_list.extend(install_result) -- for binary_name in search_set: -- result_list.append((return_tuple(None, None, None, -- binary_name, None, None), 'NOT FOUND')) -- return result_list, set(provides_not_found.keys()) - -- def get_src_name(self, binary_name): -- """ -- Description: get a package source name from database: -- bianry_name ->binary_source_name -> source_name -- Args: -- binary_name: search package's name, database preority list -- Returns: -- db_name: database name -- source_name: source name -- source_version: source version -- Raises: -- AttributeError: The object does not have this property -- SQLAlchemyError: sqlalchemy error -- """ -- for db_name, data_base in self.db_object_dict.items(): -- sql_str = """ -- SELECT DISTINCT -- src_pack.name AS source_name, -- src_pack.version AS source_version -- FROM -- bin_pack, -- src_pack -- WHERE -- src_pack.src_name = bin_pack.rpm_sourcerpm -- AND bin_pack.name = :binary_name; -- """ -- try: -- bin_obj = data_base.session.execute(text(sql_str), {"binary_name": binary_name}).fetchone() -- source_name = bin_obj.source_name -- source_version = bin_obj.source_version -- if source_name is not None: -- return ResponseCode.SUCCESS, db_name, \ -- source_name, source_version -+ (depend_set, -+ req_pk_dict, -+ pk_v, -+ not_fd_com) = self._get_provides_req_info(req_set, -+ data_base, -+ pk_val) -+ pk_val += pk_v -+ res_list, get_list = self._comb_install_list(depend_set, -+ req_pk_dict, -+ not_fd_com, -+ return_tuple, -+ db_name, -+ provides_not_found, -+ req_set) -+ -+ result_list += res_list -+ -+ search_set.symmetric_difference_update(set(get_list)) -+ -+ if not search_set: -+ result_list.extend( -+ self._get_install_pro_in_other_database(provides_not_found, -+ db_name) -+ ) -+ return result_list, set(provides_not_found.keys()), pk_val -+ - except AttributeError as error_msg: - LOGGER.logger.error(error_msg) - except SQLAlchemyError as error_msg: - LOGGER.logger.error(error_msg) -- return ResponseCode.DIS_CONNECTION_DB, None, None, None -- return 
ResponseCode.PACK_NAME_NOT_FOUND, None, None, None -- -- def get_sub_pack(self, source_name_list): -+ if search_set: -+ result_list.extend( -+ self._get_install_pro_in_other_database(provides_not_found) -+ ) -+ -+ for binary_name in search_set: -+ result_list.append((return_tuple(None, None, None, -+ binary_name, None, None), 'NOT FOUND')) -+ return result_list, set(provides_not_found.keys()), pk_val -+ -+ # pylint: disable=R0913 -+ @staticmethod -+ def _comb_install_list(depend_set, -+ req_pk_dict, -+ not_fd_com, -+ return_tuple, -+ db_name, -+ provides_not_found, -+ req_set): - """ -- Description: get a subpack list based on source name list: -- source_name ->source_name_id -> binary_name -+ Description: Query the corresponding installation dependency list -+ through the components of the requirements - Args: -- source_name_list: search package's name, database preority list -+ depend_set: List binary package information corresponding to the components -+ req_pk_dict:Mapping of components and binary pkgKey -+ not_fd_com: List of pkgKey found, -+ return_tuple: Named tuple format for saving information -+ db_name:current database name -+ provides_not_found:Component mapping not found in the current database -+ req_set:Package information and corresponding component information - Returns: -- response code -- result_list: subpack tuple -+ ret_list:install depend list -+ get_list:Packages that have found results - Raises: -- AttributeError: The object does not have this property -- SQLAlchemyError: sqlalchemy error - """ -- if not self.db_object_dict: -- return ResponseCode.DIS_CONNECTION_DB, None -- search_set = set([ -- source_name for source_name in source_name_list if source_name]) -- result_list = [] - get_list = [] -- if not search_set: -- return ResponseCode.INPUT_NONE, None -- for db_name, data_base in self.db_object_dict.items(): -- try: -- name_in = literal_column('name').in_(search_set) -- sql_com = text(''' -- SELECT -- bin_pack.name AS subpack_name, -- bin_pack.version AS sub_pack_version, -- src.name AS search_name, -- src.version AS search_version -- FROM -- (SELECT name,version,src_name FROM src_pack WHERE {}) src -- LEFT JOIN bin_pack on src.src_name = bin_pack.rpm_sourcerpm'''.format(name_in)) -- subpack_tuple = data_base.session. 
\ -- execute(sql_com, {'name_{}'.format(i): v -- for i, v in enumerate(search_set, 1)}).fetchall() -- if subpack_tuple: -- for result in subpack_tuple: -- result_list.append((result, db_name)) -- get_list.append(result.search_name) -- search_set.symmetric_difference_update(set(get_list)) -- get_list.clear() -- if not search_set: -- return ResponseCode.SUCCESS, result_list -- else: -- continue -- except AttributeError as attr_error: -- current_app.logger.error(attr_error) -- except SQLAlchemyError as sql_error: -- current_app.logger.error(sql_error) -- return_tuple = namedtuple( -- 'return_tuple', 'subpack_name sub_pack_version search_version search_name') -- for search_name in search_set: -- result_list.append( -- (return_tuple(None, None, None, search_name), 'NOT_FOUND')) -- return ResponseCode.SUCCESS, result_list -+ ret_list = [] -+ depend_info_tuple = namedtuple('depend_info', [ -+ 'depend_name', -+ 'depend_version', -+ 'depend_src_name' -+ ]) -+ depend_info_dict = { -+ info.pk: depend_info_tuple(info.depend_name, -+ info.depend_version, -+ info.depend_src_name) -+ for info in depend_set -+ } -+ -+ for req_name, search_name, search_src_name, search_version in req_set: -+ get_list.append(search_name) -+ -+ if not req_name: -+ obj = return_tuple( -+ None, -+ None, -+ None, -+ search_name, -+ search_src_name, -+ search_version, -+ ) -+ ret_list.append((obj, db_name)) -+ -+ elif req_name in req_pk_dict: -+ depend_info_t = depend_info_dict.get(req_pk_dict[req_name]) -+ obj = return_tuple( -+ depend_info_t.depend_name, -+ depend_info_t.depend_version, -+ depend_info_t.depend_src_name, -+ search_name, -+ search_src_name, -+ search_version, -+ ) -+ ret_list.append((obj, db_name)) -+ -+ else: -+ if req_name in not_fd_com: -+ if req_name not in provides_not_found: -+ provides_not_found[req_name] = [[search_name, search_src_name, -+ search_version, db_name]] -+ else: -+ provides_not_found[req_name].append([search_name, search_src_name, -+ search_version, db_name]) -+ -+ return ret_list, get_list - -- def _get_binary_in_other_database(self, not_found_binary): -+ def _get_install_pro_in_other_database(self, not_found_binary, _db_name=None): - """ - Description: Binary package name data not found in - the current database, go to other databases to try - Args: - not_found_binary: not_found_build These data cannot be found in the current database -- db_:current database name -+ _db_name:current database name - Returns: -- a list :[(search_name,source_name,bin_name, -- bin_version,db_name,search_version,req_name), -- (search_name,source_name,bin_name, -- bin_version,db_name,search_version,req_name),] -+ result_list :[return_tuple1,return_tuple2] package information - Raises: -- AttributeError: The object does not have this property -- SQLAlchemyError: sqlalchemy error - """ - if not not_found_binary: - return [] - -- return_tuple = namedtuple("return_tuple", [ -- "search_name", -- "source_name", -- "bin_name", -- "version", -- "db_name", -- "search_version", -+ return_tuple = namedtuple('return_tuple', [ -+ 'depend_name', -+ 'depend_version', -+ 'depend_src_name', -+ 'search_name', -+ 'search_src_name', -+ 'search_version' - ]) -- search_list = [] -+ - result_list = [] -+ search_set = {k for k, _ in not_found_binary.items()} -+ - for db_name, data_base in self.db_object_dict.items(): -- for key, _ in not_found_binary.items(): -- search_list.append(key) -+ if db_name == _db_name: -+ continue - -- search_set = set(search_list) -- search_list.clear() -- try: -- sql_string = text(""" -- SELECT DISTINCT -- 
s1.name AS source_name, -- t1.NAME AS bin_name, -- t1.version, -- t2.NAME AS req_name -- FROM -- src_pack s1, -- bin_pack t1, -- bin_provides t2 -- WHERE -- t2.{} -- AND t1.pkgKey = t2.pkgKey -- AND t1.rpm_sourcerpm = s1.src_name; -- """.format(literal_column('name').in_(search_set))) -- bin_set = data_base.session. \ -- execute(sql_string, {'name_{}'.format(i): v -- for i, v in enumerate(search_set, 1)}).fetchall() -- if bin_set: -- for result in bin_set: -- if result.req_name not in not_found_binary: -- LOGGER.logger.warning( -- result.req_name + " contains in two rpm packages!!!") -- else: -- for source_info in not_found_binary[result.req_name]: -- obj = return_tuple( -- source_info[0], -- result.source_name, -- result.bin_name, -- result.version, -- db_name, -- source_info[1] -- ) -- result_list.append(obj) -- del not_found_binary[result.req_name] -- if not not_found_binary: -- return result_list -- except AttributeError as attr_err: -- current_app.logger.error(attr_err) -- except SQLAlchemyError as sql_err: -- current_app.logger.error(sql_err) -+ parm_tuple = namedtuple("in_tuple", 'req_name') -+ in_tuple_list = [parm_tuple(k) for k, _ in not_found_binary.items()] -+ -+ depend_set, req_pk_dict, *_ = self._get_provides_req_info( -+ in_tuple_list, -+ data_base -+ ) -+ -+ depend_info_tuple = namedtuple('depend_info', [ -+ 'depend_name', -+ 'depend_version', -+ 'depend_src_name' -+ ]) -+ depend_info_dict = { -+ info.pk: depend_info_tuple(info.depend_name, -+ info.depend_version, -+ info.depend_src_name) -+ for info in depend_set -+ } -+ result_list += self._comb_install_info(search_set, -+ req_pk_dict, -+ depend_info_dict, -+ not_found_binary, -+ return_tuple, -+ db_name) -+ if not not_found_binary: -+ return result_list - - if not_found_binary: -- for key, values in not_found_binary.items(): -- for info in values: -- obj = return_tuple( -- info[0], -- None, -- None, -- None, -- 'NOT FOUND', -- info[2] -- ) -- result_list.append(obj) -- return result_list -- -- def _get_install_pro_in_other_database(self, not_found_binary): -- if not not_found_binary: -- return [] -- return_tuple = namedtuple('return_tuple', -- 'depend_name depend_version depend_src_name \ -- search_name search_src_name search_version') -- search_list = [] -- result_list = [] -- for db_name, data_base in self.db_object_dict.items(): -- for key, values in not_found_binary.items(): -- search_list.append(key) -- search_set = set(search_list) -- search_list.clear() -- sql_string = text(""" -- SELECT DISTINCT -- s1.name AS source_name, -- t1.NAME AS bin_name, -- t1.version, -- t2.NAME AS req_name -- FROM -- src_pack s1, -- bin_pack t1, -- bin_provides t2 -- WHERE -- t2.{} -- AND t1.pkgKey = t2.pkgKey -- AND t1.rpm_sourcerpm = s1.src_name; -- """.format(literal_column('name').in_(search_set))) -- bin_set = data_base.session. 
\ -- execute(sql_string, {'name_{}'.format(i): v -- for i, v in enumerate(search_set, 1)}).fetchall() -- if bin_set: -- for result in bin_set: -- if result.req_name not in not_found_binary: -- LOGGER.logger.warning( -- result.req_name + " contains in two rpm packages!!!") -- else: -- for binary_info in not_found_binary[result.req_name]: -- obj = return_tuple( -- result.bin_name, -- result.version, -- result.source_name, -- binary_info[0], -- binary_info[1], -- binary_info[2] -- ) -- result_list.append((obj, binary_info[3])) -- del not_found_binary[result.req_name] -- if not not_found_binary: -- return result_list -- if not_found_binary: -- for key, values in not_found_binary.items(): -+ for _, values in not_found_binary.items(): - for info in values: - obj = return_tuple( - None, -@@ -399,11 +287,52 @@ class SearchDB(): - result_list.append((obj, info[3])) - return result_list - -- def get_build_depend(self, source_name_li): -+ @staticmethod -+ def _comb_install_info(search_set, -+ req_pk_dict, -+ depend_info_dict, -+ not_found_binary, -+ return_tuple, -+ db_name): -+ """ -+ Description: Binary package name data not found in -+ the current database, go to other databases to try -+ Args: -+ search_set: The name of the component to be queried -+ req_pk_dict:Mapping of components and binary pkgKey -+ depend_info_dict:The mapping of binary pkgKey and binary information -+ not_found_binary:not_found_build These data cannot be found in the current database -+ return_tuple:Named tuple format for saving information -+ db_name:current database name -+ Returns: -+ ret_list :[return_tuple1,return_tuple2] package information -+ Raises: -+ """ -+ ret_list = [] -+ for req_name in search_set: -+ if req_name in req_pk_dict: -+ pk_ = req_pk_dict[req_name] -+ if pk_ in depend_info_dict: -+ for binary_info in not_found_binary[req_name]: -+ obj = return_tuple( -+ depend_info_dict[pk_].depend_name, -+ depend_info_dict[pk_].depend_version, -+ depend_info_dict[pk_].depend_src_name, -+ binary_info[0], -+ binary_info[1], -+ binary_info[2] -+ ) -+ ret_list.append((obj, db_name)) -+ del not_found_binary[req_name] -+ return ret_list -+ -+ # Related methods of build -+ def get_build_depend(self, source_name_li, pk_value=None): - """ - Description: get a package build depend from database - Args: - source_name_li: search package's name list -+ pk_value:List of pkgKey found - Returns: - all source pkg build depend list - structure :[(search_name,source_name,bin_name,bin_version,db_name,search_version), -@@ -422,93 +351,428 @@ class SearchDB(): - "db_name", - "search_version" - ]) -- -+ pk_val = pk_value if pk_value else [] - s_name_set = set(source_name_li) - if not s_name_set: -- return ResponseCode.PARAM_ERROR, set() -+ return ResponseCode.PARAM_ERROR, list(), set(), pk_val - - provides_not_found = dict() - build_list = [] - - for db_name, data_base in self.db_object_dict.items(): - -- build_set = [] - try: -- temp_list = list(s_name_set) -- for input_name_li in [temp_list[i:i + 900] for i in range(0, len(temp_list), 900)]: -- sql_com = text(""" -- SELECT DISTINCT -- src.NAME AS search_name, -- src.version AS search_version, -- s1.name AS source_name, -- bin_provides.pkgKey AS bin_id, -- src_requires.NAME AS req_name, -- bin_pack.version AS version, -- bin_pack.NAME AS bin_name -- FROM -- ( SELECT pkgKey, NAME, version FROM src_pack WHERE {}) src -- LEFT JOIN src_requires ON src.pkgKey = src_requires.pkgKey -- LEFT JOIN bin_provides ON bin_provides.NAME = src_requires.NAME -- LEFT JOIN bin_pack ON bin_pack.pkgKey = 
bin_provides.pkgKey -- LEFT JOIN src_pack s1 on bin_pack.rpm_sourcerpm=s1.src_name; -- """.format(literal_column("name").in_(input_name_li))) -- res = data_base.session.execute( -- sql_com, -- {'name_{}'.format(i): v -- for i, v in enumerate(input_name_li, 1)} -- ).fetchall() -- -- build_set.extend(res) -+ req_set = self._get_requires(s_name_set, data_base, _tp='build') -+ -+ if not req_set: -+ continue -+ -+ (depend_set, -+ req_pk_dict, -+ pk_v, -+ not_fd_req) = self._get_provides_req_info(req_set, data_base) -+ -+ pk_val += pk_v -+ ret_list, get_list = self._comb_build_list(depend_set, -+ req_pk_dict, -+ not_fd_req, -+ return_tuple, -+ db_name, -+ provides_not_found, -+ req_set) -+ build_list += ret_list -+ s_name_set.symmetric_difference_update(set(get_list)) -+ if not s_name_set: -+ build_list.extend( -+ self._get_binary_in_other_database(provides_not_found, _db_name=db_name) -+ ) -+ return ResponseCode.SUCCESS, build_list, set(provides_not_found.keys()), pk_val -+ - except AttributeError as attr_err: - current_app.logger.error(attr_err) - except SQLAlchemyError as sql_err: - current_app.logger.error(sql_err) - -- if not build_set: -+ if s_name_set: -+ build_list.extend( -+ self._get_binary_in_other_database(provides_not_found) -+ ) -+ for source in s_name_set: -+ LOGGER.logger.warning( -+ "CANNOT FOUND THE SOURCE %s in all database", source) -+ -+ return ResponseCode.SUCCESS, build_list, set(provides_not_found.keys()), pk_val -+ -+ @staticmethod -+ def _comb_build_list(depend_set, -+ req_pk_dict, -+ not_fd_com, -+ return_tuple, -+ db_name, -+ provides_not_found, -+ req_set): -+ """ -+ Description: Query the corresponding build dependency list -+ through the components of the requirements -+ Args: -+ depend_set: List binary package information corresponding to the components -+ req_pk_dict:Mapping of components and binary pkgKey -+ not_fd_com: List of pkgKey found, -+ return_tuple: Named tuple format for saving information -+ db_name:current database name -+ provides_not_found:Component mapping not found in the current database -+ req_set:Package information and corresponding component information -+ Returns: -+ ret_list:install depend list -+ get_list:Packages that have found results -+ Raises: -+ """ -+ get_list = [] -+ ret_list = [] -+ depend_info_tuple = namedtuple('depend_info', [ -+ 'depend_name', -+ 'depend_version', -+ 'depend_src_name' -+ ]) -+ depend_info_dict = { -+ info.pk: depend_info_tuple(info.depend_name, -+ info.depend_version, -+ info.depend_src_name) -+ for info in depend_set -+ } -+ -+ for req_name, search_name, search_version in req_set: -+ -+ get_list.append(search_name) -+ -+ if not req_name: -+ obj = return_tuple( -+ search_name, -+ None, -+ None, -+ None, -+ db_name, -+ search_version, -+ ) -+ ret_list.append(obj) -+ -+ elif req_name in req_pk_dict: -+ depend_info_t = depend_info_dict.get(req_pk_dict[req_name]) -+ obj = return_tuple( -+ search_name, -+ depend_info_t.depend_src_name, -+ depend_info_t.depend_name, -+ depend_info_t.depend_version, -+ db_name, -+ search_version -+ ) -+ ret_list.append(obj) -+ -+ else: -+ if req_name in not_fd_com: -+ if req_name not in provides_not_found: -+ provides_not_found[req_name] = [ -+ [search_name, -+ search_version, -+ db_name] -+ ] -+ else: -+ provides_not_found[req_name].append([search_name, -+ search_version, -+ db_name]) -+ -+ return ret_list, get_list -+ -+ def _get_binary_in_other_database(self, not_found_binary, _db_name=None): -+ """ -+ Description: Binary package name data not found in -+ the current database, go 
to other databases to try -+ Args: -+ not_found_binary: not_found_build These data cannot be found in the current database -+ _db_name:current database name -+ Returns: -+ result_list :[return_tuple1,return_tuple2] package information -+ Raises: -+ AttributeError: The object does not have this property -+ SQLAlchemyError: sqlalchemy error -+ """ -+ if not not_found_binary: -+ return [] -+ -+ return_tuple = namedtuple("return_tuple", [ -+ "search_name", -+ "source_name", -+ "bin_name", -+ "version", -+ "db_name", -+ "search_version", -+ ]) -+ -+ result_list = [] -+ search_set = {k for k, _ in not_found_binary.items()} -+ -+ for db_name, data_base in self.db_object_dict.items(): -+ -+ if db_name == _db_name: - continue - -- # When processing source package without compilation dependency -- get_list = [] -- for result in build_set: -- get_list.append(result.search_name) -- if not result.bin_name and result.req_name: -- if result.req_name in provides_not_found: -- provides_not_found[result.req_name].append( -- [result.search_name, result.search_version, db_name] -- ) -- else: -- provides_not_found[result.req_name] = [ -- [result.search_name, result.search_version, db_name] -- ] -- else: -+ in_tuple = namedtuple("in_tuple", 'req_name') -+ in_tuple_list = [in_tuple(k) for k, _ in not_found_binary.items()] -+ -+ depend_set, req_pk_dict, *_ = self._get_provides_req_info( -+ in_tuple_list, -+ data_base -+ ) -+ -+ depend_info_tuple = namedtuple('depend_info', [ -+ 'depend_name', -+ 'depend_version', -+ 'depend_src_name' -+ ]) -+ depend_info_dict = { -+ info.pk: depend_info_tuple(info.depend_name, -+ info.depend_version, -+ info.depend_src_name) -+ for info in depend_set -+ } -+ -+ result_list += self._comb_build_info(search_set, -+ req_pk_dict, -+ depend_info_dict, -+ not_found_binary, -+ return_tuple, -+ db_name) -+ if not not_found_binary: -+ return result_list -+ -+ if not_found_binary: -+ for _, values in not_found_binary.items(): -+ for info in values: - obj = return_tuple( -- result.search_name, -- result.source_name, -- result.bin_name, -- result.version, -- db_name, -- result.search_version -+ info[0], -+ None, -+ None, -+ None, -+ 'NOT FOUND', -+ info[2] - ) -- build_list.append(obj) -+ result_list.append(obj) -+ return result_list - -- get_set = set(get_list) -- get_list.clear() -- s_name_set.symmetric_difference_update(get_set) -- if not s_name_set: -- build_result = self._get_binary_in_other_database( -- provides_not_found) -- build_list.extend(build_result) -- return ResponseCode.SUCCESS, build_list, set(provides_not_found.keys()) -+ @staticmethod -+ def _comb_build_info(search_set, -+ req_pk_dict, -+ depend_info_dict, -+ not_found_binary, -+ return_tuple, -+ db_name): -+ """ -+ Description: Binary package name data not found in -+ the current database, go to other databases to try -+ Args: -+ search_set: The name of the component to be queried -+ req_pk_dict:Mapping of components and binary pkgKey -+ depend_info_dict:The mapping of binary pkgKey and binary information -+ not_found_binary:not_found_build These data cannot be found in the current database -+ return_tuple:Named tuple format for saving information, -+ db_name:current data base name -+ Returns: -+ ret_list :[return_tuple1,return_tuple2] package information -+ Raises: -+ """ -+ ret_list = [] -+ for req_name in search_set: -+ if req_name in req_pk_dict: -+ pk_ = req_pk_dict[req_name] -+ if pk_ in depend_info_dict: -+ for binary_info in not_found_binary[req_name]: -+ obj = return_tuple( -+ binary_info[0], -+ 
depend_info_dict[pk_].depend_src_name, -+ depend_info_dict[pk_].depend_name, -+ depend_info_dict[pk_].depend_version, -+ db_name, -+ binary_info[1] -+ ) -+ ret_list.append(obj) -+ del not_found_binary[req_name] -+ return ret_list - -- if s_name_set: -- build_result = self._get_binary_in_other_database( -- provides_not_found) -- build_list.extend(build_result) -- for source in s_name_set: -- LOGGER.logger.warning( -- "CANNOT FOUND THE source " + source + " in all database") -- return ResponseCode.SUCCESS, build_list, set(provides_not_found.keys()) -+ # Common methods for install and build -+ @staticmethod -+ def _get_requires(search_set, data_base, _tp=None): -+ """ -+ Description: Query the dependent components of the current package -+ Args: -+ search_set: The package name to be queried -+ data_base:current database object -+ _tp:type options build or install -+ Returns: -+ req_set:List Package information and corresponding component information -+ Raises: -+ AttributeError: The object does not have this property -+ SQLAlchemyError: sqlalchemy error -+ """ -+ if _tp == 'build': -+ sql_com = text(""" -+ SELECT DISTINCT -+ src_requires.NAME AS req_name, -+ src.NAME AS search_name, -+ src.version AS search_version -+ FROM -+ ( SELECT pkgKey, NAME, version, src_name FROM src_pack WHERE {} ) src -+ LEFT JOIN src_requires ON src.pkgKey = src_requires.pkgKey; -+ """.format(literal_column('name').in_(search_set))) -+ elif _tp == 'install': -+ sql_com = text(""" -+ SELECT DISTINCT -+ bin_requires.NAME AS req_name, -+ bin.NAME AS search_name, -+ s1.name as search_src_name, -+ bin.version AS search_version -+ FROM -+ ( SELECT pkgKey, NAME, version, rpm_sourcerpm FROM bin_pack WHERE {} ) bin -+ LEFT JOIN src_pack s1 ON bin.rpm_sourcerpm = s1.src_name -+ LEFT JOIN bin_requires ON bin.pkgKey = bin_requires.pkgKey; -+ """.format(literal_column('name').in_(search_set))) -+ else: -+ return [] - -+ req_set = [] -+ try: -+ req_set = data_base.session. \ -+ execute(sql_com, {'name_{}'.format(i): v -+ for i, v in enumerate(search_set, 1)}).fetchall() -+ except AttributeError as error_msg: -+ LOGGER.logger.error(error_msg) -+ except SQLAlchemyError as error_msg: -+ LOGGER.logger.error(error_msg) -+ return req_set -+ -+ def _get_provides_req_info(self, req_info, data_base, pk_value=None): -+ """ -+ Description: Get the name of the binary package -+ that provides the dependent component, -+ Filter redundant queries -+ when the same binary package is provided to multiple components -+ Args: -+ req_info: List of sqlalchemy objects with component names. 
-+ data_base: The database currently being queried -+ pk_value:Binary pkgKey that has been found -+ Returns: -+ depend_set: List of related dependent sqlalchemy objects -+ req_pk_dict: Mapping dictionary of component name and pkgKey -+ pk_val:update Binary pkgKey that has been found -+ not_fd_req: Components not found -+ Raises: -+ AttributeError: The object does not have this property -+ SQLAlchemyError: sqlalchemy error -+ """ -+ pk_val = pk_value if pk_value else [] -+ depend_set = [] -+ req_pk_dict = {} -+ not_fd_req = set() -+ try: -+ req_names = {req_.req_name -+ for req_ in req_info -+ if req_.req_name is not None} -+ req_name_in = literal_column('name').in_(req_names) -+ -+ sql_com_pro = text(""" -+ SELECT DISTINCT -+ NAME as req_name, -+ pkgKey -+ FROM -+ ( SELECT name, pkgKey FROM bin_provides -+ UNION ALL -+ SELECT name, pkgKey FROM bin_files ) -+ WHERE -+ {}; -+ """.format(req_name_in)) -+ -+ pkg_key_set = data_base.session.execute( -+ sql_com_pro, { -+ 'name_{}'.format(i): v -+ for i, v in enumerate(req_names, 1) -+ } -+ ).fetchall() -+ -+ req_pk_dict = dict() -+ pk_v = list() -+ -+ for req_name, pk_ in pkg_key_set: -+ if not req_name: -+ continue -+ pk_v.append(pk_) -+ if req_name not in req_pk_dict: -+ req_pk_dict[req_name] = [pk_] -+ else: -+ req_pk_dict[req_name].append(pk_) -+ -+ pk_val += pk_v -+ -+ pk_count_dic = Counter(pk_val) -+ -+ for key, values in req_pk_dict.items(): -+ count_values = list(map( -+ lambda x: pk_count_dic[x] if x in pk_count_dic else 0, values -+ )) -+ max_index = count_values.index(max(count_values)) -+ req_pk_dict[key] = values[max_index] -+ -+ not_fd_req = req_names - set(req_pk_dict.keys()) -+ depend_set = self._get_depend_info(req_pk_dict, data_base) -+ -+ except SQLAlchemyError as sql_err: -+ LOGGER.logger.error(sql_err) -+ except AttributeError as error_msg: -+ LOGGER.logger.error(error_msg) -+ -+ return depend_set, req_pk_dict, pk_val, not_fd_req -+ -+ @staticmethod -+ def _get_depend_info(req_pk_dict, data_base): -+ """ -+ Description: Obtain binary related information through binary pkgKey -+ Args: -+ req_pk_dict: Mapping dictionary of component name and pkgKey -+ data_base: The database currently being queried -+ Returns: -+ depend_set: List of related dependent sqlalchemy objects -+ Raises: -+ AttributeError: The object does not have this property -+ SQLAlchemyError: sqlalchemy error -+ """ -+ depend_set = [] -+ try: -+ bin_src_pkg_key = req_pk_dict.values() -+ pk_in = literal_column('pkgKey').in_(bin_src_pkg_key) -+ sql_bin_src = text(""" -+ SELECT DISTINCT -+ bin.pkgKey as pk, -+ bin.name AS depend_name, -+ bin.version AS depend_version, -+ src_pack.name AS depend_src_name -+ FROM -+ ( SELECT name, pkgKey,version, rpm_sourcerpm FROM bin_pack WHERE {} ) bin -+ LEFT JOIN src_pack ON src_pack.src_name = bin.rpm_sourcerpm; -+ """.format(pk_in)) -+ -+ depend_set = data_base.session.execute( -+ sql_bin_src, { -+ 'pkgKey_{}'.format(i): v -+ for i, v in enumerate(bin_src_pkg_key, 1) -+ } -+ ).fetchall() -+ -+ except SQLAlchemyError as sql_err: -+ LOGGER.logger.error(sql_err) -+ except AttributeError as error_msg: -+ LOGGER.logger.error(error_msg) -+ -+ return depend_set -+ -+ # Other methods - def binary_search_database_for_first_time(self, binary_name): - """ - Args: -@@ -553,6 +817,105 @@ class SearchDB(): - - return None, None - -+ def get_src_name(self, binary_name): -+ """ -+ Description: get a package source name from database: -+ bianry_name ->binary_source_name -> source_name -+ Args: -+ binary_name: search package's name, database 
preority list -+ Returns: -+ db_name: database name -+ source_name: source name -+ source_version: source version -+ Raises: -+ AttributeError: The object does not have this property -+ SQLAlchemyError: sqlalchemy error -+ """ -+ for db_name, data_base in self.db_object_dict.items(): -+ sql_str = """ -+ SELECT DISTINCT -+ src_pack.name AS source_name, -+ src_pack.version AS source_version -+ FROM -+ bin_pack, -+ src_pack -+ WHERE -+ src_pack.src_name = bin_pack.rpm_sourcerpm -+ AND bin_pack.name = :binary_name; -+ """ -+ try: -+ bin_obj = data_base.session.execute(text(sql_str), -+ {"binary_name": binary_name} -+ ).fetchone() -+ source_name = bin_obj.source_name -+ source_version = bin_obj.source_version -+ if source_name is not None: -+ return ResponseCode.SUCCESS, db_name, \ -+ source_name, source_version -+ except AttributeError as error_msg: -+ LOGGER.logger.error(error_msg) -+ except SQLAlchemyError as error_msg: -+ LOGGER.logger.error(error_msg) -+ return ResponseCode.DIS_CONNECTION_DB, None, None, None -+ return ResponseCode.PACK_NAME_NOT_FOUND, None, None, None -+ -+ def get_sub_pack(self, source_name_list): -+ """ -+ Description: get a subpack list based on source name list: -+ source_name ->source_name_id -> binary_name -+ Args: -+ source_name_list: search package's name, database preority list -+ Returns: -+ response code -+ result_list: subpack tuple -+ Raises: -+ AttributeError: The object does not have this property -+ SQLAlchemyError: sqlalchemy error -+ """ -+ if not self.db_object_dict: -+ return ResponseCode.DIS_CONNECTION_DB, None -+ search_set = {source_name for source_name in source_name_list if source_name} -+ result_list = [] -+ get_list = [] -+ if not search_set: -+ return ResponseCode.INPUT_NONE, None -+ for db_name, data_base in self.db_object_dict.items(): -+ try: -+ name_in = literal_column('name').in_(search_set) -+ sql_com = text(''' -+ SELECT -+ bin_pack.name AS subpack_name, -+ bin_pack.version AS sub_pack_version, -+ src.name AS search_name, -+ src.version AS search_version -+ FROM -+ (SELECT name,version,src_name FROM src_pack WHERE {}) src -+ LEFT JOIN bin_pack on src.src_name = bin_pack.rpm_sourcerpm -+ '''.format(name_in)) -+ subpack_tuple = data_base.session. 
\ -+ execute(sql_com, {'name_{}'.format(i): v -+ for i, v in enumerate(search_set, 1)}).fetchall() -+ if subpack_tuple: -+ for result in subpack_tuple: -+ result_list.append((result, db_name)) -+ get_list.append(result.search_name) -+ search_set.symmetric_difference_update(set(get_list)) -+ get_list.clear() -+ if not search_set: -+ return ResponseCode.SUCCESS, result_list -+ else: -+ continue -+ except AttributeError as attr_error: -+ current_app.logger.error(attr_error) -+ except SQLAlchemyError as sql_error: -+ current_app.logger.error(sql_error) -+ return_tuple = namedtuple( -+ 'return_tuple', 'subpack_name sub_pack_version search_version search_name') -+ for search_name in search_set: -+ result_list.append( -+ (return_tuple(None, None, None, search_name), 'NOT FOUND')) -+ return ResponseCode.SUCCESS, result_list -+ - - def db_priority(): - """ -diff --git a/packageship/application/apps/package/function/self_depend.py b/packageship/application/apps/package/function/self_depend.py -index dd72bed..1ec4c28 100644 ---- a/packageship/application/apps/package/function/self_depend.py -+++ b/packageship/application/apps/package/function/self_depend.py -@@ -8,11 +8,11 @@ class: SelfDepend, DictionaryOperations - - import copy - from packageship.libs.log import Log --from .searchdb import SearchDB --from .constants import ResponseCode --from .constants import ListNode --from .install_depend import InstallDepend as install_depend --from .build_depend import BuildDepend as build_depend -+from packageship.application.apps.package.function.searchdb import SearchDB -+from packageship.application.apps.package.function.constants import ResponseCode, ListNode -+from packageship.application.apps.package.function.install_depend import InstallDepend \ -+ as install_depend -+from packageship.application.apps.package.function.build_depend import BuildDepend as build_depend - - LOGGER = Log(__name__) - -@@ -35,6 +35,8 @@ class SelfDepend(): - search_db: A object of database which would be connected - not_found_components: Contain the package not found components - """ -+ -+ # pylint: disable = R0902 - def __init__(self, db_list): - """ - init class -@@ -72,7 +74,8 @@ class SelfDepend(): - self.withsubpack = withsubpack - response_code = self.init_dict(packname, packtype) - if response_code != ResponseCode.SUCCESS: -- return response_code, self.binary_dict.dictionary, self.source_dicts.dictionary, self.not_found_components -+ return (response_code, self.binary_dict.dictionary, -+ self.source_dicts.dictionary, self.not_found_components) - - for key, _ in self.binary_dict.dictionary.items(): - self.search_install_list.append(key) -@@ -88,7 +91,8 @@ class SelfDepend(): - self.with_subpack() - if self.search_build_list: - self.query_build(selfbuild) -- return response_code, self.binary_dict.dictionary, self.source_dicts.dictionary, self.not_found_components -+ return (response_code, self.binary_dict.dictionary, -+ self.source_dicts.dictionary, self.not_found_components) - - def init_dict(self, packname, packtype): - """ -@@ -105,7 +109,7 @@ class SelfDepend(): - if subpack_list: - for subpack_tuple, dbname in subpack_list: - self.source_dicts.append_src(packname, dbname, subpack_tuple.search_version) -- if dbname != 'NOT_FOUND': -+ if dbname != 'NOT FOUND': - self.binary_dict.append_bin(key=subpack_tuple.subpack_name, - src=packname, - version=subpack_tuple.search_version, -@@ -155,7 +159,8 @@ class SelfDepend(): - db_, src_version_ = self.search_db.get_version_and_db(source_name) - 
self.source_dicts.append_src(key=source_name, - dbname=db_ if db_ else values[ListNode.DBNAME], -- version=src_version_ if src_version_ else values[ListNode.VERSION]) -+ version=src_version_ -+ if src_version_ else values[ListNode.VERSION]) - self.search_build_list.append(source_name) - if self.withsubpack == 1: - self.search_subpack_list.append(source_name) -@@ -168,13 +173,14 @@ class SelfDepend(): - Raises: - """ - if None in self.search_subpack_list: -- LOGGER.logger.warning("There is a NONE in input value:" + \ -- str(self.search_subpack_list)) -+ LOGGER.logger.warning("There is a NONE in input value: %s", -+ str(self.search_subpack_list)) - self.search_subpack_list.remove(None) - _, result_list = self.search_db.get_sub_pack(self.search_subpack_list) - for subpack_tuple, dbname in result_list: -- if dbname != 'NOT_FOUND': -- if subpack_tuple.subpack_name and subpack_tuple.subpack_name not in self.binary_dict.dictionary: -+ if dbname != 'NOT FOUND': -+ if subpack_tuple.subpack_name and subpack_tuple.subpack_name \ -+ not in self.binary_dict.dictionary: - self.binary_dict.append_bin(key=subpack_tuple.subpack_name, - src=subpack_tuple.search_name, - version=subpack_tuple.sub_pack_version, -@@ -214,7 +220,7 @@ class SelfDepend(): - self.search_build_list.clear() - for key, values in self.result_tmp.items(): - if not key: -- LOGGER.logger.warning("key is NONE for value = " + str(values)) -+ LOGGER.logger.warning("key is NONE for value = %s", str(values)) - continue - if key not in self.binary_dict.dictionary and values[0] != 'source': - self.binary_dict.dictionary[key] = copy.deepcopy(values) -@@ -225,11 +231,13 @@ class SelfDepend(): - db_, src_version_ = self.search_db.get_version_and_db(source_name) - self.source_dicts.append_src(key=source_name, - dbname=db_ if db_ else values[ListNode.DBNAME], -- version=src_version_ if src_version_ else values[ListNode.VERSION]) -+ version=src_version_ -+ if src_version_ else values[ListNode.VERSION]) - if self.withsubpack == 1: - self.search_subpack_list.append(source_name) - elif key in self.binary_dict.dictionary: -- self.binary_dict.update_value(key=key, parent_list=values[ListNode.PARENT_LIST]) -+ self.binary_dict.update_value(key=key, -+ parent_list=values[ListNode.PARENT_LIST]) - - def query_selfbuild(self): - """ -@@ -246,7 +254,7 @@ class SelfDepend(): - self.not_found_components.update(not_fd_com) - for key, values in self.result_tmp.items(): - if not key: -- LOGGER.logger.warning("key is NONE for value = " + str(values)) -+ LOGGER.logger.warning("key is NONE for value = %s", str(values)) - continue - if key in self.binary_dict.dictionary: - self.binary_dict.update_value(key=key, parent_list=values[ListNode.PARENT_LIST]) -@@ -255,11 +263,11 @@ class SelfDepend(): - self.search_install_list.append(key) - for key, values in source_dicts_tmp.items(): - if not key: -- LOGGER.logger.warning("key is NONE for value = " + str(values)) -+ LOGGER.logger.warning("key is NONE for value = %s", str(values)) - continue - if key not in self.source_dicts.dictionary: - self.source_dicts.dictionary[key] = copy.deepcopy(values) -- if self.with_subpack == 1: -+ if self.withsubpack == 1: - self.search_subpack_list.append(key) - self.search_build_list.clear() - -@@ -289,6 +297,7 @@ class DictionaryOperations(): - """ - self.dictionary[key] = [dbname, version] - -+ # pylint: disable=R0913 - def append_bin(self, key, src=None, version=None, dbname=None, parent_node=None): - """ - Description: Appending binary dictionary -diff --git a/packageship/pkgship 
b/packageship/pkgship -index e19ddc4..9210bd2 100644 ---- a/packageship/pkgship -+++ b/packageship/pkgship -@@ -20,4 +20,4 @@ if __name__ == '__main__': - main() - except Exception as error: - print('Command execution error please try again ') -- print(e.message) -+ print(error.message) -diff --git a/packageship/pkgshipd b/packageship/pkgshipd -index fef39e3..2035b75 100755 ---- a/packageship/pkgshipd -+++ b/packageship/pkgshipd -@@ -12,23 +12,35 @@ fi - - user=$(id | awk '{print $2}' | cut -d = -f 2) - if [ "$user" == "0(root)" ]; then -- echo "[INFO] Current user is root" -+ echo "[INFO] Current user is root." - else -- echo "[ERROR] Current user is not root, the service don't support common user." -+ echo "[ERROR] Current user is not root." - exit 1 - fi - - function check_config_file(){ - echo "[INFO] Check validation of config file." - check_null -- -+ - echo "[INFO] Check validation of ip addresses." - write_port=$(get_config "$service" "write_port") - query_port=$(get_config "$service" "query_port") - write_ip_addr=$(get_config "$service" "write_ip_addr") - query_ip_addr=$(get_config "$service" "query_ip_addr") -- check_addr $write_ip_addr $write_port -- check_addr $query_ip_addr $query_port -+ if [[ -z $write_ip_addr ]]; then -+ echo "[ERROR] The value of below config names is None in: $SYS_PATH/package.ini, Please check these parameters: write_ip_addr" -+ exit 1 -+ else -+ check_addr $write_ip_addr $write_port -+ fi -+ -+ if [[ -z $query_ip_addr ]]; then -+ echo "[ERROR] The value of below config names is None in: $SYS_PATH/package.ini, Please check these parameters: query_ip_addr" -+ exit 1 -+ else -+ check_addr $query_ip_addr $query_port -+ fi -+ - echo "[INFO] IP addresses are all valid." - - echo "[INFO] Check validation of numbers." -@@ -47,8 +59,8 @@ function check_config_file(){ - echo "[INFO] Check validation of words." - log_level=$(get_config "$service" "log_level") - open=$(get_config "$service" "open") -- check_word $log_level "INFO|DEBUG|WARNING|ERROR|CRITICAL" "log_level" -- check_word $open "True|False" "open" -+ check_word "log_level" "INFO|DEBUG|WARNING|ERROR|CRITICAL" $log_level -+ check_word "open" "True|False" $open - echo "[INFO] All words are valid." - - echo "[INFO] Config file checked valid." -@@ -67,7 +79,7 @@ function check_addr(){ - echo "[ERROR] Invalid ip of $1" - exit 1 - fi -- check_num $2 "port" -+ check_num ${2-"port"} "port" - if [[ $2 -gt 65534 || $2 -lt 1025 ]]; then - echo "[ERROR] Invalid port of $2" - exit 1 -@@ -100,16 +112,21 @@ function check_num(){ - } - - function check_word(){ -- result=`echo $1 | grep -wE "$2"` -+ if [ -z $3 ]; then -+ echo "[ERROR] The value of below config names is None in: $SYS_PATH/package.ini, Please check these parameters: $1" -+ exit 1 -+ fi -+ -+ result=`echo $3 | grep -wE "$2"` - if [ $? -ne 0 ]; then -- echo "[ERROR] $3 should be $2." -+ echo "[ERROR] $1 should be $2." 
- exit 1 - fi - } - - - function get_config(){ -- cat $SYS_PATH/package.ini | grep -E ^$2 | sed s/[[:space:]]//g | awk 'BEGIN{FS="="}{print $2}' -+ cat $SYS_PATH/package.ini | grep -E ^$2 | sed 's/[[:space:]]//g' | awk 'BEGIN{FS="="}{print $2}' - } - - function create_config_file(){ -@@ -120,12 +137,12 @@ function create_config_file(){ - harakiri=$(get_config "$service" "harakiri") - uwsgi_file_path=$(find /usr/lib/ -name "packageship" | head -n 1) - echo "[INFO] run packageship under path: $uwsgi_file_path" -- if [ $service = "manage" -o $service = "all" ];then -+ if [ $service = "manage" -o $service = "all" ]; then - write_port=$(get_config "$service" "write_port") - write_ip_addr=$(get_config "$service" "write_ip_addr") - if [[ -z "$daemonize" ]] || [[ -z "$buffer_size" ]] || [[ -z "$write_ip_addr" ]] || [[ -z "$http_timeout" ]] || [[ -z "$harakiri" ]] || [[ -z "$write_port" ]]; - then -- echo "[ERROR] CAN NOT find all config name in: $SYS_PATH/package.ini, Please check the file" -+ echo "[ERROR] CAN NOT find all config name in: $SYS_PATH/package.ini, Please check the file" - echo "[ERROR] The following config name is needed: daemonize, buffer-size, write_port, write_ip_addr, harakiri and http-timeout" - exit 1 - fi diff --git a/0002-fix-the-problem-of-continuous-spaces.patch b/0002-fix-the-problem-of-continuous-spaces.patch deleted file mode 100644 index f752a3e..0000000 --- a/0002-fix-the-problem-of-continuous-spaces.patch +++ /dev/null @@ -1,255 +0,0 @@ -diff --git a/packageship/application/initsystem/data_import.py b/packageship/application/initsystem/data_import.py -index c2169c1..a5846bd 100644 ---- a/packageship/application/initsystem/data_import.py -+++ b/packageship/application/initsystem/data_import.py -@@ -84,8 +84,8 @@ class InitDataBase(): - - if not os.path.exists(self.config_file_path): - raise FileNotFoundError( -- 'system initialization configuration file \ -- does not exist: %s' % self.config_file_path) -+ "system initialization configuration file" -+ "does not exist: %s" % self.config_file_path) - # load yaml configuration file - with open(self.config_file_path, 'r', encoding='utf-8') as file_context: - try: -@@ -93,24 +93,25 @@ class InitDataBase(): - file_context.read(), Loader=yaml.FullLoader) - except yaml.YAMLError as yaml_error: - -- raise ConfigurationException(' '.join("The format of the yaml configuration\ -- file is wrong please check and try again:{0}".format(yaml_error).split())) -+ raise ConfigurationException( -+ "The format of the yaml configuration" -+ "file is wrong please check and try again:{0}".format(yaml_error)) - - if init_database_config is None: - raise ConfigurationException( - 'The content of the database initialization configuration file cannot be empty') - if not isinstance(init_database_config, list): - raise ConfigurationException( -- ' '.join('The format of the initial database configuration file\ -- is incorrect.When multiple databases need to be initialized, \ -- it needs to be configured in the form of multiple \ -- nodes:{}'.format(self.config_file_path).split())) -+ "The format of the initial database configuration file" -+ "is incorrect.When multiple databases need to be initialized," -+ "it needs to be configured in the form of multiple" -+ "nodes:{}".format(self.config_file_path)) - for config_item in init_database_config: - if not isinstance(config_item, dict): -- raise ConfigurationException(' '.join('The format of the initial database\ -- configuration file is incorrect, and the value in a single node should\ -- be presented 
in the form of key - val pairs: \ -- {}'.format(self.config_file_path).split())) -+ raise ConfigurationException( -+ "The format of the initial database" -+ "configuration file is incorrect, and the value in a single node should" -+ "be presented in the form of key - val pairs:{}".format(self.config_file_path)) - return init_database_config - - def init_data(self): -@@ -122,8 +123,8 @@ class InitDataBase(): - """ - if getattr(self, 'config_file_datas', None) is None or \ - self.config_file_datas is None: -- raise ContentNoneException('The content of the database initialization \ -- configuration file is empty') -+ raise ContentNoneException("The content of the database initialization" -+ "configuration file is empty") - - if self.__exists_repeat_database(): - raise DatabaseRepeatException( -@@ -139,13 +140,13 @@ class InitDataBase(): - continue - priority = database_config.get('priority') - if not isinstance(priority, int) or priority < 0 or priority > 100: -- LOGGER.logger.error('The priority value type in the database initialization \ -- configuration file is incorrect') -+ LOGGER.logger.error("The priority value type in the database initialization" -+ "configuration file is incorrect") - continue - lifecycle_status_val = database_config.get('lifecycle') - if lifecycle_status_val not in ('enable', 'disable'): -- LOGGER.logger.error('The status value of the life cycle in the initialization\ -- configuration file can only be enable or disable') -+ LOGGER.logger.error("The value of the life cycle in the initialization" -+ "configuration file can only be enable or disable") - continue - # Initialization data - self._init_data(database_config) -@@ -163,8 +164,8 @@ class InitDataBase(): - """ - _database_engine = self._database_engine.get(self.db_type) - if not _database_engine: -- raise Error('The database engine is set incorrectly, \ -- currently only the following engines are supported: %s ' -+ raise Error("The database engine is set incorrectly," -+ "currently only the following engines are supported: %s " - % '、'.join(self._database_engine.keys())) - _create_table_result = _database_engine( - db_name=db_name, tables=tables, storage=storage).create_database(self) -@@ -200,11 +201,12 @@ class InitDataBase(): - - if src_db_file is None or bin_db_file is None: - raise ContentNoneException( -- 'The path to the sqlite file in the database initialization configuration \ -- is incorrect ') -+ "The path to the sqlite file in the database initialization" -+ "configuration is incorrect ") - if not os.path.exists(src_db_file) or not os.path.exists(bin_db_file): -- raise FileNotFoundError("sqlite file {src} or {bin} does not exist, please \ -- check and try again".format(src=src_db_file, bin=bin_db_file)) -+ raise FileNotFoundError( -+ "sqlite file {src} or {bin} does not exist, please" -+ "check and try again".format(src=src_db_file, bin=bin_db_file)) - # 3. 
Obtain temporary source package files and binary package files - if self.__save_data(database_config, - self.database_name): -@@ -314,23 +316,20 @@ class InitDataBase(): - - Args: - db_name: Saved database name -- Returns: -- -- Raises: -- - """ - # Query all source packages - self.sql = " select * from packages " - packages_datas = self.__get_data() - if packages_datas is None: - raise ContentNoneException( -- '{db_name}:There is no relevant data in the source \ -- package provided '.format(db_name=db_name)) -+ "{db_name}:There is no relevant data in the source " -+ "package provided ".format(db_name=db_name)) - for index, src_package_item in enumerate(packages_datas): - try: - src_package_name = '-'.join([src_package_item.get('name'), - src_package_item.get('version'), -- src_package_item.get('release') + '.src.rpm' -+ src_package_item.get( -+ 'release') + '.src.rpm' - ]) - except AttributeError as exception_msg: - src_package_name = None -@@ -391,8 +390,9 @@ class InitDataBase(): - self.sql = " select * from requires " - requires_datas = self.__get_data() - if requires_datas is None: -- raise ContentNoneException('{db_name}: The package data that the source package \ -- depends on is empty'.format(db_name=db_name)) -+ raise ContentNoneException( -+ "{db_name}: The package data that the source package " -+ "depends on is empty".format(db_name=db_name)) - with DBHelper(db_name=db_name) as database: - database.batch_add(requires_datas, SrcRequires) - -@@ -411,8 +411,8 @@ class InitDataBase(): - bin_packaegs = self.__get_data() - if bin_packaegs is None: - raise ContentNoneException( -- '{db_name}:There is no relevant data in the provided \ -- binary package '.format(db_name=db_name)) -+ "{db_name}:There is no relevant data in the provided " -+ "binary package ".format(db_name=db_name)) - for index, bin_package_item in enumerate(bin_packaegs): - try: - src_package_name = bin_package_item.get('rpm_sourcerpm').split( -@@ -441,8 +441,8 @@ class InitDataBase(): - requires_datas = self.__get_data() - if requires_datas is None: - raise ContentNoneException( -- '{db_name}:There is no relevant data in the provided binary \ -- dependency package'.format(db_name=db_name)) -+ "{db_name}:There is no relevant data in the provided binary " -+ "dependency package".format(db_name=db_name)) - - with DBHelper(db_name=db_name) as database: - database.batch_add(requires_datas, BinRequires) -@@ -462,8 +462,8 @@ class InitDataBase(): - provides_datas = self.__get_data() - if provides_datas is None: - raise ContentNoneException( -- '{db_name}:There is no relevant data in the provided \ -- binary component '.format(db_name=db_name)) -+ "{db_name}:There is no relevant data in the provided " -+ "binary component ".format(db_name=db_name)) - - with DBHelper(db_name=db_name) as database: - database.batch_add(provides_datas, BinProvides) -@@ -474,8 +474,8 @@ class InitDataBase(): - files_datas = self.__get_data() - if files_datas is None: - raise ContentNoneException( -- '{db_name}:There is no relevant binary file installation\ -- path data in the provided database '.format(db_name=db_name)) -+ "{db_name}:There is no relevant binary file installation " -+ "path data in the provided database ".format(db_name=db_name)) - - with DBHelper(db_name=db_name) as database: - database.batch_add(files_datas, BinFiles) -diff --git a/packageship/libs/dbutils/sqlalchemy_helper.py b/packageship/libs/dbutils/sqlalchemy_helper.py -index a0b22e2..d18b115 100644 ---- a/packageship/libs/dbutils/sqlalchemy_helper.py -+++ 
b/packageship/libs/dbutils/sqlalchemy_helper.py -@@ -279,8 +279,8 @@ class DBHelper(BaseHelper): - - if not isinstance(dicts, list): - raise TypeError( -- 'The input for bulk insertion must be a dictionary \ -- list with the same fields as the current entity') -+ "The input for bulk insertion must be a dictionary" -+ "list with the same fields as the current entity") - try: - self.session.execute( - model.__table__.insert(), -diff --git a/packageship/pkgship.py b/packageship/pkgship.py -index 884b2ab..f9408c8 100644 ---- a/packageship/pkgship.py -+++ b/packageship/pkgship.py -@@ -25,8 +25,8 @@ try: - - LOGGER = Log(__name__) - except ImportError as import_error: -- print('Error importing related dependencies, \ -- please check if related dependencies are installed') -+ print("Error importing related dependencies," -+ "please check if related dependencies are installed") - else: - from packageship.application.apps.package.function.constants import ResponseCode - from packageship.application.apps.package.function.constants import ListNode -@@ -230,7 +230,9 @@ class PkgshipCommand(BaseCommand): - if package_all.get("not_found_components"): - print("Problem: Not Found Components") - for not_found_com in package_all.get("not_found_components"): -- print(" - nothing provides {} needed by {} ".format(not_found_com, params.packagename)) -+ print( -+ " - nothing provides {} needed by {} ". -+ format(not_found_com, params.packagename)) - package_all = package_all.get("build_dict") - - for bin_package, package_depend in package_all.items(): -@@ -835,7 +837,9 @@ class InstallDepCommand(PkgshipCommand): - if package_all.get("not_found_components"): - print("Problem: Not Found Components") - for not_found_com in package_all.get("not_found_components"): -- print(" - nothing provides {} needed by {} ".format(not_found_com, params.packagename)) -+ print( -+ " - nothing provides {} needed by {} ". -+ format(not_found_com, params.packagename)) - for bin_package, package_depend in package_all.get("install_dict").items(): - # distinguish whether the current data is the data of the root node - if isinstance(package_depend, list) and package_depend[-1][0][0] != 'root': -@@ -1061,7 +1065,9 @@ class SelfBuildCommand(PkgshipCommand): - if package_all.get("not_found_components"): - print("Problem: Not Found Components") - for not_found_com in package_all.get("not_found_components"): -- print(" - nothing provides {} needed by {} ".format(not_found_com, params.packagename)) -+ print( -+ " - nothing provides {} needed by {} ". 
-+ format(not_found_com, params.packagename)) - bin_package_count = self._parse_bin_package( - package_all.get('binary_dicts')) - diff --git a/0003-fix-log_level-configuration-item-not-work.patch b/0003-fix-log_level-configuration-item-not-work.patch deleted file mode 100644 index 3d4b564..0000000 --- a/0003-fix-log_level-configuration-item-not-work.patch +++ /dev/null @@ -1,55 +0,0 @@ -diff --git a/packageship/application/__init__.py b/packageship/application/__init__.py -index 1361058..6a57a2e 100644 ---- a/packageship/application/__init__.py -+++ b/packageship/application/__init__.py -@@ -2,8 +2,6 @@ - """ - Initial operation and configuration of the flask project - """ --import sys --import threading - from flask import Flask - from flask_session import Session - from flask_apscheduler import APScheduler -@@ -19,7 +17,9 @@ def _timed_task(app): - """ - Timed task function - """ -- from .apps.lifecycle.function.download_yaml import update_pkg_info # pylint: disable=import-outside-toplevel -+ # disable=import-outside-toplevel Avoid circular import problems,so import inside the function -+ # pylint: disable=import-outside-toplevel -+ from packageship.application.apps.lifecycle.function.download_yaml import update_pkg_info - - _readconfig = ReadConfig(system_config.SYS_CONFIG_PATH) - try: -@@ -34,6 +34,7 @@ def _timed_task(app): - if _minute < 0 or _minute > 59: - _minute = 0 - -+ # disable=no-member Dynamic variable pylint is not recognized - app.apscheduler.add_job( # pylint: disable=no-member - func=update_pkg_info, id="update_package_data", trigger="cron", hour=_hour, minute=_minute) - app.apscheduler.add_job( # pylint: disable=no-member -@@ -52,7 +53,8 @@ def init_app(operation): - app = Flask(__name__) - - # log configuration -- app.logger.addHandler(setup_log(Config)) -+ # disable=no-member Dynamic variable pylint is not recognized -+ app.logger.addHandler(setup_log(Config())) # pylint: disable=no-member - - # Load configuration items - -@@ -66,10 +68,12 @@ def init_app(operation): - # Open session function - Session(app) - -+ # Variables OPERATION need to be modified within the function and imported in other modules - global OPERATION # pylint: disable=global-statement - OPERATION = operation - - # Register Blueprint -+ # disable=import-outside-toplevel Avoid circular import problems,so import inside the function - from packageship.application import apps # pylint: disable=import-outside-toplevel - for blue, api in apps.blue_point: - api.init_app(app) diff --git a/0004-fix-the-error-when-executing-query-commands.patch b/0004-fix-the-error-when-executing-query-commands.patch deleted file mode 100644 index 04552e5..0000000 --- a/0004-fix-the-error-when-executing-query-commands.patch +++ /dev/null @@ -1,24 +0,0 @@ -diff --git a/packageship/application/apps/package/function/packages.py b/packageship/application/apps/package/function/packages.py -index eb96087..d36fc34 100644 ---- a/packageship/application/apps/package/function/packages.py -+++ b/packageship/application/apps/package/function/packages.py -@@ -313,7 +313,8 @@ def _sub_pack(src_name, table_name): - pro_info = res[pro_obj.sub_name]["provides"] - if pro_obj.sub_pro_name in pro_info: - pro_info[pro_obj.sub_pro_name]["requiredby"].update( -- {pro_obj.sub_reqby_name: pro_obj.sub_reqby_name}) -+ {pro_obj.sub_reqby_name: pro_obj.sub_reqby_name} -+ if pro_obj.sub_reqby_name else {}) - else: - pro_info.update( - { -@@ -368,7 +369,8 @@ def _sub_pack(src_name, table_name): - req_info = sub_pkg_info["requires"] - if 
req_obj.sub_req_name in req_info: - req_info[req_obj.sub_req_name]["providedby"].update( -- {req_obj.sub_proby_name: req_obj.sub_proby_name}) -+ {req_obj.sub_proby_name: req_obj.sub_proby_name} -+ if req_obj.sub_proby_name else {}) - else: - req_info.update( - { diff --git a/0005-fix-the-error-when-source-package-has-no-sub-packages.patch b/0005-fix-the-error-when-source-package-has-no-sub-packages.patch deleted file mode 100644 index 6188dbf..0000000 --- a/0005-fix-the-error-when-source-package-has-no-sub-packages.patch +++ /dev/null @@ -1,62 +0,0 @@ -diff --git a/packageship/application/apps/package/function/self_depend.py b/packageship/application/apps/package/function/self_depend.py -index 1ec4c28..b06b950 100644 ---- a/packageship/application/apps/package/function/self_depend.py -+++ b/packageship/application/apps/package/function/self_depend.py -@@ -106,16 +106,20 @@ class SelfDepend(): - """ - if packtype == 'source': - response_code, subpack_list = self.search_db.get_sub_pack([packname]) -- if subpack_list: -- for subpack_tuple, dbname in subpack_list: -- self.source_dicts.append_src(packname, dbname, subpack_tuple.search_version) -- if dbname != 'NOT FOUND': -- self.binary_dict.append_bin(key=subpack_tuple.subpack_name, -- src=packname, -- version=subpack_tuple.search_version, -- dbname=dbname) -- else: -- return ResponseCode.PACK_NAME_NOT_FOUND -+ if not subpack_list: -+ return ResponseCode.PACK_NAME_NOT_FOUND -+ -+ for subpack_tuple, dbname in subpack_list: -+ self.source_dicts.append_src(packname, dbname, subpack_tuple.search_version) -+ if dbname == 'NOT FOUND': -+ continue -+ -+ if subpack_tuple.subpack_name and subpack_tuple.subpack_name \ -+ not in self.binary_dict.dictionary: -+ self.binary_dict.append_bin(key=subpack_tuple.subpack_name, -+ src=packname, -+ version=subpack_tuple.search_version, -+ dbname=dbname) - - else: - response_code, dbname, source_name, version = \ -@@ -178,15 +182,17 @@ class SelfDepend(): - self.search_subpack_list.remove(None) - _, result_list = self.search_db.get_sub_pack(self.search_subpack_list) - for subpack_tuple, dbname in result_list: -- if dbname != 'NOT FOUND': -- if subpack_tuple.subpack_name and subpack_tuple.subpack_name \ -- not in self.binary_dict.dictionary: -- self.binary_dict.append_bin(key=subpack_tuple.subpack_name, -- src=subpack_tuple.search_name, -- version=subpack_tuple.sub_pack_version, -- dbname=dbname, -- parent_node=[subpack_tuple.search_name, 'Subpack']) -- self.search_install_list.append(subpack_tuple.subpack_name) -+ if dbname == 'NOT FOUND': -+ continue -+ -+ if subpack_tuple.subpack_name and subpack_tuple.subpack_name \ -+ not in self.binary_dict.dictionary: -+ self.binary_dict.append_bin(key=subpack_tuple.subpack_name, -+ src=subpack_tuple.search_name, -+ version=subpack_tuple.sub_pack_version, -+ dbname=dbname, -+ parent_node=[subpack_tuple.search_name, 'Subpack']) -+ self.search_install_list.append(subpack_tuple.subpack_name) - self.search_subpack_list.clear() - - def query_build(self, selfbuild): diff --git a/0006-fix-memory_caused-service-crash-and-data-duplication-issue.patch b/0006-fix-memory_caused-service-crash-and-data-duplication-issue.patch deleted file mode 100644 index 0e4ee66..0000000 --- a/0006-fix-memory_caused-service-crash-and-data-duplication-issue.patch +++ /dev/null @@ -1,3055 +0,0 @@ -diff -Naru a/packageship/application/apps/lifecycle/function/concurrent.py b/packageship/application/apps/lifecycle/function/concurrent.py ---- a/packageship/application/apps/lifecycle/function/concurrent.py 
2020-09-22 23:34:04.037937224 +0800 -+++ b/packageship/application/apps/lifecycle/function/concurrent.py 2020-09-22 23:48:39.938515522 +0800 -@@ -1,65 +1,76 @@ --#!/usr/bin/python3 --""" --Use queues to implement the producer and consumer model --to solve the database lock introduced by high concurrency issues --""" --import threading --from queue import Queue --from sqlalchemy.exc import SQLAlchemyError --from packageship.libs.dbutils import DBHelper --from packageship.libs.exception import Error, ContentNoneException --from packageship.libs.log import Log -- -- --class ProducerConsumer(): -- """ -- The data written in the database is added to the high -- concurrency queue, and the high concurrency is solved -- by the form of the queue -- """ -- _queue = Queue(maxsize=0) -- _instance_lock = threading.Lock() -- _log = Log(__name__) -- -- def __init__(self): -- self.thread_queue = threading.Thread(target=self.__queue_process) -- if not self.thread_queue.isAlive(): -- self.thread_queue.start() -- -- def start_thread(self): -- """ -- Judge a thread, if the thread is terminated, restart -- """ -- if not self.thread_queue.isAlive(): -- self.thread_queue = threading.Thread(target=self.__queue_process) -- self.thread_queue.start() -- -- def __new__(cls, *args, **kwargs): # pylint: disable=unused-argument -- """ -- Use the singleton pattern to create a thread-safe producer pattern -- """ -- if not hasattr(cls, "_instance"): -- with cls._instance_lock: -- if not hasattr(cls, "_instance"): -- cls._instance = object.__new__(cls) -- return cls._instance -- -- def __queue_process(self): -- """ -- Read the content in the queue and save and update -- """ -- while not self._queue.empty(): -- _queue_value = self._queue.get() -- try: -- with DBHelper(db_name="lifecycle") as database: -- database.add(_queue_value) -- except (Error, ContentNoneException, SQLAlchemyError) as error: -- self._log.logger.error(error) -- -- def put(self, pending_content): -- """ -- The content of the operation is added to the queue -- """ -- if pending_content: -- self._queue.put(pending_content) -- self.start_thread() -+#!/usr/bin/python3 -+""" -+Use queues to implement the producer and consumer model -+to solve the database lock introduced by high concurrency issues -+""" -+import threading -+import time -+from queue import Queue -+from sqlalchemy.exc import SQLAlchemyError -+from sqlalchemy.exc import OperationalError -+from packageship.libs.exception import Error, ContentNoneException -+from packageship.libs.log import Log -+from packageship.libs.configutils.readconfig import ReadConfig -+from packageship import system_config -+ -+ -+class ProducerConsumer(): -+ """ -+ The data written in the database is added to the high -+ concurrency queue, and the high concurrency is solved -+ by the form of the queue -+ """ -+ _queue = Queue(maxsize=1000) -+ _instance_lock = threading.Lock() -+ _log = Log(__name__) -+ -+ def __init__(self): -+ self.thread_queue = threading.Thread(target=self.__queue_process) -+ self._instance_lock.acquire() -+ if not self.thread_queue.isAlive(): -+ self.thread_queue = threading.Thread(target=self.__queue_process) -+ self.thread_queue.start() -+ self._instance_lock.release() -+ -+ def start_thread(self): -+ """ -+ Judge a thread, if the thread is terminated, restart -+ """ -+ self._instance_lock.acquire() -+ if not self.thread_queue.isAlive(): -+ self.thread_queue = threading.Thread(target=self.__queue_process) -+ self.thread_queue.start() -+ self._instance_lock.release() -+ -+ def __new__(cls, *args, 
**kwargs): # pylint: disable=unused-argument -+ """ -+ Use the singleton pattern to create a thread-safe producer pattern -+ """ -+ if not hasattr(cls, "_instance"): -+ with cls._instance_lock: -+ if not hasattr(cls, "_instance"): -+ cls._instance = object.__new__(cls) -+ return cls._instance -+ -+ def __queue_process(self): -+ """ -+ Read the content in the queue and save and update -+ """ -+ while not self._queue.empty(): -+ _queue_value, method = self._queue.get() -+ try: -+ method(_queue_value) -+ except OperationalError as error: -+ self._log.logger.warning(error) -+ time.sleep(0.2) -+ self._queue.put((_queue_value, method)) -+ except (Error, ContentNoneException, SQLAlchemyError) as error: -+ self._log.logger.error(error) -+ -+ def put(self, pending_content): -+ """ -+ The content of the operation is added to the queue -+ """ -+ if pending_content: -+ self._queue.put(pending_content) -+ self.start_thread() -diff -Naru a/packageship/application/apps/lifecycle/function/download_yaml.py b/packageship/application/apps/lifecycle/function/download_yaml.py ---- a/packageship/application/apps/lifecycle/function/download_yaml.py 2020-09-22 23:34:04.037937224 +0800 -+++ b/packageship/application/apps/lifecycle/function/download_yaml.py 2020-09-22 23:48:46.478549707 +0800 -@@ -1,222 +1,224 @@ --#!/usr/bin/python3 --""" --Dynamically obtain the content of the yaml file \ --that saves the package information, periodically \ --obtain the content and save it in the database --""" --import copy --from concurrent.futures import ThreadPoolExecutor --import datetime as date --import requests --import yaml --from retrying import retry --from sqlalchemy.exc import SQLAlchemyError --from requests.exceptions import HTTPError --from packageship import system_config --from packageship.application.models.package import Packages --from packageship.application.models.package import PackagesMaintainer --from packageship.libs.dbutils import DBHelper --from packageship.libs.exception import Error, ContentNoneException --from packageship.libs.configutils.readconfig import ReadConfig --from .base import Base --from .gitee import Gitee --from .concurrent import ProducerConsumer -- -- --class ParseYaml(): -- """ -- Description: Analyze the downloaded remote yaml file, obtain the tags -- and maintainer information in the yaml file, and save the obtained -- relevant information into the database -- -- Attributes: -- base: base class instance -- pkg: Specific package data -- _table_name: The name of the data table to be operated -- openeuler_advisor_url: Get the warehouse address of the yaml file -- _yaml_content: The content of the yaml file -- """ -- -- def __init__(self, pkg_info, base, table_name): -- self.base = base -- self.pkg = pkg_info -- self._table_name = table_name -- self.openeuler_advisor_url = self._path_stitching(pkg_info.name) -- self._yaml_content = None -- self.timed_task_open = self._timed_task_status() -- self.producer_consumer = ProducerConsumer() -- -- def _timed_task_status(self): -- """ -- The open state of information such as the maintainer in the scheduled task -- """ -- _timed_task_status = True -- _readconfig = ReadConfig(system_config.SYS_CONFIG_PATH) -- open_status = _readconfig.get_config('TIMEDTASK', 'open') -- if open_status not in ('True', 'False'): -- self.base.log.logger.error( -- 'Wrong setting of the open state value of the scheduled task') -- if open_status == 'False': -- self.timed_task_open = False -- return _timed_task_status -- -- def _path_stitching(self, pkg_name): -- """ -- 
The path of the remote service call -- """ -- _readconfig = ReadConfig(system_config.SYS_CONFIG_PATH) -- _remote_url = _readconfig.get_config('LIFECYCLE', 'warehouse_remote') -- if _remote_url is None: -- _remote_url = 'https://gitee.com/openeuler/openEuler-Advisor/raw/master/upstream-info/' -- return _remote_url + '{pkg_name}.yaml'.format(pkg_name=pkg_name) -- -- def update_database(self): -- """ -- For the current package, determine whether the specific yaml file exists, parse -- the data in it and save it in the database if it exists, and record the relevant -- log if it does not exist -- -- """ -- if self._openeuler_advisor_exists_yaml(): -- self._save_to_database() -- else: -- msg = "The yaml information of the [%s] package has not been" \ -- "obtained yet" % self.pkg.name -- self.base.log.logger.warning(msg) -- -- def _get_yaml_content(self, url): -- """ -- -- """ -- try: -- response = requests.get( -- url, headers=self.base.headers) -- if response.status_code == 200: -- self._yaml_content = yaml.safe_load(response.content) -- -- except HTTPError as error: -- self.base.log.logger.error(error) -- -- def _openeuler_advisor_exists_yaml(self): -- """ -- Determine whether there is a yaml file with the current \ -- package name under the openeuler-advisor project -- -- """ -- self._get_yaml_content(self.openeuler_advisor_url) -- if self._yaml_content: -- return True -- return False -- -- def _save_to_database(self): -- """ -- Save the acquired yaml file information to the database -- -- Raises: -- ContentNoneException: The added entity content is empty -- Error: An error occurred during data addition -- """ -- self._parse_warehouse_info() -- tags = self._yaml_content.get('git_tag', None) -- if tags: -- self._parse_tags_content(tags) -- self.producer_consumer.put(copy.deepcopy(self.pkg)) -- if self.timed_task_open: -- _maintainer = self._yaml_content.get('maintainers') -- if _maintainer and isinstance(_maintainer, list): -- self.pkg.maintainer = _maintainer[0] -- self.pkg.maintainlevel = self._yaml_content.get('maintainlevel') -- try: -- if self.timed_task_open: -- @retry(stop_max_attempt_number=3, stop_max_delay=500) -- def _save_maintainer_info(): -- with DBHelper(db_name="lifecycle") as database: -- _packages_maintainer = database.session.query( -- PackagesMaintainer).filter( -- PackagesMaintainer.name == self.pkg.name).first() -- if _packages_maintainer: -- _packages_maintainer.name = self.pkg.name -- _packages_maintainer.maintainer = self.pkg.maintainer -- _packages_maintainer.maintainlevel = self.pkg.maintainlevel -- else: -- _packages_maintainer = PackagesMaintainer( -- name=self.pkg.name, maintainer=self.pkg.maintainer, -- maintainlevel=self.pkg.maintainlevel) -- self.producer_consumer.put( -- copy.deepcopy(_packages_maintainer)) -- _save_maintainer_info() -- except (Error, ContentNoneException, SQLAlchemyError) as error: -- self.base.log.logger.error(error) -- -- def _parse_warehouse_info(self): -- """ -- Parse the warehouse information in the yaml file -- -- """ -- if self._yaml_content: -- self.pkg.version_control = self._yaml_content.get( -- 'version_control') -- self.pkg.src_repo = self._yaml_content.get('src_repo') -- self.pkg.tag_prefix = self._yaml_content.get('tag_prefix') -- -- def _parse_tags_content(self, tags): -- """ -- Parse the obtained tags content -- -- """ -- try: -- # Integrate tags information into key-value pairs -- _tags = [(tag.split()[0], tag.split()[1]) for tag in tags] -- _tags = sorted(_tags, key=lambda x: x[0], reverse=True) -- self.pkg.latest_version = 
_tags[0][1] -- self.pkg.latest_version_time = _tags[0][0] -- _end_time = date.datetime.strptime( -- self.pkg.latest_version_time, '%Y-%m-%d') -- if self.pkg.latest_version != self.pkg.version: -- for _version in _tags: -- if _version[1] == self.pkg.version: -- _end_time = date.datetime.strptime( -- _version[0], '%Y-%m-%d') -- self.pkg.used_time = (date.datetime.now() - _end_time).days -- -- except (IndexError, Error) as index_error: -- self.base.log.logger.error(index_error) -- -- --def update_pkg_info(pkg_info_update=True): -- """ -- Update the information of the upstream warehouse in the source package -- -- """ -- try: -- base_control = Base() -- _readconfig = ReadConfig(system_config.SYS_CONFIG_PATH) -- pool_workers = _readconfig.get_config('LIFECYCLE', 'pool_workers') -- _warehouse = _readconfig.get_config('LIFECYCLE', 'warehouse') -- if _warehouse is None: -- _warehouse = 'src-openeuler' -- if not isinstance(pool_workers, int): -- pool_workers = 10 -- # Open thread pool -- pool = ThreadPoolExecutor(max_workers=pool_workers) -- with DBHelper(db_name="lifecycle") as database: -- for table_name in filter(lambda x: x not in ['packages_issue', 'packages_maintainer'], -- database.engine.table_names()): -- -- cls_model = Packages.package_meta(table_name) -- # Query a specific table -- for package_item in database.session.query(cls_model).all(): -- if pkg_info_update: -- parse_yaml = ParseYaml( -- pkg_info=copy.deepcopy(package_item), -- base=base_control, -- table_name=table_name) -- pool.submit(parse_yaml.update_database) -- else: -- # Get the issue of each warehouse and save it -- gitee_issue = Gitee( -- package_item, _warehouse, package_item.name, table_name) -- pool.submit(gitee_issue.query_issues_info) -- pool.shutdown() -- except SQLAlchemyError as error_msg: -- base_control.log.logger.error(error_msg) -+#!/usr/bin/python3 -+""" -+Dynamically obtain the content of the yaml file \ -+that saves the package information, periodically \ -+obtain the content and save it in the database -+""" -+import copy -+from concurrent.futures import ThreadPoolExecutor -+import datetime as date -+import requests -+import yaml -+from retrying import retry -+from sqlalchemy.exc import SQLAlchemyError -+from requests.exceptions import HTTPError -+from packageship import system_config -+from packageship.application.models.package import Packages -+from packageship.application.models.package import PackagesMaintainer -+from packageship.libs.dbutils import DBHelper -+from packageship.libs.exception import Error, ContentNoneException -+from packageship.libs.configutils.readconfig import ReadConfig -+from .base import Base -+from .gitee import Gitee -+from .concurrent import ProducerConsumer -+ -+ -+class ParseYaml(): -+ """ -+ Description: Analyze the downloaded remote yaml file, obtain the tags -+ and maintainer information in the yaml file, and save the obtained -+ relevant information into the database -+ -+ Attributes: -+ base: base class instance -+ pkg: Specific package data -+ _table_name: The name of the data table to be operated -+ openeuler_advisor_url: Get the warehouse address of the yaml file -+ _yaml_content: The content of the yaml file -+ """ -+ -+ def __init__(self, pkg_info, base, table_name): -+ self.base = base -+ self.pkg = pkg_info -+ self._table_name = table_name -+ self.openeuler_advisor_url = self._path_stitching(pkg_info.name) -+ self._yaml_content = None -+ self.timed_task_open = self._timed_task_status() -+ self.producer_consumer = ProducerConsumer() -+ -+ def 
_timed_task_status(self): -+ """ -+ The open state of information such as the maintainer in the scheduled task -+ """ -+ _timed_task_status = True -+ _readconfig = ReadConfig(system_config.SYS_CONFIG_PATH) -+ open_status = _readconfig.get_config('TIMEDTASK', 'open') -+ if open_status not in ('True', 'False'): -+ self.base.log.logger.error( -+ 'Wrong setting of the open state value of the scheduled task') -+ if open_status == 'False': -+ self.timed_task_open = False -+ return _timed_task_status -+ -+ def _path_stitching(self, pkg_name): -+ """ -+ The path of the remote service call -+ """ -+ _readconfig = ReadConfig(system_config.SYS_CONFIG_PATH) -+ _remote_url = _readconfig.get_config('LIFECYCLE', 'warehouse_remote') -+ if _remote_url is None: -+ _remote_url = 'https://gitee.com/openeuler/openEuler-Advisor/raw/master/upstream-info/' -+ return _remote_url + '{pkg_name}.yaml'.format(pkg_name=pkg_name) -+ -+ def update_database(self): -+ """ -+ For the current package, determine whether the specific yaml file exists, parse -+ the data in it and save it in the database if it exists, and record the relevant -+ log if it does not exist -+ -+ """ -+ if self._openeuler_advisor_exists_yaml(): -+ self._save_to_database() -+ else: -+ msg = "The yaml information of the [%s] package has not been" \ -+ "obtained yet" % self.pkg.name -+ self.base.log.logger.warning(msg) -+ -+ def _get_yaml_content(self, url): -+ """ -+ -+ """ -+ try: -+ response = requests.get( -+ url, headers=self.base.headers) -+ if response.status_code == 200: -+ self._yaml_content = yaml.safe_load(response.content) -+ -+ except HTTPError as error: -+ self.base.log.logger.error(error) -+ -+ def _openeuler_advisor_exists_yaml(self): -+ """ -+ Determine whether there is a yaml file with the current \ -+ package name under the openeuler-advisor project -+ -+ """ -+ self._get_yaml_content(self.openeuler_advisor_url) -+ if self._yaml_content: -+ return True -+ return False -+ -+ def _save_to_database(self): -+ """ -+ Save the acquired yaml file information to the database -+ -+ Raises: -+ ContentNoneException: The added entity content is empty -+ Error: An error occurred during data addition -+ """ -+ -+ def _save_package(package_module): -+ with DBHelper(db_name="lifecycle") as database: -+ database.add(package_module) -+ -+ def _save_maintainer_info(maintainer_module): -+ with DBHelper(db_name="lifecycle") as database: -+ _packages_maintainer = database.session.query( -+ PackagesMaintainer).filter( -+ PackagesMaintainer.name == maintainer_module['name']).first() -+ if _packages_maintainer: -+ for key, val in maintainer_module.items(): -+ setattr(_packages_maintainer, key, val) -+ else: -+ _packages_maintainer = PackagesMaintainer( -+ **maintainer_module) -+ database.add(_packages_maintainer) -+ -+ self._parse_warehouse_info() -+ tags = self._yaml_content.get('git_tag', None) -+ if tags: -+ self._parse_tags_content(tags) -+ self.producer_consumer.put( -+ (copy.deepcopy(self.pkg), _save_package)) -+ if self.timed_task_open: -+ maintainer = {'name': self.pkg.name} -+ _maintainer = self._yaml_content.get('maintainers') -+ if _maintainer and isinstance(_maintainer, list): -+ maintainer['maintainer'] = _maintainer[0] -+ maintainer['maintainlevel'] = self._yaml_content.get( -+ 'maintainlevel') -+ -+ self.producer_consumer.put((maintainer, _save_maintainer_info)) -+ -+ def _parse_warehouse_info(self): -+ """ -+ Parse the warehouse information in the yaml file -+ -+ """ -+ if self._yaml_content: -+ self.pkg.version_control = 
self._yaml_content.get( -+ 'version_control') -+ self.pkg.src_repo = self._yaml_content.get('src_repo') -+ self.pkg.tag_prefix = self._yaml_content.get('tag_prefix') -+ -+ def _parse_tags_content(self, tags): -+ """ -+ Parse the obtained tags content -+ -+ """ -+ try: -+ # Integrate tags information into key-value pairs -+ _tags = [(tag.split()[0], tag.split()[1]) for tag in tags] -+ _tags = sorted(_tags, key=lambda x: x[0], reverse=True) -+ self.pkg.latest_version = _tags[0][1] -+ self.pkg.latest_version_time = _tags[0][0] -+ _end_time = date.datetime.strptime( -+ self.pkg.latest_version_time, '%Y-%m-%d') -+ if self.pkg.latest_version != self.pkg.version: -+ for _version in _tags: -+ if _version[1] == self.pkg.version: -+ _end_time = date.datetime.strptime( -+ _version[0], '%Y-%m-%d') -+ self.pkg.used_time = (date.datetime.now() - _end_time).days -+ -+ except (IndexError, Error) as index_error: -+ self.base.log.logger.error(index_error) -+ -+ -+def update_pkg_info(pkg_info_update=True): -+ """ -+ Update the information of the upstream warehouse in the source package -+ -+ """ -+ try: -+ base_control = Base() -+ _readconfig = ReadConfig(system_config.SYS_CONFIG_PATH) -+ pool_workers = _readconfig.get_config('LIFECYCLE', 'pool_workers') -+ _warehouse = _readconfig.get_config('LIFECYCLE', 'warehouse') -+ if _warehouse is None: -+ _warehouse = 'src-openeuler' -+ if not isinstance(pool_workers, int): -+ pool_workers = 10 -+ # Open thread pool -+ pool = ThreadPoolExecutor(max_workers=pool_workers) -+ with DBHelper(db_name="lifecycle") as database: -+ for table_name in filter(lambda x: x not in ['packages_issue', 'packages_maintainer', 'database_info'], -+ database.engine.table_names()): -+ -+ cls_model = Packages.package_meta(table_name) -+ # Query a specific table -+ for package_item in database.session.query(cls_model).all(): -+ if pkg_info_update: -+ parse_yaml = ParseYaml( -+ pkg_info=copy.deepcopy(package_item), -+ base=base_control, -+ table_name=table_name) -+ pool.submit(parse_yaml.update_database) -+ else: -+ # Get the issue of each warehouse and save it -+ gitee_issue = Gitee( -+ copy.deepcopy(package_item), _warehouse, package_item.name, table_name) -+ pool.submit(gitee_issue.query_issues_info) -+ pool.shutdown() -+ except SQLAlchemyError as error_msg: -+ base_control.log.logger.error(error_msg) -diff -Naru a/packageship/application/apps/lifecycle/function/gitee.py b/packageship/application/apps/lifecycle/function/gitee.py ---- a/packageship/application/apps/lifecycle/function/gitee.py 2020-09-22 23:34:04.037937224 +0800 -+++ b/packageship/application/apps/lifecycle/function/gitee.py 2020-09-22 23:48:52.698582219 +0800 -@@ -1,224 +1,223 @@ --#!/usr/bin/python3 --""" --Description:Get issue info from gitee --Class: Gitee --""" --import copy --from json import JSONDecodeError --from retrying import retry --import requests --from requests.exceptions import HTTPError --from sqlalchemy.exc import SQLAlchemyError --from packageship.libs.dbutils import DBHelper --from packageship.libs.configutils.readconfig import ReadConfig --from packageship.libs.exception import Error, ContentNoneException --from packageship.application.models.package import PackagesIssue --from packageship import system_config --from packageship.libs.log import Log --from .concurrent import ProducerConsumer -- --LOGGER = Log(__name__) -- -- --class Gitee(): -- """ -- gitee version management tool related information acquisition -- -- """ -- -- def __init__(self, pkg_info, owner, repo, table_name): -- self.pkg_info = 
pkg_info -- self.owner = owner -- self.repo = repo -- self._read_config = ReadConfig(system_config.SYS_CONFIG_PATH) -- self.url = "https://gitee.com/" -- self.api_url = "https://gitee.com/api/v5/repos" -- self.pool = None -- self.issue_id = None -- self.defect = 0 -- self.feature = 0 -- self.cve = 0 -- self.patch_files_path = self._read_config.get_system( -- "patch_files_path") -- self.table_name = table_name -- self.producer_consumer = ProducerConsumer() -- -- def query_issues_info(self, issue_id=""): -- """ -- Description: View the issue details of the specified package -- Args: -- issue_id: Issue id -- Returns: -- issue_content_list: The issue details of the specified package list -- Raises: -- -- """ -- issue_url = self.api_url + \ -- "/{}/{}/issues/{}".format(self.owner, self.repo, issue_id) -- try: -- response = requests.get( -- issue_url, params={"state": "all", "per_page": 100}) -- except Error as error: -- LOGGER.logger.error(error) -- return None -- if response.status_code != 200: -- return None -- total_page = 1 if issue_id else int(response.headers['total_page']) -- total_count = int(response.headers['total_count']) -- if total_count > 0: -- issue_list = self._query_per_page_issue_info(total_page, issue_url) -- if not issue_list: -- LOGGER.logger.error( -- "An error occurred while querying {}".format(self.repo)) -- return None -- self._save_issues(issue_list) -- -- def _query_per_page_issue_info(self, total_page, issue_url): -- """ -- Description: View the issue details -- Args: -- total_page: total page -- issue_url: issue url -- -- Returns: -- -- """ -- issue_content_list = [] -- for i in range(1, total_page + 1): -- -- @retry(stop_max_attempt_number=3, stop_max_delay=1000) -- def request_issue(page, issue_url): -- try: -- response = requests.get(issue_url, -- params={"state": "all", "per_page": 100, "page": page}) -- except HTTPError: -- raise HTTPError('Network request error') -- return response -- -- try: -- response = request_issue(i, issue_url) -- if response.status_code != 200: -- LOGGER.logger.warning(response.content.decode("utf-8")) -- continue -- issue_content_list.extend( -- self.parse_issues_content(response.json())) -- except (JSONDecodeError, Error) as error: -- LOGGER.logger.error(error) -- return issue_content_list -- -- def _save_issues(self, issue_list): -- """ -- Save the obtained issue information -- -- """ -- try: -- issue_ids = [issue['issue_id'] for issue in issue_list] -- with DBHelper(db_name="lifecycle") as database: -- -- @retry(stop_max_attempt_number=3, stop_max_delay=500) -- def _query_pkgissues(): -- exist_issues = database.session.query(PackagesIssue).filter( -- PackagesIssue.issue_id.in_(issue_ids)).all() # pylint: disable=protected-access -- return exist_issues -- -- exist_issues = _query_pkgissues() -- # Save the issue -- for issue_item in issue_list: -- issue_model = [ -- issue for issue in exist_issues if issue.issue_id == issue_item['issue_id']] -- if issue_model: -- for key, val in issue_item.items(): -- setattr(issue_model[0], key, val) -- self.producer_consumer.put( -- copy.deepcopy(issue_model[0])) -- else: -- self.producer_consumer.put( -- PackagesIssue(**issue_item)) -- -- # The number of various issues in the update package -- self.pkg_info.defect = self.defect -- self.pkg_info.feature = self.feature -- self.pkg_info.cve = self.cve -- self.producer_consumer.put(copy.deepcopy(self.pkg_info)) -- -- except (Error, ContentNoneException, SQLAlchemyError) as error: -- LOGGER.logger.error( -- 'An abnormal error occurred while saving 
related issues:%s' % error if error else '') -- -- def parse_issues_content(self, sources): -- """ -- Description: Parse the response content and get issue content -- Args:Issue list -- -- Returns:list:issue_id, issue_url, issue_content, issue_status, issue_download -- Raises: -- """ -- result_list = [] -- if isinstance(sources, list): -- for source in sources: -- issue_content = self.parse_issue_content(source) -- if issue_content: -- result_list.append(issue_content) -- else: -- issue_content = self.parse_issue_content(sources) -- if issue_content: -- result_list.append(issue_content) -- return result_list -- -- def parse_issue_content(self, source): -- """ -- Description: Parse the response content and get issue content -- Args: Source of issue content -- -- Returns:list:issue_id, issue_url, issue_content, issue_status, issue_download, issue_status -- issue_type, related_release -- Raises:KeyError -- """ -- try: -- result_dict = {"issue_id": source['number'], "issue_url": source['html_url'], -- "issue_title": source['title'].strip(), -- "issue_content": source['body'].strip(), -- "issue_status": source['state'], "issue_download": "", -- "issue_type": source["issue_type"], -- "pkg_name": self.repo, -- "related_release": source["labels"][0]['name'] if source["labels"] else ''} -- if source["issue_type"] == "缺陷": -- self.defect += 1 -- elif source["issue_type"] == "需求": -- self.feature += 1 -- elif source["issue_type"] == "CVE和安全问题": -- self.cve += 1 -- else: -- pass -- except KeyError as error: -- LOGGER.logger.error(error) -- return None -- return result_dict -- -- def issue_hooks(self, issue_hook_info): -- """ -- Description: Hook data triggered by a new task operation -- Args: -- issue_hook_info: Issue info -- Returns: -- -- Raises: -- -- """ -- if issue_hook_info is None: -- raise ContentNoneException( -- 'The content cannot be empty') -- issue_info_list = [] -- issue_info = issue_hook_info["issue"] -- issue_content = self.parse_issue_content(issue_info) -- if issue_content: -- issue_info_list.append(issue_content) -- if self.feature != 0: -- self.defect, self.feature, self.cve = self.pkg_info.defect, self.pkg_info.feature + \ -- 1, self.pkg_info.cve -- if self.defect != 0: -- self.defect, self.feature, self.cve = self.pkg_info.defect + \ -- 1, self.pkg_info.feature, self.pkg_info.cve -- if self.cve != 0: -- self.defect, self.feature, self.cve = self.pkg_info.defect, self.pkg_info.feature, self.pkg_info.cve + 1 -- self._save_issues(issue_info_list) -+#!/usr/bin/python3 -+""" -+Description:Get issue info from gitee -+Class: Gitee -+""" -+import copy -+from json import JSONDecodeError -+from retrying import retry -+import requests -+from requests.exceptions import HTTPError -+from sqlalchemy.exc import SQLAlchemyError -+from packageship.libs.dbutils import DBHelper -+from packageship.libs.configutils.readconfig import ReadConfig -+from packageship.libs.exception import Error, ContentNoneException -+from packageship.application.models.package import PackagesIssue -+from packageship import system_config -+from packageship.libs.log import Log -+from .concurrent import ProducerConsumer -+ -+LOGGER = Log(__name__) -+ -+ -+class Gitee(): -+ """ -+ gitee version management tool related information acquisition -+ -+ """ -+ -+ def __init__(self, pkg_info, owner, repo, table_name): -+ self.pkg_info = pkg_info -+ self.owner = owner -+ self.repo = repo -+ self._read_config = ReadConfig(system_config.SYS_CONFIG_PATH) -+ self.url = "https://gitee.com/" -+ self.api_url = 
"https://gitee.com/api/v5/repos" -+ self.pool = None -+ self.issue_id = None -+ self.defect = 0 -+ self.feature = 0 -+ self.cve = 0 -+ self.patch_files_path = self._read_config.get_system( -+ "patch_files_path") -+ self.table_name = table_name -+ self.producer_consumer = ProducerConsumer() -+ -+ def query_issues_info(self, issue_id=""): -+ """ -+ Description: View the issue details of the specified package -+ Args: -+ issue_id: Issue id -+ Returns: -+ issue_content_list: The issue details of the specified package list -+ Raises: -+ -+ """ -+ issue_url = self.api_url + \ -+ "/{}/{}/issues/{}".format(self.owner, self.repo, issue_id) -+ try: -+ response = requests.get( -+ issue_url, params={"state": "all", "per_page": 100}) -+ except Error as error: -+ LOGGER.logger.error(error) -+ return None -+ if response.status_code != 200: -+ return None -+ total_page = 1 if issue_id else int(response.headers['total_page']) -+ total_count = int(response.headers['total_count']) -+ if total_count > 0: -+ issue_list = self._query_per_page_issue_info(total_page, issue_url) -+ if not issue_list: -+ LOGGER.logger.error( -+ "An error occurred while querying {}".format(self.repo)) -+ return None -+ self._save_issues(issue_list) -+ -+ def _query_per_page_issue_info(self, total_page, issue_url): -+ """ -+ Description: View the issue details -+ Args: -+ total_page: total page -+ issue_url: issue url -+ -+ Returns: -+ -+ """ -+ issue_content_list = [] -+ for i in range(1, total_page + 1): -+ -+ @retry(stop_max_attempt_number=3, stop_max_delay=1000) -+ def request_issue(page, issue_url): -+ try: -+ response = requests.get(issue_url, -+ params={"state": "all", "per_page": 100, "page": page}) -+ except HTTPError: -+ raise HTTPError('Network request error') -+ return response -+ -+ try: -+ response = request_issue(i, issue_url) -+ if response.status_code != 200: -+ LOGGER.logger.warning(response.content.decode("utf-8")) -+ continue -+ issue_content_list.extend( -+ self.parse_issues_content(response.json())) -+ except (JSONDecodeError, Error) as error: -+ LOGGER.logger.error(error) -+ return issue_content_list -+ -+ def _save_issues(self, issue_list): -+ """ -+ Save the obtained issue information -+ -+ """ -+ try: -+ def _save(issue_module): -+ with DBHelper(db_name='lifecycle') as database: -+ -+ exist_issues = database.session.query(PackagesIssue).filter( -+ PackagesIssue.issue_id == issue_module['issue_id']).first() -+ if exist_issues: -+ -+ # Save the issue -+ for key, val in issue_module.items(): -+ setattr(exist_issues, key, val) -+ else: -+ exist_issues = PackagesIssue(**issue_module) -+ database.add(exist_issues) -+ -+ def _save_package(package_module): -+ with DBHelper(db_name='lifecycle') as database: -+ database.add(package_module) -+ -+ for issue_item in issue_list: -+ self.producer_consumer.put( -+ (copy.deepcopy(issue_item), _save)) -+ -+ # The number of various issues in the update package -+ self.pkg_info.defect = self.defect -+ self.pkg_info.feature = self.feature -+ self.pkg_info.cve = self.cve -+ self.producer_consumer.put((copy.deepcopy(self.pkg_info), _save_package)) -+ -+ except (Error, ContentNoneException, SQLAlchemyError) as error: -+ LOGGER.logger.error( -+ 'An abnormal error occurred while saving related issues:%s' % error if error else '') -+ -+ def parse_issues_content(self, sources): -+ """ -+ Description: Parse the response content and get issue content -+ Args:Issue list -+ -+ Returns:list:issue_id, issue_url, issue_content, issue_status, issue_download -+ Raises: -+ """ -+ result_list = 
[] -+ if isinstance(sources, list): -+ for source in sources: -+ issue_content = self.parse_issue_content(source) -+ if issue_content: -+ result_list.append(issue_content) -+ else: -+ issue_content = self.parse_issue_content(sources) -+ if issue_content: -+ result_list.append(issue_content) -+ return result_list -+ -+ def parse_issue_content(self, source): -+ """ -+ Description: Parse the response content and get issue content -+ Args: Source of issue content -+ -+ Returns:list:issue_id, issue_url, issue_content, issue_status, issue_download, issue_status -+ issue_type, related_release -+ Raises:KeyError -+ """ -+ try: -+ result_dict = {"issue_id": source['number'], "issue_url": source['html_url'], -+ "issue_title": source['title'].strip(), -+ "issue_content": source['body'].strip(), -+ "issue_status": source['state'], "issue_download": "", -+ "issue_type": source["issue_type"], -+ "pkg_name": self.repo, -+ "related_release": source["labels"][0]['name'] if source["labels"] else ''} -+ if source["issue_type"] == "缺陷": -+ self.defect += 1 -+ elif source["issue_type"] == "需求": -+ self.feature += 1 -+ elif source["issue_type"] == "CVE和安全问题": -+ self.cve += 1 -+ else: -+ pass -+ except KeyError as error: -+ LOGGER.logger.error(error) -+ return None -+ return result_dict -+ -+ def issue_hooks(self, issue_hook_info): -+ """ -+ Description: Hook data triggered by a new task operation -+ Args: -+ issue_hook_info: Issue info -+ Returns: -+ -+ Raises: -+ -+ """ -+ if issue_hook_info is None: -+ raise ContentNoneException( -+ 'The content cannot be empty') -+ issue_info_list = [] -+ issue_info = issue_hook_info["issue"] -+ issue_content = self.parse_issue_content(issue_info) -+ if issue_content: -+ issue_info_list.append(issue_content) -+ if self.feature != 0: -+ self.defect, self.feature, self.cve = self.pkg_info.defect, self.pkg_info.feature + \ -+ 1, self.pkg_info.cve -+ if self.defect != 0: -+ self.defect, self.feature, self.cve = self.pkg_info.defect + \ -+ 1, self.pkg_info.feature, self.pkg_info.cve -+ if self.cve != 0: -+ self.defect, self.feature, self.cve = self.pkg_info.defect, self.pkg_info.feature, self.pkg_info.cve + 1 -+ self._save_issues(issue_info_list) -diff -Naru a/packageship/application/apps/lifecycle/view.py b/packageship/application/apps/lifecycle/view.py ---- a/packageship/application/apps/lifecycle/view.py 2020-09-22 23:34:04.037937224 +0800 -+++ b/packageship/application/apps/lifecycle/view.py 2020-09-22 23:52:49.731821183 +0800 -@@ -1,760 +1,760 @@ --#!/usr/bin/python3 --""" --Life cycle related api interface --""" --import io --import json --import math --import os --from concurrent.futures import ThreadPoolExecutor -- --import pandas as pd --import yaml -- --from flask import request --from flask import jsonify, make_response --from flask import current_app --from flask_restful import Resource --from marshmallow import ValidationError -- --from sqlalchemy.exc import DisconnectionError, SQLAlchemyError -- --from packageship import system_config --from packageship.libs.configutils.readconfig import ReadConfig --from packageship.libs.exception import Error --from packageship.application.apps.package.function.constants import ResponseCode --from packageship.libs.dbutils.sqlalchemy_helper import DBHelper --from packageship.application.models.package import PackagesIssue --from packageship.application.models.package import Packages --from packageship.application.models.package import PackagesMaintainer --from packageship.libs.log import Log --from .serialize import 
IssueDownloadSchema, PackagesDownloadSchema, IssuePageSchema, IssueSchema --from ..package.serialize import DataFormatVerfi, UpdatePackagesSchema --from .function.gitee import Gitee as gitee -- --LOGGER = Log(__name__) -- -- --# pylint: disable = no-self-use -- --class DownloadFile(Resource): -- """ -- Download the content of the issue or the excel file of the package content -- """ -- -- def _download_excel(self, file_type, table_name=None): -- """ -- Download excel file -- """ -- file_name = 'packages.xlsx' -- if file_type == 'packages': -- download_content = self.__get_packages_content(table_name) -- else: -- file_name = 'issues.xlsx' -- download_content = self.__get_issues_content() -- if download_content is None: -- return jsonify( -- ResponseCode.response_json( -- ResponseCode.SERVICE_ERROR)) -- pd_dataframe = self.__to_dataframe(download_content) -- -- _response = self.__bytes_save(pd_dataframe) -- return self.__set_response_header(_response, file_name) -- -- def __bytes_save(self, data_frame): -- """ -- Save the file content in the form of a binary file stream -- """ -- try: -- bytes_io = io.BytesIO() -- writer = pd.ExcelWriter( # pylint: disable=abstract-class-instantiated -- bytes_io, engine='xlsxwriter') -- data_frame.to_excel(writer, sheet_name='Summary', index=False) -- writer.save() -- writer.close() -- bytes_io.seek(0) -- _response = make_response(bytes_io.getvalue()) -- bytes_io.close() -- return _response -- except (IOError, Error) as io_error: -- current_app.logger.error(io_error) -- return make_response() -- -- def __set_response_header(self, response, file_name): -- """ -- Set http response header information -- """ -- response.headers['Content-Type'] = \ -- "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet" -- response.headers["Cache-Control"] = "no-cache" -- response.headers['Content-Disposition'] = 'attachment; filename={file_name}'.format( -- file_name=file_name) -- return response -- -- def __get_packages_content(self, table_name): -- """ -- Get package list information -- """ -- try: -- with DBHelper(db_name='lifecycle') as database: -- # Query all package data in the specified table -- _model = Packages.package_meta(table_name) -- _packageinfos = database.session.query(_model).all() -- packages_dicts = PackagesDownloadSchema( -- many=True).dump(_packageinfos) -- return packages_dicts -- -- except (SQLAlchemyError, DisconnectionError) as error: -- current_app.logger.error(error) -- return None -- -- def __get_issues_content(self): -- """ -- Get the list of issues -- """ -- try: -- with DBHelper(db_name='lifecycle') as database: -- _issues = database.session.query(PackagesIssue).all() -- issues_dicts = IssueDownloadSchema(many=True).dump(_issues) -- return issues_dicts -- except (SQLAlchemyError, DisconnectionError) as error: -- current_app.logger.error(error) -- return None -- -- def __to_dataframe(self, datas): -- """ -- Convert the obtained information into pandas content format -- """ -- -- data_frame = pd.DataFrame(datas) -- return data_frame -- -- def get(self, file_type): -- """ -- Download package collection information and isse list information -- -- """ -- if file_type not in ['packages', 'issues']: -- return jsonify( -- ResponseCode.response_json( -- ResponseCode.PARAM_ERROR)) -- -- table_name = request.args.get('table_name', None) -- response = self._download_excel(file_type, table_name) -- return response -- -- --class MaintainerView(Resource): -- """ -- Maintainer name collection -- """ -- -- def __query_maintainers(self): -- """ -- 
Query the names of all maintainers in the specified table -- """ -- try: -- with DBHelper(db_name='lifecycle') as database: -- maintainers = database.session.query( -- PackagesMaintainer.maintainer).group_by(PackagesMaintainer.maintainer).all() -- return [maintainer_item[0] for maintainer_item in maintainers -- if maintainer_item[0]] -- except (SQLAlchemyError, DisconnectionError) as error: -- current_app.logger.error(error) -- return [] -- -- def get(self): -- """ -- Get the list of maintainers -- """ -- # Group query of the names of all maintainers in the current table -- maintainers = self.__query_maintainers() -- return jsonify(ResponseCode.response_json( -- ResponseCode.SUCCESS, -- maintainers)) -- -- --class TableColView(Resource): -- """ -- The default column of the package shows the interface -- """ -- -- def __columns_names(self): -- """ -- Mapping of column name and title -- """ -- columns = [ -- ('name', 'Name', True), -- ('version', 'Version', True), -- ('release', 'Release', True), -- ('url', 'Url', True), -- ('rpm_license', 'License', False), -- ('feature', 'Feature', False), -- ('maintainer', 'Maintainer', True), -- ('maintainlevel', 'Maintenance Level', True), -- ('release_time', 'Release Time', False), -- ('used_time', 'Used Time', True), -- ('maintainer_status', 'Maintain Status', True), -- ('latest_version', 'Latest Version', False), -- ('latest_version_time', 'Latest Version Release Time', False), -- ('issue', 'Issue', True)] -- return columns -- -- def __columns_mapping(self): -- """ -- -- """ -- columns = list() -- for column in self.__columns_names(): -- columns.append({ -- 'column_name': column[0], -- 'label': column[1], -- 'default_selected': column[2] -- }) -- return columns -- -- def get(self): -- """ -- Get the default display column of the package -- -- """ -- table_mapping_columns = self.__columns_mapping() -- return jsonify( -- ResponseCode.response_json( -- ResponseCode.SUCCESS, -- table_mapping_columns)) -- -- --class LifeTables(Resource): -- """ -- description: LifeTables -- Restful API: get -- ChangeLog: -- """ -- -- def get(self): -- """ -- return all table names in the database -- -- Returns: -- Return the table names in the database as a list -- """ -- try: -- with DBHelper(db_name="lifecycle") as database_name: -- # View all table names in the package-info database -- all_table_names = database_name.engine.table_names() -- all_table_names.remove("packages_issue") -- all_table_names.remove("packages_maintainer") -- return jsonify( -- ResponseCode.response_json( -- ResponseCode.SUCCESS, data=all_table_names) -- ) -- except (SQLAlchemyError, DisconnectionError, Error, ValueError) as sql_error: -- LOGGER.logger.error(sql_error) -- return jsonify( -- ResponseCode.response_json(ResponseCode.DATABASE_NOT_FOUND) -- ) -- -- --class IssueView(Resource): -- """ -- Issue content collection -- """ -- -- def _query_issues(self, request_data): -- """ -- Args: -- request_data: -- Returns: -- """ -- try: -- with DBHelper(db_name='lifecycle') as database: -- issues_query = database.session.query(PackagesIssue.issue_id, -- PackagesIssue.issue_url, -- PackagesIssue.issue_title, -- PackagesIssue.issue_status, -- PackagesIssue.pkg_name, -- PackagesIssue.issue_type, -- PackagesMaintainer.maintainer). 
\ -- outerjoin(PackagesMaintainer, -- PackagesMaintainer.name == PackagesIssue.pkg_name) -- if request_data.get("pkg_name"): -- issues_query = issues_query.filter( -- PackagesIssue.pkg_name == request_data.get("pkg_name")) -- if request_data.get("issue_type"): -- issues_query = issues_query.filter( -- PackagesIssue.issue_type == request_data.get("issue_type")) -- if request_data.get("issue_status"): -- issues_query = issues_query.filter( -- PackagesIssue.issue_status == request_data.get("issue_status")) -- if request_data.get("maintainer"): -- issues_query = issues_query.filter( -- PackagesMaintainer.maintainer == request_data.get("maintainer")) -- total_count = issues_query.count() -- total_page = math.ceil( -- total_count / int(request_data.get("page_size"))) -- issues_query = issues_query.limit(request_data.get("page_size")).offset( -- (int(request_data.get("page_num")) - 1) * int(request_data.get("page_size"))) -- issue_dicts = IssuePageSchema( -- many=True).dump(issues_query.all()) -- issue_data = ResponseCode.response_json( -- ResponseCode.SUCCESS, issue_dicts) -- issue_data['total_count'] = total_count -- issue_data['total_page'] = total_page -- return issue_data -- except (SQLAlchemyError, DisconnectionError) as error: -- current_app.logger.error(error) -- return ResponseCode.response_json(ResponseCode.DATABASE_NOT_FOUND) -- -- def get(self): -- """ -- Description: Get all issues info or one specific issue -- Args: -- Returns: -- [ -- { -- "issue_id": "", -- "issue_url": "", -- "issue_title": "", -- "issue_content": "", -- "issue_status": "", -- "issue_type": "" -- }, -- ] -- Raises: -- DisconnectionError: Unable to connect to database exception -- AttributeError: Object does not have this property -- TypeError: Exception of type -- Error: Abnormal error -- """ -- schema = IssueSchema() -- if schema.validate(request.args): -- return jsonify( -- ResponseCode.response_json(ResponseCode.PARAM_ERROR) -- ) -- issue_dict = self._query_issues(request.args) -- return issue_dict -- -- --class IssueType(Resource): -- """ -- Issue type collection -- """ -- -- def _get_issue_type(self): -- """ -- Description: Query issue type -- Returns: -- """ -- try: -- with DBHelper(db_name='lifecycle') as database: -- issues_query = database.session.query(PackagesIssue.issue_type).group_by( -- PackagesIssue.issue_type).all() -- return jsonify(ResponseCode.response_json( -- ResponseCode.SUCCESS, [issue_query[0] for issue_query in issues_query])) -- except (SQLAlchemyError, DisconnectionError) as error: -- current_app.logger.error(error) -- return jsonify(ResponseCode.response_json( -- ResponseCode.PARAM_ERROR)) -- -- def get(self): -- """ -- Description: Get all issues info or one specific issue -- Args: -- Returns: -- [ -- "issue_type", -- "issue_type" -- ] -- Raises: -- DisconnectionError: Unable to connect to database exception -- AttributeError: Object does not have this property -- TypeError: Exception of type -- Error: Abnormal error -- """ -- return self._get_issue_type() -- -- --class IssueStatus(Resource): -- """ -- Issue status collection -- """ -- -- def _get_issue_status(self): -- """ -- Description: Query issue status -- Returns: -- """ -- try: -- with DBHelper(db_name='lifecycle') as database: -- issues_query = database.session.query(PackagesIssue.issue_status).group_by( -- PackagesIssue.issue_status).all() -- return jsonify(ResponseCode.response_json( -- ResponseCode.SUCCESS, [issue_query[0] for issue_query in issues_query])) -- except (SQLAlchemyError, DisconnectionError) as error: -- 
current_app.logger.error(error) -- return jsonify(ResponseCode.response_json( -- ResponseCode.PARAM_ERROR)) -- -- def get(self): -- """ -- Description: Get all issues info or one specific issue -- Args: -- Returns: -- [ -- "issue_status", -- "issue_status" -- ] -- Raises: -- DisconnectionError: Unable to connect to database exception -- AttributeError: Object does not have this property -- TypeError: Exception of type -- Error: Abnormal error -- """ -- return self._get_issue_status() -- -- --class IssueCatch(Resource): -- """ -- description: Catch issue content -- Restful API: put -- ChangeLog: -- """ -- -- def post(self): -- """ -- Searching issue content -- Args: -- Returns: -- for examples: -- [ -- { -- "issue_id": "", -- "issue_url": "", -- "issue_title": "", -- "issue_content": "", -- "issue_status": "", -- "issue_type": "" -- }, -- ] -- Raises: -- DisconnectionError: Unable to connect to database exception -- AttributeError: Object does not have this property -- TypeError: Exception of type -- Error: Abnormal error -- """ -- data = json.loads(request.get_data()) -- if not isinstance(data, dict): -- return jsonify( -- ResponseCode.response_json(ResponseCode.PARAM_ERROR)) -- pkg_name = data["repository"]["path"] -- try: -- _readconfig = ReadConfig(system_config.SYS_CONFIG_PATH) -- pool_workers = _readconfig.get_config('LIFECYCLE', 'pool_workers') -- _warehouse = _readconfig.get_config('LIFECYCLE', 'warehouse') -- if _warehouse is None: -- _warehouse = 'src-openeuler' -- if not isinstance(pool_workers, int): -- pool_workers = 10 -- pool = ThreadPoolExecutor(max_workers=pool_workers) -- with DBHelper(db_name="lifecycle") as database: -- for table_name in filter(lambda x: x not in ['packages_issue', 'packages_maintainer'], -- database.engine.table_names()): -- cls_model = Packages.package_meta(table_name) -- for package_item in database.session.query(cls_model).filter( -- cls_model.name == pkg_name).all(): -- gitee_issue = gitee( -- package_item, _warehouse, package_item.name, table_name) -- pool.submit(gitee_issue.issue_hooks, data) -- pool.shutdown() -- return jsonify(ResponseCode.response_json(ResponseCode.SUCCESS)) -- except SQLAlchemyError as error_msg: -- current_app.logger.error(error_msg) -- -- --class UpdatePackages(Resource): -- """ -- description:Life cycle update information of a single package -- Restful API: post -- ChangeLog: -- """ -- -- def _get_all_yaml_name(self, filepath): -- """ -- List of all yaml file names in the folder -- -- Args: -- filepath: file path -- -- Returns: -- yaml_file_list:List of all yaml file names in the folder -- -- Attributes: -- Error:Error -- NotADirectoryError:Invalid directory name -- FileNotFoundError:File not found error -- -- """ -- try: -- yaml_file_list = os.listdir(filepath) -- return yaml_file_list -- except (Error, NotADirectoryError, FileNotFoundError) as error: -- current_app.logger.error(error) -- return None -- -- def _get_yaml_content(self, yaml_file, filepath): -- """ -- Read the content of the yaml file -- -- Args: -- yaml_file: yaml file -- filepath: file path -- -- Returns: -- Return a dictionary containing name, maintainer and maintainlevel -- """ -- yaml_data_dict = dict() -- if not yaml_file.endswith(".yaml"): -- return None -- pkg_name = yaml_file.rsplit('.yaml')[0] -- single_yaml_path = os.path.join(filepath, yaml_file) -- with open(single_yaml_path, 'r', encoding='utf-8') as file_context: -- yaml_flie_data = yaml.load( -- file_context.read(), Loader=yaml.FullLoader) -- if yaml_flie_data is None or not 
isinstance(yaml_flie_data, dict): -- return None -- maintainer = yaml_flie_data.get("maintainer") -- maintainlevel = yaml_flie_data.get("maintainlevel") -- yaml_data_dict['name'] = pkg_name -- if maintainer: -- yaml_data_dict['maintainer'] = maintainer -- if maintainlevel: -- yaml_data_dict['maintainlevel'] = maintainlevel -- return yaml_data_dict -- -- def _read_yaml_file(self, filepath): -- """ -- Read the yaml file and combine the data of the nested dictionary of the list -- -- Args: -- filepath: file path -- -- Returns: -- yaml.YAMLError:yaml file error -- SQLAlchemyError:SQLAlchemy Error -- DisconnectionError:Connect to database error -- Error:Error -- """ -- yaml_file_list = self._get_all_yaml_name(filepath) -- if not yaml_file_list: -- return None -- try: -- yaml_data_list = list() -- _readconfig = ReadConfig(system_config.SYS_CONFIG_PATH) -- pool_workers = _readconfig.get_config('LIFECYCLE', 'pool_workers') -- if not isinstance(pool_workers, int): -- pool_workers = 10 -- with ThreadPoolExecutor(max_workers=pool_workers) as pool: -- for yaml_file in yaml_file_list: -- pool_result = pool.submit( -- self._get_yaml_content, yaml_file, filepath) -- yaml_data_dict = pool_result.result() -- yaml_data_list.append(yaml_data_dict) -- return yaml_data_list -- except (yaml.YAMLError, SQLAlchemyError, DisconnectionError, Error) as error: -- current_app.logger.error(error) -- return None -- -- def _verification_yaml_data_list(self, yaml_data_list): -- """ -- Verify the data obtained in the yaml file -- -- Args: -- yaml_data_list: yaml data list -- -- Returns: -- yaml_data_list: After verification yaml data list -- -- Attributes: -- ValidationError: Validation error -- -- """ -- try: -- DataFormatVerfi(many=True).load(yaml_data_list) -- return yaml_data_list -- except ValidationError as error: -- current_app.logger.error(error.messages) -- return None -- -- def _save_in_database(self, yaml_data_list): -- """ -- Save the data to the database -- -- Args: -- tbname: Table Name -- name_separate_list: Split name list -- _update_pack_data: Split new list of combined data -- -- Returns: -- SUCCESS or UPDATA_DATA_FAILED -- -- Attributes -- DisconnectionError: Connect to database error -- SQLAlchemyError: SQLAlchemy Error -- Error: Error -- -- """ -- try: -- with DBHelper(db_name="lifecycle") as database_name: -- if 'packages_maintainer' not in database_name.engine.table_names(): -- return jsonify(ResponseCode.response_json( -- ResponseCode.TABLE_NAME_NOT_EXIST)) -- database_name.session.begin(subtransactions=True) -- for yaml_data in yaml_data_list: -- name = yaml_data.get("name") -- maintainer = yaml_data.get("maintainer") -- maintainlevel = yaml_data.get("maintainlevel") -- packages_maintainer_obj = database_name.session.query( -- PackagesMaintainer).filter_by(name=name).first() -- if packages_maintainer_obj: -- if maintainer: -- packages_maintainer_obj.maintainer = maintainer -- if maintainlevel: -- packages_maintainer_obj.maintainlevel = maintainlevel -- else: -- database_name.add(PackagesMaintainer( -- name=name, maintainer=maintainer, maintainlevel=maintainlevel -- )) -- database_name.session.commit() -- return jsonify(ResponseCode.response_json( -- ResponseCode.SUCCESS)) -- except (DisconnectionError, SQLAlchemyError, Error, AttributeError) as error: -- current_app.logger.error(error) -- return jsonify(ResponseCode.response_json( -- ResponseCode.UPDATA_DATA_FAILED)) -- -- def _overall_process( -- self, -- filepath): -- """ -- Call each method to complete the entire function -- -- Args: -- 
filepath: file path -- tbname: table name -- -- Returns: -- SUCCESS or UPDATA_DATA_FAILED -- -- Attributes -- DisconnectionError: Connect to database error -- SQLAlchemyError: SQLAlchemy Error -- Error: Error -- """ -- try: -- if filepath is None or not os.path.exists(filepath): -- return jsonify(ResponseCode.response_json( -- ResponseCode.SPECIFIED_FILE_NOT_EXIST)) -- yaml_file_list = self._get_all_yaml_name(filepath) -- if not yaml_file_list: -- return jsonify(ResponseCode.response_json( -- ResponseCode.EMPTY_FOLDER)) -- yaml_data_list_result = self._read_yaml_file(filepath) -- yaml_data_list = self._verification_yaml_data_list( -- yaml_data_list_result) -- if yaml_data_list is None: -- return jsonify(ResponseCode.response_json( -- ResponseCode.YAML_FILE_ERROR)) -- result = self._save_in_database( -- yaml_data_list) -- return result -- except (DisconnectionError, SQLAlchemyError, Error) as error: -- current_app.logger.error(error) -- return jsonify(ResponseCode.response_json( -- ResponseCode.UPDATA_DATA_FAILED)) -- -- def _update_single_package_info( -- self, srcname, maintainer, maintainlevel): -- """ -- Update the maintainer field and maintainlevel -- field of a single package -- -- Args: -- srcname: The name of the source package -- maintainer: Package maintainer -- maintainlevel: Package maintenance level -- -- Returns: -- success or failed -- -- Attributes -- SQLAlchemyError: sqlalchemy error -- DisconnectionError: Cannot connect to database error -- Error: Error -- """ -- if not srcname: -- return jsonify( -- ResponseCode.response_json(ResponseCode.PACK_NAME_NOT_FOUND) -- ) -- if not maintainer and not maintainlevel: -- return jsonify( -- ResponseCode.response_json(ResponseCode.PARAM_ERROR) -- ) -- try: -- with DBHelper(db_name='lifecycle') as database_name: -- if 'packages_maintainer' not in database_name.engine.table_names(): -- return jsonify(ResponseCode.response_json( -- ResponseCode.TABLE_NAME_NOT_EXIST)) -- update_obj = database_name.session.query( -- PackagesMaintainer).filter_by(name=srcname).first() -- if update_obj: -- if maintainer: -- update_obj.maintainer = maintainer -- if maintainlevel: -- update_obj.maintainlevel = maintainlevel -- else: -- database_name.add(PackagesMaintainer( -- name=srcname, maintainer=maintainer, maintainlevel=maintainlevel -- )) -- database_name.session.commit() -- return jsonify( -- ResponseCode.response_json( -- ResponseCode.SUCCESS)) -- except (SQLAlchemyError, DisconnectionError, Error) as sql_error: -- current_app.logger.error(sql_error) -- database_name.session.rollback() -- return jsonify(ResponseCode.response_json( -- ResponseCode.UPDATA_DATA_FAILED -- )) -- -- def put(self): -- """ -- Life cycle update information of a single package or -- All packages -- -- Returns: -- for example:: -- { -- "code": "", -- "data": "", -- "msg": "" -- } -- """ -- schema = UpdatePackagesSchema() -- data = request.get_json() -- if schema.validate(data): -- return jsonify( -- ResponseCode.response_json(ResponseCode.PARAM_ERROR) -- ) -- srcname = data.get('pkg_name', None) -- maintainer = data.get('maintainer', None) -- maintainlevel = data.get('maintainlevel', None) -- batch = data.get('batch') -- filepath = data.get('filepath', None) -- -- if batch: -- result = self._overall_process(filepath) -- else: -- result = self._update_single_package_info( -- srcname, maintainer, maintainlevel) -- return result -+#!/usr/bin/python3 -+""" -+Life cycle related api interface -+""" -+import io -+import json -+import math -+import os -+from concurrent.futures import 
ThreadPoolExecutor -+ -+import pandas as pd -+import yaml -+ -+from flask import request -+from flask import jsonify, make_response -+from flask import current_app -+from flask_restful import Resource -+from marshmallow import ValidationError -+ -+from sqlalchemy.exc import DisconnectionError, SQLAlchemyError -+ -+from packageship import system_config -+from packageship.libs.configutils.readconfig import ReadConfig -+from packageship.libs.exception import Error -+from packageship.application.apps.package.function.constants import ResponseCode -+from packageship.libs.dbutils.sqlalchemy_helper import DBHelper -+from packageship.application.models.package import PackagesIssue -+from packageship.application.models.package import Packages -+from packageship.application.models.package import PackagesMaintainer -+from packageship.libs.log import Log -+from .serialize import IssueDownloadSchema, PackagesDownloadSchema, IssuePageSchema, IssueSchema -+from ..package.serialize import DataFormatVerfi, UpdatePackagesSchema -+from .function.gitee import Gitee as gitee -+ -+LOGGER = Log(__name__) -+ -+ -+# pylint: disable = no-self-use -+ -+class DownloadFile(Resource): -+ """ -+ Download the content of the issue or the excel file of the package content -+ """ -+ -+ def _download_excel(self, file_type, table_name=None): -+ """ -+ Download excel file -+ """ -+ file_name = 'packages.xlsx' -+ if file_type == 'packages': -+ download_content = self.__get_packages_content(table_name) -+ else: -+ file_name = 'issues.xlsx' -+ download_content = self.__get_issues_content() -+ if download_content is None: -+ return jsonify( -+ ResponseCode.response_json( -+ ResponseCode.SERVICE_ERROR)) -+ pd_dataframe = self.__to_dataframe(download_content) -+ -+ _response = self.__bytes_save(pd_dataframe) -+ return self.__set_response_header(_response, file_name) -+ -+ def __bytes_save(self, data_frame): -+ """ -+ Save the file content in the form of a binary file stream -+ """ -+ try: -+ bytes_io = io.BytesIO() -+ writer = pd.ExcelWriter( # pylint: disable=abstract-class-instantiated -+ bytes_io, engine='xlsxwriter') -+ data_frame.to_excel(writer, sheet_name='Summary', index=False) -+ writer.save() -+ writer.close() -+ bytes_io.seek(0) -+ _response = make_response(bytes_io.getvalue()) -+ bytes_io.close() -+ return _response -+ except (IOError, Error) as io_error: -+ current_app.logger.error(io_error) -+ return make_response() -+ -+ def __set_response_header(self, response, file_name): -+ """ -+ Set http response header information -+ """ -+ response.headers['Content-Type'] = \ -+ "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet" -+ response.headers["Cache-Control"] = "no-cache" -+ response.headers['Content-Disposition'] = 'attachment; filename={file_name}'.format( -+ file_name=file_name) -+ return response -+ -+ def __get_packages_content(self, table_name): -+ """ -+ Get package list information -+ """ -+ try: -+ with DBHelper(db_name='lifecycle') as database: -+ # Query all package data in the specified table -+ _model = Packages.package_meta(table_name) -+ _packageinfos = database.session.query(_model).all() -+ packages_dicts = PackagesDownloadSchema( -+ many=True).dump(_packageinfos) -+ return packages_dicts -+ -+ except (SQLAlchemyError, DisconnectionError) as error: -+ current_app.logger.error(error) -+ return None -+ -+ def __get_issues_content(self): -+ """ -+ Get the list of issues -+ """ -+ try: -+ with DBHelper(db_name='lifecycle') as database: -+ _issues = database.session.query(PackagesIssue).all() 
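
For reference, `_download_excel` above chains `__bytes_save` and `__set_response_header` into the usual in-memory xlsx download pattern: write the DataFrame into a BytesIO buffer, then hand the bytes to Flask with spreadsheet headers. A minimal sketch of the same idea, assuming pandas with the xlsxwriter engine is installed; `xlsx_response` and its default filename are illustrative, not part of this patch:

    import io
    import pandas as pd
    from flask import make_response

    def xlsx_response(records, file_name='packages.xlsx'):
        # Build the workbook in memory instead of on disk.
        bytes_io = io.BytesIO()
        # The context manager saves and closes the writer on exit.
        with pd.ExcelWriter(bytes_io, engine='xlsxwriter') as writer:
            pd.DataFrame(records).to_excel(
                writer, sheet_name='Summary', index=False)
        bytes_io.seek(0)
        response = make_response(bytes_io.getvalue())
        response.headers['Content-Type'] = \
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        response.headers['Content-Disposition'] = \
            'attachment; filename=' + file_name
        return response
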
-+ issues_dicts = IssueDownloadSchema(many=True).dump(_issues) -+ return issues_dicts -+ except (SQLAlchemyError, DisconnectionError) as error: -+ current_app.logger.error(error) -+ return None -+ -+ def __to_dataframe(self, datas): -+ """ -+ Convert the obtained information into pandas content format -+ """ -+ -+ data_frame = pd.DataFrame(datas) -+ return data_frame -+ -+ def get(self, file_type): -+ """ -+ Download package collection information and isse list information -+ -+ """ -+ if file_type not in ['packages', 'issues']: -+ return jsonify( -+ ResponseCode.response_json( -+ ResponseCode.PARAM_ERROR)) -+ -+ table_name = request.args.get('table_name', None) -+ response = self._download_excel(file_type, table_name) -+ return response -+ -+ -+class MaintainerView(Resource): -+ """ -+ Maintainer name collection -+ """ -+ -+ def __query_maintainers(self): -+ """ -+ Query the names of all maintainers in the specified table -+ """ -+ try: -+ with DBHelper(db_name='lifecycle') as database: -+ maintainers = database.session.query( -+ PackagesMaintainer.maintainer).group_by(PackagesMaintainer.maintainer).all() -+ return [maintainer_item[0] for maintainer_item in maintainers -+ if maintainer_item[0]] -+ except (SQLAlchemyError, DisconnectionError) as error: -+ current_app.logger.error(error) -+ return [] -+ -+ def get(self): -+ """ -+ Get the list of maintainers -+ """ -+ # Group query of the names of all maintainers in the current table -+ maintainers = self.__query_maintainers() -+ return jsonify(ResponseCode.response_json( -+ ResponseCode.SUCCESS, -+ maintainers)) -+ -+ -+class TableColView(Resource): -+ """ -+ The default column of the package shows the interface -+ """ -+ -+ def __columns_names(self): -+ """ -+ Mapping of column name and title -+ """ -+ columns = [ -+ ('name', 'Name', True), -+ ('version', 'Version', True), -+ ('release', 'Release', True), -+ ('url', 'Url', True), -+ ('rpm_license', 'License', False), -+ ('feature', 'Feature', False), -+ ('maintainer', 'Maintainer', True), -+ ('maintainlevel', 'Maintenance Level', True), -+ ('release_time', 'Release Time', False), -+ ('used_time', 'Used Time', True), -+ ('maintainer_status', 'Maintain Status', True), -+ ('latest_version', 'Latest Version', False), -+ ('latest_version_time', 'Latest Version Release Time', False), -+ ('issue', 'Issue', True)] -+ return columns -+ -+ def __columns_mapping(self): -+ """ -+ -+ """ -+ columns = list() -+ for column in self.__columns_names(): -+ columns.append({ -+ 'column_name': column[0], -+ 'label': column[1], -+ 'default_selected': column[2] -+ }) -+ return columns -+ -+ def get(self): -+ """ -+ Get the default display column of the package -+ -+ """ -+ table_mapping_columns = self.__columns_mapping() -+ return jsonify( -+ ResponseCode.response_json( -+ ResponseCode.SUCCESS, -+ table_mapping_columns)) -+ -+ -+class LifeTables(Resource): -+ """ -+ description: LifeTables -+ Restful API: get -+ ChangeLog: -+ """ -+ -+ def get(self): -+ """ -+ return all table names in the database -+ -+ Returns: -+ Return the table names in the database as a list -+ """ -+ try: -+ with DBHelper(db_name="lifecycle") as database_name: -+ # View all table names in the package-info database -+ all_table_names = database_name.engine.table_names() -+ all_table_names.remove("packages_issue") -+ all_table_names.remove("packages_maintainer") -+ return jsonify( -+ ResponseCode.response_json( -+ ResponseCode.SUCCESS, data=all_table_names) -+ ) -+ except (SQLAlchemyError, DisconnectionError, Error, ValueError) as 
sql_error: -+ LOGGER.logger.error(sql_error) -+ return jsonify( -+ ResponseCode.response_json(ResponseCode.DATABASE_NOT_FOUND) -+ ) -+ -+ -+class IssueView(Resource): -+ """ -+ Issue content collection -+ """ -+ -+ def _query_issues(self, request_data): -+ """ -+ Args: -+ request_data: -+ Returns: -+ """ -+ try: -+ with DBHelper(db_name='lifecycle') as database: -+ issues_query = database.session.query(PackagesIssue.issue_id, -+ PackagesIssue.issue_url, -+ PackagesIssue.issue_title, -+ PackagesIssue.issue_status, -+ PackagesIssue.pkg_name, -+ PackagesIssue.issue_type, -+ PackagesMaintainer.maintainer). \ -+ outerjoin(PackagesMaintainer, -+ PackagesMaintainer.name == PackagesIssue.pkg_name) -+ if request_data.get("pkg_name"): -+ issues_query = issues_query.filter( -+ PackagesIssue.pkg_name == request_data.get("pkg_name")) -+ if request_data.get("issue_type"): -+ issues_query = issues_query.filter( -+ PackagesIssue.issue_type == request_data.get("issue_type")) -+ if request_data.get("issue_status"): -+ issues_query = issues_query.filter( -+ PackagesIssue.issue_status == request_data.get("issue_status")) -+ if request_data.get("maintainer"): -+ issues_query = issues_query.filter( -+ PackagesMaintainer.maintainer == request_data.get("maintainer")) -+ total_count = issues_query.count() -+ total_page = math.ceil( -+ total_count / int(request_data.get("page_size"))) -+ issues_query = issues_query.limit(request_data.get("page_size")).offset( -+ (int(request_data.get("page_num")) - 1) * int(request_data.get("page_size"))) -+ issue_dicts = IssuePageSchema( -+ many=True).dump(issues_query.all()) -+ issue_data = ResponseCode.response_json( -+ ResponseCode.SUCCESS, issue_dicts) -+ issue_data['total_count'] = total_count -+ issue_data['total_page'] = total_page -+ return issue_data -+ except (SQLAlchemyError, DisconnectionError) as error: -+ current_app.logger.error(error) -+ return ResponseCode.response_json(ResponseCode.DATABASE_NOT_FOUND) -+ -+ def get(self): -+ """ -+ Description: Get all issues info or one specific issue -+ Args: -+ Returns: -+ [ -+ { -+ "issue_id": "", -+ "issue_url": "", -+ "issue_title": "", -+ "issue_content": "", -+ "issue_status": "", -+ "issue_type": "" -+ }, -+ ] -+ Raises: -+ DisconnectionError: Unable to connect to database exception -+ AttributeError: Object does not have this property -+ TypeError: Exception of type -+ Error: Abnormal error -+ """ -+ schema = IssueSchema() -+ if schema.validate(request.args): -+ return jsonify( -+ ResponseCode.response_json(ResponseCode.PARAM_ERROR) -+ ) -+ issue_dict = self._query_issues(request.args) -+ return issue_dict -+ -+ -+class IssueType(Resource): -+ """ -+ Issue type collection -+ """ -+ -+ def _get_issue_type(self): -+ """ -+ Description: Query issue type -+ Returns: -+ """ -+ try: -+ with DBHelper(db_name='lifecycle') as database: -+ issues_query = database.session.query(PackagesIssue.issue_type).group_by( -+ PackagesIssue.issue_type).all() -+ return jsonify(ResponseCode.response_json( -+ ResponseCode.SUCCESS, [issue_query[0] for issue_query in issues_query])) -+ except (SQLAlchemyError, DisconnectionError) as error: -+ current_app.logger.error(error) -+ return jsonify(ResponseCode.response_json( -+ ResponseCode.PARAM_ERROR)) -+ -+ def get(self): -+ """ -+ Description: Get all issues info or one specific issue -+ Args: -+ Returns: -+ [ -+ "issue_type", -+ "issue_type" -+ ] -+ Raises: -+ DisconnectionError: Unable to connect to database exception -+ AttributeError: Object does not have this property -+ TypeError: Exception 
of type -+ Error: Abnormal error -+ """ -+ return self._get_issue_type() -+ -+ -+class IssueStatus(Resource): -+ """ -+ Issue status collection -+ """ -+ -+ def _get_issue_status(self): -+ """ -+ Description: Query issue status -+ Returns: -+ """ -+ try: -+ with DBHelper(db_name='lifecycle') as database: -+ issues_query = database.session.query(PackagesIssue.issue_status).group_by( -+ PackagesIssue.issue_status).all() -+ return jsonify(ResponseCode.response_json( -+ ResponseCode.SUCCESS, [issue_query[0] for issue_query in issues_query])) -+ except (SQLAlchemyError, DisconnectionError) as error: -+ current_app.logger.error(error) -+ return jsonify(ResponseCode.response_json( -+ ResponseCode.PARAM_ERROR)) -+ -+ def get(self): -+ """ -+ Description: Get all issues info or one specific issue -+ Args: -+ Returns: -+ [ -+ "issue_status", -+ "issue_status" -+ ] -+ Raises: -+ DisconnectionError: Unable to connect to database exception -+ AttributeError: Object does not have this property -+ TypeError: Exception of type -+ Error: Abnormal error -+ """ -+ return self._get_issue_status() -+ -+ -+class IssueCatch(Resource): -+ """ -+ description: Catch issue content -+ Restful API: put -+ ChangeLog: -+ """ -+ -+ def post(self): -+ """ -+ Searching issue content -+ Args: -+ Returns: -+ for examples: -+ [ -+ { -+ "issue_id": "", -+ "issue_url": "", -+ "issue_title": "", -+ "issue_content": "", -+ "issue_status": "", -+ "issue_type": "" -+ }, -+ ] -+ Raises: -+ DisconnectionError: Unable to connect to database exception -+ AttributeError: Object does not have this property -+ TypeError: Exception of type -+ Error: Abnormal error -+ """ -+ data = json.loads(request.get_data()) -+ if not isinstance(data, dict): -+ return jsonify( -+ ResponseCode.response_json(ResponseCode.PARAM_ERROR)) -+ pkg_name = data["repository"]["path"] -+ try: -+ _readconfig = ReadConfig(system_config.SYS_CONFIG_PATH) -+ pool_workers = _readconfig.get_config('LIFECYCLE', 'pool_workers') -+ _warehouse = _readconfig.get_config('LIFECYCLE', 'warehouse') -+ if _warehouse is None: -+ _warehouse = 'src-openeuler' -+ if not isinstance(pool_workers, int): -+ pool_workers = 10 -+ pool = ThreadPoolExecutor(max_workers=pool_workers) -+ with DBHelper(db_name="lifecycle") as database: -+ for table_name in filter(lambda x: x not in ['packages_issue', 'packages_maintainer', 'database_info'], -+ database.engine.table_names()): -+ cls_model = Packages.package_meta(table_name) -+ for package_item in database.session.query(cls_model).filter( -+ cls_model.name == pkg_name).all(): -+ gitee_issue = gitee( -+ package_item, _warehouse, package_item.name, table_name) -+ pool.submit(gitee_issue.issue_hooks, data) -+ pool.shutdown() -+ return jsonify(ResponseCode.response_json(ResponseCode.SUCCESS)) -+ except SQLAlchemyError as error_msg: -+ current_app.logger.error(error_msg) -+ -+ -+class UpdatePackages(Resource): -+ """ -+ description:Life cycle update information of a single package -+ Restful API: post -+ ChangeLog: -+ """ -+ -+ def _get_all_yaml_name(self, filepath): -+ """ -+ List of all yaml file names in the folder -+ -+ Args: -+ filepath: file path -+ -+ Returns: -+ yaml_file_list:List of all yaml file names in the folder -+ -+ Attributes: -+ Error:Error -+ NotADirectoryError:Invalid directory name -+ FileNotFoundError:File not found error -+ -+ """ -+ try: -+ yaml_file_list = os.listdir(filepath) -+ return yaml_file_list -+ except (Error, NotADirectoryError, FileNotFoundError) as error: -+ current_app.logger.error(error) -+ return None -+ -+ def 
_get_yaml_content(self, yaml_file, filepath): -+ """ -+ Read the content of the yaml file -+ -+ Args: -+ yaml_file: yaml file -+ filepath: file path -+ -+ Returns: -+ Return a dictionary containing name, maintainer and maintainlevel -+ """ -+ yaml_data_dict = dict() -+ if not yaml_file.endswith(".yaml"): -+ return None -+ pkg_name = yaml_file.rsplit('.yaml')[0] -+ single_yaml_path = os.path.join(filepath, yaml_file) -+ with open(single_yaml_path, 'r', encoding='utf-8') as file_context: -+ yaml_flie_data = yaml.load( -+ file_context.read(), Loader=yaml.FullLoader) -+ if yaml_flie_data is None or not isinstance(yaml_flie_data, dict): -+ return None -+ maintainer = yaml_flie_data.get("maintainer") -+ maintainlevel = yaml_flie_data.get("maintainlevel") -+ yaml_data_dict['name'] = pkg_name -+ if maintainer: -+ yaml_data_dict['maintainer'] = maintainer -+ if maintainlevel: -+ yaml_data_dict['maintainlevel'] = maintainlevel -+ return yaml_data_dict -+ -+ def _read_yaml_file(self, filepath): -+ """ -+ Read the yaml file and combine the data of the nested dictionary of the list -+ -+ Args: -+ filepath: file path -+ -+ Returns: -+ yaml.YAMLError:yaml file error -+ SQLAlchemyError:SQLAlchemy Error -+ DisconnectionError:Connect to database error -+ Error:Error -+ """ -+ yaml_file_list = self._get_all_yaml_name(filepath) -+ if not yaml_file_list: -+ return None -+ try: -+ yaml_data_list = list() -+ _readconfig = ReadConfig(system_config.SYS_CONFIG_PATH) -+ pool_workers = _readconfig.get_config('LIFECYCLE', 'pool_workers') -+ if not isinstance(pool_workers, int): -+ pool_workers = 10 -+ with ThreadPoolExecutor(max_workers=pool_workers) as pool: -+ for yaml_file in yaml_file_list: -+ pool_result = pool.submit( -+ self._get_yaml_content, yaml_file, filepath) -+ yaml_data_dict = pool_result.result() -+ yaml_data_list.append(yaml_data_dict) -+ return yaml_data_list -+ except (yaml.YAMLError, SQLAlchemyError, DisconnectionError, Error) as error: -+ current_app.logger.error(error) -+ return None -+ -+ def _verification_yaml_data_list(self, yaml_data_list): -+ """ -+ Verify the data obtained in the yaml file -+ -+ Args: -+ yaml_data_list: yaml data list -+ -+ Returns: -+ yaml_data_list: After verification yaml data list -+ -+ Attributes: -+ ValidationError: Validation error -+ -+ """ -+ try: -+ DataFormatVerfi(many=True).load(yaml_data_list) -+ return yaml_data_list -+ except ValidationError as error: -+ current_app.logger.error(error.messages) -+ return None -+ -+ def _save_in_database(self, yaml_data_list): -+ """ -+ Save the data to the database -+ -+ Args: -+ tbname: Table Name -+ name_separate_list: Split name list -+ _update_pack_data: Split new list of combined data -+ -+ Returns: -+ SUCCESS or UPDATA_DATA_FAILED -+ -+ Attributes -+ DisconnectionError: Connect to database error -+ SQLAlchemyError: SQLAlchemy Error -+ Error: Error -+ -+ """ -+ try: -+ with DBHelper(db_name="lifecycle") as database_name: -+ if 'packages_maintainer' not in database_name.engine.table_names(): -+ return jsonify(ResponseCode.response_json( -+ ResponseCode.TABLE_NAME_NOT_EXIST)) -+ database_name.session.begin(subtransactions=True) -+ for yaml_data in yaml_data_list: -+ name = yaml_data.get("name") -+ maintainer = yaml_data.get("maintainer") -+ maintainlevel = yaml_data.get("maintainlevel") -+ packages_maintainer_obj = database_name.session.query( -+ PackagesMaintainer).filter_by(name=name).first() -+ if packages_maintainer_obj: -+ if maintainer: -+ packages_maintainer_obj.maintainer = maintainer -+ if maintainlevel: -+ 
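
For reference, the `_save_in_database` loop here is a query-then-update-or-insert ("upsert") on PackagesMaintainer: look the row up by name, mutate only the supplied fields when it exists, otherwise stage a fresh record. Stripped of the transaction handling it reduces to this sketch; `upsert_maintainer` is a hypothetical helper and `session` is any SQLAlchemy session bound to the lifecycle database:

    def upsert_maintainer(session, name, maintainer=None, maintainlevel=None):
        # PackagesMaintainer is the model imported from
        # packageship.application.models.package; assumed in scope here.
        row = session.query(PackagesMaintainer).filter_by(name=name).first()
        if row:
            # Update only the fields that were actually supplied.
            if maintainer:
                row.maintainer = maintainer
            if maintainlevel:
                row.maintainlevel = maintainlevel
        else:
            # No existing row: stage a new record for insertion.
            session.add(PackagesMaintainer(
                name=name, maintainer=maintainer, maintainlevel=maintainlevel))
        session.commit()
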
packages_maintainer_obj.maintainlevel = maintainlevel -+ else: -+ database_name.add(PackagesMaintainer( -+ name=name, maintainer=maintainer, maintainlevel=maintainlevel -+ )) -+ database_name.session.commit() -+ return jsonify(ResponseCode.response_json( -+ ResponseCode.SUCCESS)) -+ except (DisconnectionError, SQLAlchemyError, Error, AttributeError) as error: -+ current_app.logger.error(error) -+ return jsonify(ResponseCode.response_json( -+ ResponseCode.UPDATA_DATA_FAILED)) -+ -+ def _overall_process( -+ self, -+ filepath): -+ """ -+ Call each method to complete the entire function -+ -+ Args: -+ filepath: file path -+ tbname: table name -+ -+ Returns: -+ SUCCESS or UPDATA_DATA_FAILED -+ -+ Attributes -+ DisconnectionError: Connect to database error -+ SQLAlchemyError: SQLAlchemy Error -+ Error: Error -+ """ -+ try: -+ if filepath is None or not os.path.exists(filepath): -+ return jsonify(ResponseCode.response_json( -+ ResponseCode.SPECIFIED_FILE_NOT_EXIST)) -+ yaml_file_list = self._get_all_yaml_name(filepath) -+ if not yaml_file_list: -+ return jsonify(ResponseCode.response_json( -+ ResponseCode.EMPTY_FOLDER)) -+ yaml_data_list_result = self._read_yaml_file(filepath) -+ yaml_data_list = self._verification_yaml_data_list( -+ yaml_data_list_result) -+ if yaml_data_list is None: -+ return jsonify(ResponseCode.response_json( -+ ResponseCode.YAML_FILE_ERROR)) -+ result = self._save_in_database( -+ yaml_data_list) -+ return result -+ except (DisconnectionError, SQLAlchemyError, Error) as error: -+ current_app.logger.error(error) -+ return jsonify(ResponseCode.response_json( -+ ResponseCode.UPDATA_DATA_FAILED)) -+ -+ def _update_single_package_info( -+ self, srcname, maintainer, maintainlevel): -+ """ -+ Update the maintainer field and maintainlevel -+ field of a single package -+ -+ Args: -+ srcname: The name of the source package -+ maintainer: Package maintainer -+ maintainlevel: Package maintenance level -+ -+ Returns: -+ success or failed -+ -+ Attributes -+ SQLAlchemyError: sqlalchemy error -+ DisconnectionError: Cannot connect to database error -+ Error: Error -+ """ -+ if not srcname: -+ return jsonify( -+ ResponseCode.response_json(ResponseCode.PACK_NAME_NOT_FOUND) -+ ) -+ if not maintainer and not maintainlevel: -+ return jsonify( -+ ResponseCode.response_json(ResponseCode.PARAM_ERROR) -+ ) -+ try: -+ with DBHelper(db_name='lifecycle') as database_name: -+ if 'packages_maintainer' not in database_name.engine.table_names(): -+ return jsonify(ResponseCode.response_json( -+ ResponseCode.TABLE_NAME_NOT_EXIST)) -+ update_obj = database_name.session.query( -+ PackagesMaintainer).filter_by(name=srcname).first() -+ if update_obj: -+ if maintainer: -+ update_obj.maintainer = maintainer -+ if maintainlevel: -+ update_obj.maintainlevel = maintainlevel -+ else: -+ database_name.add(PackagesMaintainer( -+ name=srcname, maintainer=maintainer, maintainlevel=maintainlevel -+ )) -+ database_name.session.commit() -+ return jsonify( -+ ResponseCode.response_json( -+ ResponseCode.SUCCESS)) -+ except (SQLAlchemyError, DisconnectionError, Error) as sql_error: -+ current_app.logger.error(sql_error) -+ database_name.session.rollback() -+ return jsonify(ResponseCode.response_json( -+ ResponseCode.UPDATA_DATA_FAILED -+ )) -+ -+ def put(self): -+ """ -+ Life cycle update information of a single package or -+ All packages -+ -+ Returns: -+ for example:: -+ { -+ "code": "", -+ "data": "", -+ "msg": "" -+ } -+ """ -+ schema = UpdatePackagesSchema() -+ data = request.get_json() -+ if schema.validate(data): -+ 
return jsonify( -+ ResponseCode.response_json(ResponseCode.PARAM_ERROR) -+ ) -+ srcname = data.get('pkg_name', None) -+ maintainer = data.get('maintainer', None) -+ maintainlevel = data.get('maintainlevel', None) -+ batch = data.get('batch') -+ filepath = data.get('filepath', None) -+ -+ if batch: -+ result = self._overall_process(filepath) -+ else: -+ result = self._update_single_package_info( -+ srcname, maintainer, maintainlevel) -+ return result -diff -Naru a/packageship/application/apps/package/function/be_depend.py b/packageship/application/apps/package/function/be_depend.py ---- a/packageship/application/apps/package/function/be_depend.py 2020-09-22 23:34:04.037937224 +0800 -+++ b/packageship/application/apps/package/function/be_depend.py 2020-09-22 23:48:32.402476132 +0800 -@@ -5,11 +5,12 @@ - This includes both install and build dependencies - Class: BeDepend - """ -+import copy -+from collections import namedtuple, defaultdict - from flask import current_app - from sqlalchemy import text - from sqlalchemy.exc import SQLAlchemyError - from sqlalchemy.sql import literal_column --from packageship.application.apps.package.function.constants import ResponseCode - from packageship.application.models.package import SrcPack - from packageship.libs.dbutils import DBHelper - -@@ -36,6 +37,8 @@ - self.source_name_set = set() - self.bin_name_set = set() - self.result_dict = dict() -+ self.comm_install_builds = defaultdict(set) -+ self.provides_name = set() - - def main(self): - """ -@@ -69,14 +72,16 @@ - [["root", None]] - ] - self.source_name_set.add(self.source_name) -- self.package_bedepend( -+ self._provides_bedepend( - [self.source_name], data_base, package_type='src') - -+ for _, value in self.result_dict.items(): -+ value[-1] = list(value[-1]) - return self.result_dict - -- def package_bedepend(self, pkg_name_list, data_base, package_type): -+ def _get_provides(self, pkg_name_list, data_base, package_type): - """ -- Description: Query the dependent function -+ Description: Query the components provided by the required package - Args: - pkg_name_list:source or binary packages name - data_base: database -@@ -84,35 +89,31 @@ - Returns: - Raises: - SQLAlchemyError: Database connection exception -- """ -- -+ """ -+ res = namedtuple( -+ 'restuple', [ -+ 'search_bin_name', 'search_bin_version', 'source_name']) - sql_com = """ -- SELECT DISTINCT b1.name AS search_bin_name, -+ SELECT DISTINCT b1.name AS search_bin_name, - b1.version AS search_bin_version, - b1.src_name AS source_name, -- b2.name AS bin_name, -- s1.name AS bebuild_src_name, -- b2.src_name AS install_depend_src_name -+ bin_provides.name As pro_name - FROM ( SELECT pkgKey,src_name,name,version FROM bin_pack WHERE {} ) b1 -- LEFT JOIN bin_provides ON bin_provides.pkgKey = b1.pkgKey -- LEFT JOIN bin_requires br ON br.name = bin_provides.name -- LEFT JOIN src_requires sr ON sr.name = bin_provides.name -- LEFT JOIN src_pack s1 ON s1.pkgKey = sr.pkgKey -- LEFT JOIN bin_pack b2 ON b2.pkgKey = br.pkgKey -- """ -+ LEFT JOIN bin_provides ON bin_provides.pkgKey = b1.pkgKey;""" - -+ # package_type - if package_type == 'src': - literal_name = 'src_name' -- - elif package_type == 'bin': - literal_name = 'name' - -- else: -- return -- -+ # Query database -+ # The lower version of SQLite can look up up to 999 parameters -+ # simultaneously, so use 900 sharding queries - try: - result = [] -- for input_name in (pkg_name_list[i:i+900] for i in range(0, len(pkg_name_list), 900)): -+ for input_name in (pkg_name_list[i:i + 900] -+ for i in range(0, 
len(pkg_name_list), 900)): - name_in = literal_column(literal_name).in_(input_name) - sql_str = text(sql_com.format(name_in)) - result.extend(data_base.session.execute( -@@ -124,74 +125,176 @@ - ).fetchall()) - except SQLAlchemyError as sql_err: - current_app.logger.error(sql_err) -- return ResponseCode.response_json(ResponseCode.CONNECT_DB_ERROR) -+ return - - if not result: - return - -- # Source and binary packages that were found to be dependent -- source_name_list = [] -- bin_name_list = [] -+ # Process the result of the component -+ pro_name_dict = dict() -+ -+ _components = set() - for obj in result: -- if obj.source_name is None: -- source_name = 'NOT FOUND' -- else: -- source_name = obj.source_name -- if obj.bebuild_src_name: -- # Determine if the source package has been checked -- parent_node = obj.bebuild_src_name -- be_type = "build" -- # Call the spell dictionary function -- self.make_dicts( -- obj.search_bin_name, -- source_name, -+ if not obj.pro_name: -+ continue -+ # De-weight components -+ if obj.pro_name not in self.comm_install_builds: -+ pro_name_dict[obj.pro_name] = res( -+ obj.search_bin_name, obj.search_bin_version, obj.source_name) -+ -+ if obj.search_bin_name not in self.result_dict: -+ self.result_dict[obj.search_bin_name] = [ -+ obj.source_name, - obj.search_bin_version, -- parent_node, -- be_type) -+ self.db_name, -+ self.comm_install_builds[obj.pro_name] -+ if self.comm_install_builds[obj.pro_name] else {(None, None)} -+ ] -+ tmp_ = copy.deepcopy(self.comm_install_builds[obj.pro_name]) - -- if obj.bebuild_src_name not in self.source_name_set: -- self.source_name_set.add(obj.bebuild_src_name) -- source_name_list.append(obj.bebuild_src_name) -- -- if obj.bin_name: -- # Determine if the bin package has been checked -- parent_node = obj.bin_name -- be_type = "install" -- # Call the spell dictionary function -- self.make_dicts( -- obj.search_bin_name, -- source_name, -- obj.search_bin_version, -- parent_node, -- be_type) -+ tmp_.discard((obj.search_bin_name, 'install')) -+ tmp_.discard((obj.search_bin_name, 'build')) - -- if obj.bin_name not in self.bin_name_set: -- self.bin_name_set.add(obj.bin_name) -- bin_name_list.append(obj.bin_name) -- -- # With_sub_pack=1 -- if self.with_sub_pack == "1": -- if obj.install_depend_src_name not in self.source_name_set: -- self.source_name_set.add( -- obj.install_depend_src_name) -- source_name_list.append( -- obj.install_depend_src_name) -- -- if obj.bebuild_src_name is None and obj.bin_name is None: -- parent_node = None -- be_type = None -- self.make_dicts( -- obj.search_bin_name, -- source_name, -- obj.search_bin_version, -- parent_node, -- be_type) -+ if (None, None) in self.result_dict[obj.search_bin_name][-1] \ -+ and self.comm_install_builds[obj.pro_name]: -+ self.result_dict[obj.search_bin_name][-1] = tmp_ -+ else: -+ self.result_dict[obj.search_bin_name][-1].update(tmp_) -+ return pro_name_dict -+ -+ def _provides_bedepend(self, pkg_name_list, data_base, package_type): -+ """ -+ Description: Query the dependent function -+ Args: -+ pkg_name_list:source or binary packages name -+ data_base: database -+ package_type: package type -+ Returns: -+ Raises: -+ SQLAlchemyError: Database connection exception -+ """ -+ # Query component -+ pro_names = self._get_provides(pkg_name_list, data_base, package_type) - -- if len(source_name_list) != 0: -- self.package_bedepend( -+ if not pro_names: -+ return -+ -+ sql_2_bin = """ -+ SELECT DISTINCT -+ b2.name AS bin_name, -+ b2.src_name AS install_depend_src_name, -+ br.name AS pro_name 
-+ FROM -+ ( SELECT name, pkgKey FROM bin_requires WHERE {}) br -+ LEFT JOIN bin_pack b2 ON b2.pkgKey = br.pkgKey; -+ """ -+ -+ sql_2_src = """ -+ SELECT DISTINCT -+ s1.name AS bebuild_src_name, -+ sr.name AS pro_name -+ FROM -+ ( SELECT name, pkgKey FROM src_requires WHERE {} ) sr -+ LEFT JOIN src_pack s1 ON s1.pkgKey = sr.pkgKey; -+ """ -+ -+ provides_name_list = [pro for pro, _ in pro_names.items()] -+ -+ result_2_bin = [] -+ result_2_src = [] -+ # Query database -+ try: -+ for input_name in ( -+ provides_name_list[i:i + 900] for i in range(0, len(provides_name_list), 900)): -+ name_in = literal_column('name').in_(input_name) -+ sql_str_2_bin = text(sql_2_bin.format(name_in)) -+ result_2_bin.extend(data_base.session.execute( -+ sql_str_2_bin, -+ { -+ 'name_{}'.format(i): v -+ for i, v in enumerate(input_name, 1) -+ } -+ ).fetchall()) -+ sql_str_2src = text(sql_2_src.format(name_in)) -+ result_2_src.extend(data_base.session.execute( -+ sql_str_2src, -+ { -+ 'name_{}'.format(i): v -+ for i, v in enumerate(input_name, 1) -+ } -+ ).fetchall()) -+ except SQLAlchemyError as sql_err: -+ current_app.logger.error(sql_err) -+ return -+ -+ source_name_list = [] -+ bin_name_list = [] -+ -+ # Process the data that the installation depends on -+ for bin_info in result_2_bin: -+ temp_bin_pkg = bin_info.bin_name -+ temp_sub_src_pkg = bin_info.install_depend_src_name -+ -+ #withsubpick ==1 -+ if self.with_sub_pack == '1' and temp_sub_src_pkg not in self.source_name_set: -+ self.source_name_set.add(temp_sub_src_pkg) -+ source_name_list.append(temp_sub_src_pkg) -+ -+ if temp_bin_pkg not in self.bin_name_set: -+ self.bin_name_set.add(temp_bin_pkg) -+ bin_name_list.append(temp_bin_pkg) -+ -+ if bin_info.pro_name not in self.comm_install_builds: -+ self.comm_install_builds[bin_info.pro_name] = { -+ (bin_info.bin_name, 'install') -+ } -+ -+ elif (bin_info.bin_name, 'install') not in \ -+ self.comm_install_builds[bin_info.pro_name]: -+ -+ self.comm_install_builds[bin_info.pro_name].add( -+ (bin_info.bin_name, 'install') -+ ) -+ -+ self.make_dicts( -+ pro_names.get(bin_info.pro_name).search_bin_name, -+ pro_names.get(bin_info.pro_name).source_name, -+ pro_names.get(bin_info.pro_name).search_bin_version, -+ bin_info.bin_name, -+ 'install' -+ ) -+ # Process data that is compile-dependent -+ for src_info in result_2_src: -+ if src_info.bebuild_src_name not in self.source_name_set: -+ self.source_name_set.add(src_info.bebuild_src_name) -+ source_name_list.append(src_info.bebuild_src_name) -+ -+ if src_info.pro_name not in self.comm_install_builds: -+ self.comm_install_builds[src_info.pro_name] = { -+ (src_info.bebuild_src_name, 'build') -+ } -+ elif (src_info.bebuild_src_name, 'build') not in \ -+ self.comm_install_builds[src_info.pro_name]: -+ -+ self.comm_install_builds[src_info.pro_name].add( -+ (src_info.bebuild_src_name, 'build') -+ ) -+ -+ self.make_dicts( -+ pro_names.get(src_info.pro_name).search_bin_name, -+ pro_names.get(src_info.pro_name).source_name, -+ pro_names.get(src_info.pro_name).search_bin_version, -+ src_info.bebuild_src_name, -+ 'build' -+ ) -+ # Recursively query all source packages that need to be looked up -+ if source_name_list: -+ self._provides_bedepend( - source_name_list, data_base, package_type="src") -- if len(bin_name_list) != 0: -- self.package_bedepend(bin_name_list, data_base, package_type="bin") -+ # Recursively query all binary packages that need to be looked up -+ if bin_name_list: -+ self._provides_bedepend( -+ bin_name_list, data_base, package_type="bin") - - def 
make_dicts(self, key, source_name, version, parent_node, be_type): - """ -@@ -210,29 +313,27 @@ - source_name, - version, - self.db_name, -- [ -- [parent_node, -+ { -+ (parent_node, - be_type -- ] -- ] -+ ) -+ } -+ - ] - else: - if be_type and parent_node: -- if [None, None] in self.result_dict[key][-1]: -- self.result_dict.pop(key) -- self.result_dict[key] = [ -- source_name, -- version, -- self.db_name, -- [ -- [parent_node, -- be_type -- ] -- ] -- ] -+ if (None, None) in self.result_dict[key][-1]: -+ self.result_dict[key][-1] = { -+ ( -+ parent_node, -+ be_type -+ ) -+ } - -- elif [parent_node, be_type] not in self.result_dict[key][-1]: -- self.result_dict[key][-1].append([ -- parent_node, -- be_type -- ]) -+ elif (parent_node, be_type) not in self.result_dict[key][-1]: -+ self.result_dict[key][-1].add( -+ ( -+ parent_node, -+ be_type -+ ) -+ ) -diff -Naru a/packageship/libs/dbutils/sqlalchemy_helper.py b/packageship/libs/dbutils/sqlalchemy_helper.py ---- a/packageship/libs/dbutils/sqlalchemy_helper.py 2020-09-22 23:34:04.037937224 +0800 -+++ b/packageship/libs/dbutils/sqlalchemy_helper.py 2020-09-22 23:52:23.031681622 +0800 -@@ -9,6 +9,7 @@ - from sqlalchemy.orm import sessionmaker - from sqlalchemy.exc import SQLAlchemyError - from sqlalchemy.exc import DisconnectionError -+from sqlalchemy.exc import OperationalError - from sqlalchemy.ext.declarative import declarative_base - from sqlalchemy.engine.url import URL - from packageship.libs.exception.ext import Error -@@ -252,6 +253,8 @@ - - except SQLAlchemyError as sql_error: - self.session.rollback() -+ if isinstance(sql_error, OperationalError): -+ raise OperationalError - raise Error(sql_error) - else: - self.session.commit() -diff -Naru a/packageship/pkgshipd b/packageship/pkgshipd ---- a/packageship/pkgshipd 2020-09-22 23:34:04.037937224 +0800 -+++ b/packageship/pkgshipd 2020-09-22 23:51:57.323547247 +0800 -@@ -1,6 +1,18 @@ - #!/bin/bash - SYS_PATH=/etc/pkgship - OUT_PATH=/var/run/pkgship_uwsgi -+ -+MEM_THRESHOLD='700' -+MEM_FREE=`free -m | grep "Mem" | awk '{print $7}'` -+ -+if [ $1 = "start" ] -+then -+ if [ $MEM_FREE -lt $MEM_THRESHOLD ]; then -+ echo "[ERROR] pkgship tool does not support memory less than ${MEM_THRESHOLD} MB." -+ exit 0 -+ fi -+fi -+ - if [ ! 
-d "$OUT_PATH" ]; then - mkdir $OUT_PATH - fi -diff -Naru a/test/common_files/package.ini b/test/common_files/package.ini ---- a/test/common_files/package.ini 2020-09-22 23:34:04.041937245 +0800 -+++ b/test/common_files/package.ini 2020-09-22 23:50:56.559229634 +0800 -@@ -1,30 +1,30 @@ --[SYSTEM] --init_conf_path = C:\Users\TAO\Desktop\pkgship-1.1.0\test\common_files\conf.yaml --write_port = 8080 --query_port = 8090 --write_ip_addr = 127.0.0.1 --query_ip_addr = 127.0.0.1 --remote_host = https://api.openeuler.org/pkgmanage -- --[LOG] --log_level = INFO --log_name = log_info.log --backup_count = 10 --max_bytes = 314572800 -- --[UWSGI] --daemonize = /var/log/uwsgi.log --buffer-size = 65536 --http-timeout = 600 --harakiri = 600 -- --[TIMEDTASK] --open = True --hour = 3 --minute = 0 -- --[LIFECYCLE] --warehouse_remote = https://gitee.com/openeuler/openEuler-Advisor/raw/master/upstream-info/ --pool_workers = 10 --warehouse = src-openeuler -- -+[SYSTEM] -+init_conf_path = -+write_port = 8080 -+query_port = 8090 -+write_ip_addr = 127.0.0.1 -+query_ip_addr = 127.0.0.1 -+remote_host = https://api.openeuler.org/pkgmanage -+ -+[LOG] -+log_level = INFO -+log_name = log_info.log -+backup_count = 10 -+max_bytes = 314572800 -+ -+[UWSGI] -+daemonize = /var/log/uwsgi.log -+buffer-size = 65536 -+http-timeout = 600 -+harakiri = 600 -+ -+[TIMEDTASK] -+open = True -+hour = 3 -+minute = 0 -+ -+[LIFECYCLE] -+warehouse_remote = https://gitee.com/openeuler/openEuler-Advisor/raw/master/upstream-info/ -+pool_workers = 10 -+warehouse = src-openeuler -+ diff --git a/0007-correct-the-parameter-transfer-method-and-change-the-status-recording-method.patch b/0007-correct-the-parameter-transfer-method-and-change-the-status-recording-method.patch deleted file mode 100644 index f3f6480..0000000 --- a/0007-correct-the-parameter-transfer-method-and-change-the-status-recording-method.patch +++ /dev/null @@ -1,211 +0,0 @@ -diff -Naru pkgship-1.1.0/packageship/application/apps/package/function/build_depend.py pkg/packageship/application/apps/package/function/build_depend.py ---- pkgship-1.1.0/packageship/application/apps/package/function/build_depend.py 2020-10-13 13:57:13.529049796 +0800 -+++ pkg/packageship/application/apps/package/function/build_depend.py 2020-10-13 13:58:37.670278333 +0800 -@@ -89,9 +89,9 @@ - build_list, - not_fd_com_build, - pk_v -- ) = self.search_db.get_build_depend(pkg_list, self.__already_pk_val) -+ ) = self.search_db.get_build_depend(pkg_list, pk_value=self.__already_pk_val) - -- self.__already_pk_val += pk_v -+ self.__already_pk_val = pk_v - self.not_found_components.update(not_fd_com_build) - if not build_list: - return res_status if res_status == ResponseCode.DIS_CONNECTION_DB else \ -@@ -102,8 +102,8 @@ - - code, res_dict, not_fd_com_install = \ - InstallDepend(self.db_list).query_install_depend(search_list, -- self.history_dicts, -- self.__already_pk_val) -+ history_pk_val=self.__already_pk_val, -+ history_dicts=self.history_dicts) - self.not_found_components.update(not_fd_com_install) - if not res_dict: - return code -@@ -206,8 +206,8 @@ - not_fd_com, - pk_v - ) = self.search_db.get_build_depend(pkg_name_li, -- self.__already_pk_val) -- self.__already_pk_val += pk_v -+ pk_value=self.__already_pk_val) -+ self.__already_pk_val = pk_v - self.not_found_components.update(not_fd_com) - if not bin_info_lis: - return -diff -Naru pkgship-1.1.0/packageship/application/apps/package/function/install_depend.py pkg/packageship/application/apps/package/function/install_depend.py ---- 
pkgship-1.1.0/packageship/application/apps/package/function/install_depend.py 2020-10-13 13:57:13.529049796 +0800 -+++ pkg/packageship/application/apps/package/function/install_depend.py 2020-10-13 13:58:37.680278477 +0800 -@@ -68,7 +68,7 @@ - self.__search_list.append(binary) - else: - LOGGER.logger.warning("There is a NONE in input value: %s", str(binary_list)) -- self.__already_pk_value += history_pk_val if history_pk_val else [] -+ self.__already_pk_value = history_pk_val if history_pk_val else [] - while self.__search_list: - self.__query_single_install_dep(history_dicts) - return ResponseCode.SUCCESS, self.binary_dict.dictionary, self.not_found_components -@@ -82,14 +82,11 @@ - response_code: response code - Raises: - """ -- result_list, not_found_components, pk_val = map( -- set, -- self.__search_db.get_install_depend(self.__search_list, -- self.__already_pk_value) -- ) -- -+ res_list, not_found_components, pk_val = self.__search_db.get_install_depend(self.__search_list, -+ pk_value=self.__already_pk_value) -+ result_list = set(res_list) - self.not_found_components.update(not_found_components) -- self.__already_pk_value += pk_val -+ self.__already_pk_value = pk_val - for search in self.__search_list: - if search not in self.binary_dict.dictionary: - self.binary_dict.init_key(key=search, parent_node=[]) -diff -Naru pkgship-1.1.0/packageship/application/apps/package/function/searchdb.py pkg/packageship/application/apps/package/function/searchdb.py ---- pkgship-1.1.0/packageship/application/apps/package/function/searchdb.py 2020-10-13 13:57:13.529049796 +0800 -+++ pkg/packageship/application/apps/package/function/searchdb.py 2020-10-13 13:58:37.680278477 +0800 -@@ -94,7 +94,7 @@ - - for db_name, data_base in self.db_object_dict.items(): - try: -- req_set = self._get_requires(search_set, data_base, _tp='install') -+ req_set = self._get_requires(search_set, data_base, search_type='install') - - if not req_set: - continue -@@ -104,7 +104,7 @@ - pk_v, - not_fd_com) = self._get_provides_req_info(req_set, - data_base, -- pk_val) -+ pk_value=pk_val) - pk_val += pk_v - res_list, get_list = self._comb_install_list(depend_set, - req_pk_dict, -@@ -121,7 +121,7 @@ - if not search_set: - result_list.extend( - self._get_install_pro_in_other_database(provides_not_found, -- db_name) -+ database_name=db_name) - ) - return result_list, set(provides_not_found.keys()), pk_val - -@@ -215,13 +215,13 @@ - - return ret_list, get_list - -- def _get_install_pro_in_other_database(self, not_found_binary, _db_name=None): -+ def _get_install_pro_in_other_database(self, not_found_binary, database_name=None): - """ - Description: Binary package name data not found in - the current database, go to other databases to try - Args: - not_found_binary: not_found_build These data cannot be found in the current database -- _db_name:current database name -+ database_name:current database name - Returns: - result_list :[return_tuple1,return_tuple2] package information - Raises: -@@ -242,7 +242,7 @@ - search_set = {k for k, _ in not_found_binary.items()} - - for db_name, data_base in self.db_object_dict.items(): -- if db_name == _db_name: -+ if db_name == database_name: - continue - - parm_tuple = namedtuple("in_tuple", 'req_name') -@@ -362,7 +362,7 @@ - for db_name, data_base in self.db_object_dict.items(): - - try: -- req_set = self._get_requires(s_name_set, data_base, _tp='build') -+ req_set = self._get_requires(s_name_set, data_base, search_type='build') - - if not req_set: - continue -@@ -384,7 +384,7 @@ - 
s_name_set.symmetric_difference_update(set(get_list)) - if not s_name_set: - build_list.extend( -- self._get_binary_in_other_database(provides_not_found, _db_name=db_name) -+ self._get_binary_in_other_database(provides_not_found, database_name=db_name) - ) - return ResponseCode.SUCCESS, build_list, set(provides_not_found.keys()), pk_val - -@@ -483,13 +483,13 @@ - - return ret_list, get_list - -- def _get_binary_in_other_database(self, not_found_binary, _db_name=None): -+ def _get_binary_in_other_database(self, not_found_binary, database_name=None): - """ - Description: Binary package name data not found in - the current database, go to other databases to try - Args: - not_found_binary: not_found_build These data cannot be found in the current database -- _db_name:current database name -+ database_name:current database name - Returns: - result_list :[return_tuple1,return_tuple2] package information - Raises: -@@ -513,7 +513,7 @@ - - for db_name, data_base in self.db_object_dict.items(): - -- if db_name == _db_name: -+ if db_name == database_name: - continue - - in_tuple = namedtuple("in_tuple", 'req_name') -@@ -600,20 +600,20 @@ - - # Common methods for install and build - @staticmethod -- def _get_requires(search_set, data_base, _tp=None): -+ def _get_requires(search_set, data_base, search_type=None): - """ - Description: Query the dependent components of the current package - Args: - search_set: The package name to be queried - data_base:current database object -- _tp:type options build or install -+ search_type: type options build or install - Returns: - req_set:List Package information and corresponding component information - Raises: - AttributeError: The object does not have this property - SQLAlchemyError: sqlalchemy error - """ -- if _tp == 'build': -+ if search_type == 'build': - sql_com = text(""" - SELECT DISTINCT - src_requires.NAME AS req_name, -@@ -623,7 +623,7 @@ - ( SELECT pkgKey, NAME, version, src_name FROM src_pack WHERE {} ) src - LEFT JOIN src_requires ON src.pkgKey = src_requires.pkgKey; - """.format(literal_column('name').in_(search_set))) -- elif _tp == 'install': -+ elif search_type == 'install': - sql_com = text(""" - SELECT DISTINCT - bin_requires.NAME AS req_name, -diff -Naru pkgship-1.1.0/packageship/application/apps/package/function/self_depend.py pkg/packageship/application/apps/package/function/self_depend.py ---- pkgship-1.1.0/packageship/application/apps/package/function/self_depend.py 2020-10-13 13:57:13.529049796 +0800 -+++ pkg/packageship/application/apps/package/function/self_depend.py 2020-10-13 13:58:37.690278620 +0800 -@@ -143,7 +143,7 @@ - self.result_tmp.clear() - _, self.result_tmp, not_fd_com = \ - install_depend(self.db_list).query_install_depend(self.search_install_list, -- self.binary_dict.dictionary) -+ history_dicts=self.binary_dict.dictionary) - self.not_found_components.update(not_fd_com) - self.search_install_list.clear() - for key, values in self.result_tmp.items(): diff --git a/0008-fix-selfbuild-error-message.patch b/0008-fix-selfbuild-error-message.patch deleted file mode 100644 index 30ec7a4..0000000 --- a/0008-fix-selfbuild-error-message.patch +++ /dev/null @@ -1,12 +0,0 @@ -diff -Naru pkgship-1.1.0/packageship/application/apps/package/function/searchdb.py pkgship/packageship/application/apps/package/function/searchdb.py ---- pkgship-1.1.0/packageship/application/apps/package/function/searchdb.py 2020-09-25 17:28:16.230216100 +0800 -+++ pkgship/packageship/application/apps/package/function/searchdb.py 2020-09-25 17:30:48.456873100 
+0800 -@@ -909,6 +909,8 @@ - current_app.logger.error(attr_error) - except SQLAlchemyError as sql_error: - current_app.logger.error(sql_error) -+ if not result_list: -+ return ResponseCode.PACK_NAME_NOT_FOUND, result_list - return_tuple = namedtuple( - 'return_tuple', 'subpack_name sub_pack_version search_version search_name') - for search_name in search_set: diff --git a/0009-optimize-log-records-when-obtaining-issue-content.patch b/0009-optimize-log-records-when-obtaining-issue-content.patch deleted file mode 100644 index 2b6c0cc..0000000 --- a/0009-optimize-log-records-when-obtaining-issue-content.patch +++ /dev/null @@ -1,134 +0,0 @@ -diff --git a/packageship/application/apps/lifecycle/function/gitee.py b/packageship/application/apps/lifecycle/function/gitee.py -index 4ac077f..8ca4ccf 100644 ---- a/packageship/application/apps/lifecycle/function/gitee.py -+++ b/packageship/application/apps/lifecycle/function/gitee.py -@@ -8,6 +8,7 @@ from json import JSONDecodeError - from retrying import retry - import requests - from requests.exceptions import HTTPError -+from requests.exceptions import RequestException - from sqlalchemy.exc import SQLAlchemyError - from packageship.libs.dbutils import DBHelper - from packageship.libs.configutils.readconfig import ReadConfig -@@ -42,6 +43,8 @@ class Gitee(): - "patch_files_path") - self.table_name = table_name - self.producer_consumer = ProducerConsumer() -+ self._issue_url = None -+ self.total_page = 0 - - def query_issues_info(self, issue_id=""): - """ -@@ -53,55 +56,58 @@ class Gitee(): - Raises: - - """ -- issue_url = self.api_url + \ -- "/{}/{}/issues/{}".format(self.owner, self.repo, issue_id) -+ self._issue_url = self.api_url + \ -+ "/{}/{}/issues/{}".format(self.owner, self.repo, issue_id) - try: -- response = requests.get( -- issue_url, params={"state": "all", "per_page": 100}) -- except Error as error: -+ response = self._request_issue(0) -+ except (HTTPError, RequestException) as error: - LOGGER.logger.error(error) - return None -- if response.status_code != 200: -- return None -- total_page = 1 if issue_id else int(response.headers['total_page']) -+ -+ self.total_page = 1 if issue_id else int( -+ response.headers['total_page']) - total_count = int(response.headers['total_count']) -+ - if total_count > 0: -- issue_list = self._query_per_page_issue_info(total_page, issue_url) -+ issue_list = self._query_per_page_issue_info() - if not issue_list: - LOGGER.logger.error( - "An error occurred while querying {}".format(self.repo)) - return None - self._save_issues(issue_list) - -- def _query_per_page_issue_info(self, total_page, issue_url): -+ @retry(stop_max_attempt_number=3, stop_max_delay=1000) -+ def _request_issue(self, page): -+ try: -+ response = requests.get(self._issue_url, -+ params={"state": "all", "per_page": 100, "page": page}) -+ except RequestException as error: -+ raise RequestException(error) -+ if response.status_code != 200: -+ _msg = "There is an exception with the remote service [%s]," \ -+ "Please try again later.The HTTP error code is:%s" % (self._issue_url, str( -+ response.status_code)) -+ raise HTTPError(_msg) -+ return response -+ -+ def _query_per_page_issue_info(self): - """ - Description: View the issue details - Args: - total_page: total page -- issue_url: issue url - - Returns: - - """ - issue_content_list = [] -- for i in range(1, total_page + 1): -- -- @retry(stop_max_attempt_number=3, stop_max_delay=1000) -- def request_issue(page, issue_url): -- try: -- response = requests.get(issue_url, -- params={"state": 
"all", "per_page": 100, "page": page}) -- except HTTPError: -- raise HTTPError('Network request error') -- return response -- -+ for i in range(1, self.total_page + 1): - try: -- response = request_issue(i, issue_url) -- if response.status_code != 200: -- LOGGER.logger.warning(response.content.decode("utf-8")) -- continue -+ response = self._request_issue(i) - issue_content_list.extend( - self.parse_issues_content(response.json())) -+ except (HTTPError, RequestException) as error: -+ LOGGER.logger.error(error) -+ continue - except (JSONDecodeError, Error) as error: - LOGGER.logger.error(error) - return issue_content_list -@@ -114,12 +120,9 @@ class Gitee(): - try: - def _save(issue_module): - with DBHelper(db_name='lifecycle') as database: -- - exist_issues = database.session.query(PackagesIssue).filter( - PackagesIssue.issue_id == issue_module['issue_id']).first() - if exist_issues: -- -- # Save the issue - for key, val in issue_module.items(): - setattr(exist_issues, key, val) - else: -@@ -130,11 +133,11 @@ class Gitee(): - with DBHelper(db_name='lifecycle') as database: - database.add(package_module) - -+ # Save the issue - for issue_item in issue_list: -- self.producer_consumer.put( -- (copy.deepcopy(issue_item), _save)) -+ self.producer_consumer.put((copy.deepcopy(issue_item), _save)) - -- # The number of various issues in the update package -+ # The number of various issues in the update package - self.pkg_info.defect = self.defect - self.pkg_info.feature = self.feature - self.pkg_info.cve = self.cve diff --git a/pkgship-1.1.0.tar.gz b/pkgship-1.1.0.tar.gz deleted file mode 100644 index 9e45007..0000000 Binary files a/pkgship-1.1.0.tar.gz and /dev/null differ diff --git a/pkgship-2.1.0.tar.gz b/pkgship-2.1.0.tar.gz new file mode 100644 index 0000000..eef5750 Binary files /dev/null and b/pkgship-2.1.0.tar.gz differ diff --git a/pkgship.spec b/pkgship.spec index 0abe1da..35b24ba 100644 --- a/pkgship.spec +++ b/pkgship.spec @@ -1,75 +1,92 @@ Name: pkgship -Version: 1.1.0 -Release: 14 +Version: 2.1.0 +Release: 4 Summary: Pkgship implements rpm package dependence ,maintainer, patch query and so no. License: Mulan 2.0 -URL: https://gitee.com/openeuler/openEuler-Advisor -Source0: https://gitee.com/openeuler/openEuler-Advisor/pkgship-%{version}.tar.gz +URL: https://gitee.com/openeuler/pkgship +Source0: https://gitee.com/openeuler/pkgship-%{version}.tar.gz -# Modify the query logic of package information, reduce redundant queries and align dnf query results, -# extract multiplexing functions, add corresponding docString, and clear pylint -Patch0: 0001-solve-installation-dependency-query-error.patch - -# Fix the problem of continuous spaces in message information in log records -Patch1: 0002-fix-the-problem-of-continuous-spaces.patch - -# When initializing logging, modify the incoming class object to an instance of the class, -# ensure the execution of internal functions,and read configuration file content -Patch2: 0003-fix-log_level-configuration-item-not-work.patch - -# Fix the error when executing query commands -Patch3: 0004-fix-the-error-when-executing-query-commands.patch - -# Add the judgment of whether the subpack_name attribute exists, fix the code indentation problem, -# and reduce the judgment branch of the old code. 
diff --git a/pkgship-1.1.0.tar.gz b/pkgship-1.1.0.tar.gz
deleted file mode 100644
index 9e45007..0000000
Binary files a/pkgship-1.1.0.tar.gz and /dev/null differ
diff --git a/pkgship-2.1.0.tar.gz b/pkgship-2.1.0.tar.gz
new file mode 100644
index 0000000..eef5750
Binary files /dev/null and b/pkgship-2.1.0.tar.gz differ
diff --git a/pkgship.spec b/pkgship.spec
index 0abe1da..35b24ba 100644
--- a/pkgship.spec
+++ b/pkgship.spec
@@ -1,75 +1,92 @@
 Name: pkgship
-Version: 1.1.0
-Release: 14
+Version: 2.1.0
+Release: 4
 Summary: Pkgship implements rpm package dependence, maintainer, patch query and so on.
 License: Mulan 2.0
-URL: https://gitee.com/openeuler/openEuler-Advisor
-Source0: https://gitee.com/openeuler/openEuler-Advisor/pkgship-%{version}.tar.gz
+URL: https://gitee.com/openeuler/pkgship
+Source0: https://gitee.com/openeuler/pkgship-%{version}.tar.gz
 
-# Modify the query logic of package information, reduce redundant queries and align dnf query results,
-# extract multiplexing functions, add corresponding docString, and clear pylint
-Patch0: 0001-solve-installation-dependency-query-error.patch
-
-# Fix the problem of continuous spaces in message information in log records
-Patch1: 0002-fix-the-problem-of-continuous-spaces.patch
-
-# When initializing logging, modify the incoming class object to an instance of the class,
-# ensure the execution of internal functions,and read configuration file content
-Patch2: 0003-fix-log_level-configuration-item-not-work.patch
-
-# Fix the error when executing query commands
-Patch3: 0004-fix-the-error-when-executing-query-commands.patch
-
-# Add the judgment of whether the subpack_name attribute exists, fix the code indentation problem,
-# and reduce the judgment branch of the old code.
-Patch4: 0005-fix-the-error-when-source-package-has-no-sub-packages.patch
-
-# Solve the problem of data duplication, increase the maximum queue length judgment,
-# and avoid occupying too much memory
-Patch5: 0006-fix-memory_caused-service-crash-and-data-duplication-issue.patch
-
-# Fix the problem of function parameters
-Patch6: 0007-correct-the-parameter-transfer-method-and-change-the-status-recording-method.patch
-
-# Fix the selfbuild error message
-Patch7: 0008-fix-selfbuild-error-message.patch
-
-# Optimize-log-records-when-obtaining-issue-content
-Patch8: 0009-optimize-log-records-when-obtaining-issue-content.patch
 
 BuildArch: noarch
-BuildRequires: python3-flask-restful python3-flask python3 python3-pyyaml python3-sqlalchemy
-BuildRequires: python3-prettytable python3-requests python3-flask-session python3-flask-script python3-marshmallow
-BuildRequires: python3-Flask-APScheduler python3-pandas python3-retrying python3-xlrd python3-XlsxWriter
-BuildRequires: python3-concurrent-log-handler
-Requires: python3-pip python3-flask-restful python3-flask python3 python3-pyyaml
-Requires: python3-sqlalchemy python3-prettytable python3-requests python3-concurrent-log-handler
-Requires: python3-flask-session python3-flask-script python3-marshmallow python3-uWSGI
-Requires: python3-pandas python3-dateutil python3-XlsxWriter python3-xlrd python3-Flask-APScheduler python3-retrying
+BuildRequires: shadow
+BuildRequires: python3-flask-restful python3-flask python3 python3-pyyaml python3-redis
+BuildRequires: python3-prettytable python3-requests python3-retrying python3-coverage
+BuildRequires: python3-marshmallow python3-uWSGI python3-gevent python3-Flask-Limiter
+BuildRequires: python3-elasticsearch
+
+Requires: shadow
+Requires: python3-flask-restful python3-flask python3 python3-pyyaml python3-redis
+Requires: python3-prettytable python3-requests python3-retrying python3-coverage
+Requires: python3-marshmallow python3-uWSGI python3-gevent python3-Flask-Limiter
+Requires: python3-elasticsearch
 
 %description
 Pkgship implements rpm package dependence, maintainer, patch query and so on.
 
 %prep
-%autosetup -n pkgship-%{version} -p1
+%autosetup -n pkgship-%{version}
 
 %build
 %py3_build
+current_path=`pwd`
+cd $current_path'/packageship'
+version_=%{version}
+release_=%{release}
+version_file=version.yaml
+if [ -f "$version_file" ];then
+    rm -rf $version_file
+fi
+touch $version_file
+echo "create version.yaml successfully."
+echo "Version: $version_" >> $version_file
+echo "Release: $release_" >> $version_file
 
 %install
%py3_install
 
 %check
-# The apscheduler cannot catch the local time, so a time zone must be assigned before running the test case.
-export TZ=Asia/Shanghai
-# change log_path to solve default log_path permission denied problem
-log_path=`pwd`/tmp/
-sed -i "/\[LOG\]/a\log_path=$log_path" test/common_files/package.ini
-%{__python3} -m unittest test/init_test.py
-%{__python3} -m unittest test/read_test.py
-%{__python3} -m unittest test/write_test.py
-rm -rf $log_path
+%{__python3} -m unittest test/coverage_count.py
+
+%pre
+user=pkgshipuser
+group=pkgshipuser
+
+# create group if not exists
+egrep -w "^$group" /etc/group >& /dev/null
+if [ $? -ne 0 ]
+then
+    groupadd $group
+fi
+
+# create user if not exists
+egrep -w "^$user" /etc/passwd >& /dev/null
+if [ $? -ne 0 ]
+then
+    useradd -g $group $user
+fi
+
+
+# create dir or file if not exists
+function create_dir_file(){
+if [ $3 = "d" ];then
+    if [ ! -d "$1" ];then
+        mkdir -p -m $2 $1
+    fi
+elif [ $3 = "f" ];then
+    if [ -f $1 ];then
+        rm -rf $1
+    fi
+    touch $1
+    chmod $2 $1
+fi
+chown -R $user:$group $1
+}
+
+create_dir_file /opt/pkgship/ 750 d
+create_dir_file /var/log/pkgship 750 d
+create_dir_file /var/log/pkgship-operation 700 d
+create_dir_file /etc/logrotate.d/pkgship 644 f
 
 %post
@@ -78,12 +95,163 @@ rm -rf $log_path
 %files
 %doc README.md
-%{python3_sitelib}/*
-%attr(0755,root,root) %config %{_sysconfdir}/pkgship/*
-%attr(0755,root,root) %{_bindir}/pkgshipd
-%attr(0755,root,root) %{_bindir}/pkgship
+%attr(0750,pkgshipuser,pkgshipuser) %{python3_sitelib}/*
+%attr(0755,pkgshipuser,pkgshipuser) %config %{_sysconfdir}/pkgship/*
+%attr(0755,pkgshipuser,pkgshipuser) %{_bindir}/pkgshipd
+%attr(0755,pkgshipuser,pkgshipuser) %{_bindir}/pkgship
+%attr(0750,root,root) /etc/pkgship/auto_install_pkgship_requires.sh
+%attr(0640,pkgshipuser,pkgshipuser) /etc/pkgship/package.ini
+%attr(0644,pkgshipuser,pkgshipuser) /etc/pkgship/conf.yaml
+%attr(0640,pkgshipuser,pkgshipuser) /lib/systemd/system/pkgship.service
 
 %changelog
+* Tue Mar 2 2021 Yiru Wang - 2.1.0-4
+- change pkgship-operation permission to 700 to get execute permission while creating files
+- delete /home/pkgusers/log and /home/pkgusers/uswgi, which moved to /opt/pkgship/
+
+* Mon Mar 1 2021 Yiru Wang - 2.1.0-3
+- change pkgship's files owner and permission
+- change pkgship's database from sqlite to elasticsearch
+- modify pkgship's BuildRequires and Requires
+
+* Thu Jan 14 2021 Shenmei Tu
+- Add unit test for all src packages interface
+
+* Tue Jan 5 2021 Shenmei Tu
+- Analyze bedepend and selfbuild dependency result for command line
+
+* Thu Dec 17 2020 Yiru Wang
+- Add the basic schema file for pkgship based on elasticsearch
+
+* Wed Dec 23 2020 Pengju Jiang
+- Four kinds of dependent zip download batch upload - Write the parsed data to CSV part of the code for uploading
+
+* Tue Dec 22 2020 Shenmei Tu
+- Analyze install and build dependency result for command line
+
+* Mon Dec 21 2020 Chengqiang Bao
+- SingleGraph interface should be modified in response to the modification of Level and Batch
+
+* Mon Dec 21 2020 Pengju Jiang
+- Four kinds of dependent zip download batch upload - be_depend data parsing
+
+* Tue Dec 17 2020 Pengju Jiang
+- Four kinds of dependent zip download batch upload - build dependent data parsing
+
+* Thu Dec 17 2020 Chengqiang Bao
+- Add not_found_packages in output result for be depend interface
+
+* Thu Dec 17 2020 Chengqiang Bao
+- Add level and batch query for dependinfo bedepend, installdepend, builddepend interface
+
+* Tue Dec 15 2020 Shenmei Tu
+- Add batch query for self depend interface and dependinfo self depend interface
+
+* Mon Dec 14 2020 Chengqiang Bao
+- Add level and batch query for build depend interface
+
+* Mon Dec 14 2020 Shenmei Tu
+- Add not_found_packages in output result for install depend interface
+
+* Fri Dec 11 2020 Shaowei Cheng
+- Echo effect optimization, constants file extraction
+
+* Tue Dec 8 2020 Pengju Jiang
+- Four kinds of dependent zip download batch upload - dependent data parsing
+
+* Fri Dec 4 2020 Shaowei Cheng
+- Echo effect optimization
+
+* Thu Dec 03 2020 Chengqiang Bao
+- Add level and batch query for be depend interface
+
+* Mon Nov 30 2020 Pengju Jiang
+- Four kinds of dependent zip download batch upload - dependent data parsing
+
+* Mon Nov 30 2020 Shenmei Tu
+- Add level and batch query for install depend interface
+
+* Mon Nov 30 2020 Pengju Jiang
+- Modify the address of the database after successful initialization
+
+* Sat Nov 28 2020 Pengju Jiang
+- Test case refactoring-upload in batches 5
+
+* Sat Nov 28 2020 Shenmei Tu
+- Test case refactoring-upload in batches 4
+
+* Fri Nov 27 2020 Shenmei Tu
+- Test case refactoring-upload in batches 3
+
+* Thu Nov 26 2020 Chengqiang Bao
+- Test case refactoring-upload in batches 2
+
+* Wed Nov 25 2020 Shenmei Tu
+- Test case refactoring-upload in batches 1
+
+* Mon Nov 23 2020 Shenmei Tu
+- Modification of add_sig_info interface bug, adding test cases for this interface
+
+* Mon Nov 18 2020 Pengju Jiang
+- Upload zip file download in batches-basic code
+
+* Tue Nov 10 2020 Shenmei Tu
+- New requirement: add filelist query interface
+
+* Wed Nov 4 2020 Chengqiang Bao
+- pkgship add license to all files
+
+* Wed Nov 4 2020 Shaowei Cheng
+- Solve the problem that the release time value cannot be obtained
+
+* Tue Nov 3 2020 Shaowei Cheng
+- When the dependency graph in pkgship is aimed at the source code
+  package display, the build type package will be used as its next dependency
+
+* Tue Nov 3 2020 Yiru Wang
+- Add the license file in the root directory of pkgship
+
+* Tue Nov 3 2020 Xinxing Li
+- Add loading status and modify issue-list interface
+
+* Sat Oct 31 2020 Chengqiang Bao
+- The bedepend interface adds exception capture and modifies the accuracy
+  of query results in special scenarios
+
+* Sat Oct 31 2020 Chengqiang Bao
+- The web function adds an interface for obtaining installation dependent
+  results, an interface for obtaining compile dependent results, and an
+  interface for obtaining graphics.
+
+* Thu Oct 29 2020 Shenmei Tu
+- New requirement: save "sig" information in the database
+
+* Thu Oct 29 2020 Pengju Jiang
+- Unit test refactoring, unit test of three interfaces
+
+* Wed Oct 28 2020 Shaowei Cheng
+- Improve the /lifeCycle/issueTrace interface in pkgship
+
+* Wed Oct 28 2020 Pengju Jiang
+- Unit test reconstruction, basic framework submission
+
+* Wed Oct 28 2020 Zhengtang Gong
+- pkgship initialization adds filelist data import, and replaces the
+  previous local sqlite file import method with the form of repo source
+
+* Thu Oct 22 2020 Pengju Jiang
+- Solve the problem of crash when calling get_all_package_info and sing_pack,
+  and the problem of function return value error
+
+* Wed Oct 21 2020 Zhengtang Gong
+- Modify the files involved in the configuration file
+
+* Wed Oct 21 2020 Shaowei Cheng
+- Bug fix, add parameter checks of pagenum, pagesize
+
 * Tue Oct 13 2020 ZhangTao 1.1.0-14
 - correct-the-parameter-transfer-method-and-change-the-status-recording-method.