update pkgship to 2.1.0
commit a76209b900 (parent 56a933e5c2)
@@ -1,255 +0,0 @@
diff --git a/packageship/application/initsystem/data_import.py b/packageship/application/initsystem/data_import.py
index c2169c1..a5846bd 100644
--- a/packageship/application/initsystem/data_import.py
+++ b/packageship/application/initsystem/data_import.py
@@ -84,8 +84,8 @@ class InitDataBase():
 
         if not os.path.exists(self.config_file_path):
             raise FileNotFoundError(
-                'system initialization configuration file \
-                    does not exist: %s' % self.config_file_path)
+                "system initialization configuration file"
+                "does not exist: %s" % self.config_file_path)
         # load yaml configuration file
         with open(self.config_file_path, 'r', encoding='utf-8') as file_context:
             try:
@@ -93,24 +93,25 @@ class InitDataBase():
                     file_context.read(), Loader=yaml.FullLoader)
             except yaml.YAMLError as yaml_error:
-                raise ConfigurationException(' '.join("The format of the yaml configuration\
-                    file is wrong please check and try again:{0}".format(yaml_error).split()))
+                raise ConfigurationException(
+                    "The format of the yaml configuration"
+                    "file is wrong please check and try again:{0}".format(yaml_error))
 
         if init_database_config is None:
             raise ConfigurationException(
                 'The content of the database initialization configuration file cannot be empty')
         if not isinstance(init_database_config, list):
             raise ConfigurationException(
-                ' '.join('The format of the initial database configuration file\
-                    is incorrect.When multiple databases need to be initialized, \
-                    it needs to be configured in the form of multiple \
-                    nodes:{}'.format(self.config_file_path).split()))
+                "The format of the initial database configuration file"
+                "is incorrect.When multiple databases need to be initialized,"
+                "it needs to be configured in the form of multiple"
+                "nodes:{}".format(self.config_file_path))
         for config_item in init_database_config:
             if not isinstance(config_item, dict):
-                raise ConfigurationException(' '.join('The format of the initial database\
-                    configuration file is incorrect, and the value in a single node should\
-                    be presented in the form of key - val pairs: \
-                    {}'.format(self.config_file_path).split()))
+                raise ConfigurationException(
+                    "The format of the initial database"
+                    "configuration file is incorrect, and the value in a single node should"
+                    "be presented in the form of key - val pairs:{}".format(self.config_file_path))
         return init_database_config
 
     def init_data(self):
@@ -122,8 +123,8 @@ class InitDataBase():
         """
         if getattr(self, 'config_file_datas', None) is None or \
                 self.config_file_datas is None:
-            raise ContentNoneException('The content of the database initialization \
-                configuration file is empty')
+            raise ContentNoneException("The content of the database initialization"
+                                       "configuration file is empty")
 
         if self.__exists_repeat_database():
             raise DatabaseRepeatException(
@@ -139,13 +140,13 @@ class InitDataBase():
                 continue
             priority = database_config.get('priority')
             if not isinstance(priority, int) or priority < 0 or priority > 100:
-                LOGGER.logger.error('The priority value type in the database initialization \
-                    configuration file is incorrect')
+                LOGGER.logger.error("The priority value type in the database initialization"
+                                    "configuration file is incorrect")
                 continue
             lifecycle_status_val = database_config.get('lifecycle')
             if lifecycle_status_val not in ('enable', 'disable'):
-                LOGGER.logger.error('The status value of the life cycle in the initialization\
-                    configuration file can only be enable or disable')
+                LOGGER.logger.error("The value of the life cycle in the initialization"
+                                    "configuration file can only be enable or disable")
                 continue
             # Initialization data
             self._init_data(database_config)
@@ -163,8 +164,8 @@ class InitDataBase():
         """
         _database_engine = self._database_engine.get(self.db_type)
         if not _database_engine:
-            raise Error('The database engine is set incorrectly, \
-                currently only the following engines are supported: %s '
+            raise Error("The database engine is set incorrectly,"
+                        "currently only the following engines are supported: %s "
                         % '、'.join(self._database_engine.keys()))
         _create_table_result = _database_engine(
             db_name=db_name, tables=tables, storage=storage).create_database(self)
@@ -200,11 +201,12 @@ class InitDataBase():
 
         if src_db_file is None or bin_db_file is None:
             raise ContentNoneException(
-                'The path to the sqlite file in the database initialization configuration \
-                    is incorrect ')
+                "The path to the sqlite file in the database initialization"
+                "configuration is incorrect ")
         if not os.path.exists(src_db_file) or not os.path.exists(bin_db_file):
-            raise FileNotFoundError("sqlite file {src} or {bin} does not exist, please \
-                check and try again".format(src=src_db_file, bin=bin_db_file))
+            raise FileNotFoundError(
+                "sqlite file {src} or {bin} does not exist, please"
+                "check and try again".format(src=src_db_file, bin=bin_db_file))
         # 3. Obtain temporary source package files and binary package files
         if self.__save_data(database_config,
                             self.database_name):
@@ -314,23 +316,20 @@ class InitDataBase():
 
         Args:
             db_name: Saved database name
-        Returns:
-
-        Raises:
-
         """
         # Query all source packages
         self.sql = " select * from packages "
         packages_datas = self.__get_data()
         if packages_datas is None:
             raise ContentNoneException(
-                '{db_name}:There is no relevant data in the source \
-                    package provided '.format(db_name=db_name))
+                "{db_name}:There is no relevant data in the source "
+                "package provided ".format(db_name=db_name))
         for index, src_package_item in enumerate(packages_datas):
             try:
                 src_package_name = '-'.join([src_package_item.get('name'),
                                              src_package_item.get('version'),
-                                             src_package_item.get('release') + '.src.rpm'
+                                             src_package_item.get(
+                                                 'release') + '.src.rpm'
                                              ])
             except AttributeError as exception_msg:
                 src_package_name = None
@@ -391,8 +390,9 @@ class InitDataBase():
         self.sql = " select * from requires "
         requires_datas = self.__get_data()
         if requires_datas is None:
-            raise ContentNoneException('{db_name}: The package data that the source package \
-                depends on is empty'.format(db_name=db_name))
+            raise ContentNoneException(
+                "{db_name}: The package data that the source package "
+                "depends on is empty".format(db_name=db_name))
         with DBHelper(db_name=db_name) as database:
             database.batch_add(requires_datas, SrcRequires)
 
@@ -411,8 +411,8 @@ class InitDataBase():
         bin_packaegs = self.__get_data()
         if bin_packaegs is None:
             raise ContentNoneException(
-                '{db_name}:There is no relevant data in the provided \
-                    binary package '.format(db_name=db_name))
+                "{db_name}:There is no relevant data in the provided "
+                "binary package ".format(db_name=db_name))
         for index, bin_package_item in enumerate(bin_packaegs):
             try:
                 src_package_name = bin_package_item.get('rpm_sourcerpm').split(
@@ -441,8 +441,8 @@ class InitDataBase():
         requires_datas = self.__get_data()
         if requires_datas is None:
             raise ContentNoneException(
-                '{db_name}:There is no relevant data in the provided binary \
-                    dependency package'.format(db_name=db_name))
+                "{db_name}:There is no relevant data in the provided binary "
+                "dependency package".format(db_name=db_name))
 
         with DBHelper(db_name=db_name) as database:
             database.batch_add(requires_datas, BinRequires)
@@ -462,8 +462,8 @@ class InitDataBase():
         provides_datas = self.__get_data()
         if provides_datas is None:
             raise ContentNoneException(
-                '{db_name}:There is no relevant data in the provided \
-                    binary component '.format(db_name=db_name))
+                "{db_name}:There is no relevant data in the provided "
+                "binary component ".format(db_name=db_name))
 
         with DBHelper(db_name=db_name) as database:
             database.batch_add(provides_datas, BinProvides)
@@ -474,8 +474,8 @@ class InitDataBase():
         files_datas = self.__get_data()
         if files_datas is None:
             raise ContentNoneException(
-                '{db_name}:There is no relevant binary file installation\
-                    path data in the provided database '.format(db_name=db_name))
+                "{db_name}:There is no relevant binary file installation "
+                "path data in the provided database ".format(db_name=db_name))
 
         with DBHelper(db_name=db_name) as database:
             database.batch_add(files_datas, BinFiles)
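For reference, the rewrite above trades backslash-continued string literals for adjacent string literals, which Python concatenates at compile time; the backslash form keeps the continuation line's indentation inside the message. A minimal standalone sketch (the path value is hypothetical), which also shows that adjacent literals add no implicit space, so a separating space must be written into one of them:

    # Backslash continuation embeds the next line's indentation in the string;
    # adjacent literals are concatenated at compile time with nothing added.
    config_file_path = '/etc/pkgship/conf.yaml'  # hypothetical path
    old_msg = 'system initialization configuration file \
        does not exist: %s' % config_file_path
    new_msg = ("system initialization configuration file "
               "does not exist: %s" % config_file_path)
    print(repr(old_msg))  # stray run of spaces in the middle of the message
    print(repr(new_msg))  # single clean sentence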
diff --git a/packageship/libs/dbutils/sqlalchemy_helper.py b/packageship/libs/dbutils/sqlalchemy_helper.py
index a0b22e2..d18b115 100644
--- a/packageship/libs/dbutils/sqlalchemy_helper.py
+++ b/packageship/libs/dbutils/sqlalchemy_helper.py
@@ -279,8 +279,8 @@ class DBHelper(BaseHelper):
 
         if not isinstance(dicts, list):
             raise TypeError(
-                'The input for bulk insertion must be a dictionary \
-                    list with the same fields as the current entity')
+                "The input for bulk insertion must be a dictionary"
+                "list with the same fields as the current entity")
         try:
             self.session.execute(
                 model.__table__.insert(),
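The batch_add() hunk above ends at the Core-level executemany call. A self-contained sketch of that pattern follows, assuming SQLAlchemy 1.4+ and using a stand-in model (not pkgship's real entity definitions):

    # Bulk insert through the Core table object: one INSERT statement
    # executed with many parameter sets, far cheaper than adding ORM
    # objects one at a time.
    from sqlalchemy import Column, Integer, String, create_engine
    from sqlalchemy.orm import declarative_base, sessionmaker

    Base = declarative_base()

    class SrcRequires(Base):  # stand-in for the real entity
        __tablename__ = 'src_requires'
        id = Column(Integer, primary_key=True)
        name = Column(String)

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = sessionmaker(bind=engine)()

    dicts = [{'name': 'gcc'}, {'name': 'make'}]
    if not isinstance(dicts, list):
        raise TypeError("The input for bulk insertion must be a dictionary "
                        "list with the same fields as the current entity")
    session.execute(SrcRequires.__table__.insert(), dicts)
    session.commit()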
diff --git a/packageship/pkgship.py b/packageship/pkgship.py
index 884b2ab..f9408c8 100644
--- a/packageship/pkgship.py
+++ b/packageship/pkgship.py
@@ -25,8 +25,8 @@ try:
 
     LOGGER = Log(__name__)
 except ImportError as import_error:
-    print('Error importing related dependencies, \
-        please check if related dependencies are installed')
+    print("Error importing related dependencies,"
+          "please check if related dependencies are installed")
 else:
     from packageship.application.apps.package.function.constants import ResponseCode
     from packageship.application.apps.package.function.constants import ListNode
@@ -230,7 +230,9 @@ class PkgshipCommand(BaseCommand):
         if package_all.get("not_found_components"):
             print("Problem: Not Found Components")
             for not_found_com in package_all.get("not_found_components"):
-                print(" - nothing provides {} needed by {} ".format(not_found_com, params.packagename))
+                print(
+                    " - nothing provides {} needed by {} ".
+                    format(not_found_com, params.packagename))
         package_all = package_all.get("build_dict")
 
         for bin_package, package_depend in package_all.items():
@@ -835,7 +837,9 @@ class InstallDepCommand(PkgshipCommand):
         if package_all.get("not_found_components"):
             print("Problem: Not Found Components")
             for not_found_com in package_all.get("not_found_components"):
-                print(" - nothing provides {} needed by {} ".format(not_found_com, params.packagename))
+                print(
+                    " - nothing provides {} needed by {} ".
+                    format(not_found_com, params.packagename))
         for bin_package, package_depend in package_all.get("install_dict").items():
             # distinguish whether the current data is the data of the root node
             if isinstance(package_depend, list) and package_depend[-1][0][0] != 'root':
@@ -1061,7 +1065,9 @@ class SelfBuildCommand(PkgshipCommand):
         if package_all.get("not_found_components"):
             print("Problem: Not Found Components")
             for not_found_com in package_all.get("not_found_components"):
-                print(" - nothing provides {} needed by {} ".format(not_found_com, params.packagename))
+                print(
+                    " - nothing provides {} needed by {} ".
+                    format(not_found_com, params.packagename))
         bin_package_count = self._parse_bin_package(
             package_all.get('binary_dicts'))
@@ -1,55 +0,0 @@
diff --git a/packageship/application/__init__.py b/packageship/application/__init__.py
index 1361058..6a57a2e 100644
--- a/packageship/application/__init__.py
+++ b/packageship/application/__init__.py
@@ -2,8 +2,6 @@
 """
 Initial operation and configuration of the flask project
 """
-import sys
-import threading
 from flask import Flask
 from flask_session import Session
 from flask_apscheduler import APScheduler
@@ -19,7 +17,9 @@ def _timed_task(app):
     """
     Timed task function
     """
-    from .apps.lifecycle.function.download_yaml import update_pkg_info  # pylint: disable=import-outside-toplevel
+    # disable=import-outside-toplevel Avoid circular import problems,so import inside the function
+    # pylint: disable=import-outside-toplevel
+    from packageship.application.apps.lifecycle.function.download_yaml import update_pkg_info
 
     _readconfig = ReadConfig(system_config.SYS_CONFIG_PATH)
     try:
@@ -34,6 +34,7 @@ def _timed_task(app):
     if _minute < 0 or _minute > 59:
         _minute = 0
 
+    # disable=no-member Dynamic variable pylint is not recognized
     app.apscheduler.add_job(  # pylint: disable=no-member
         func=update_pkg_info, id="update_package_data", trigger="cron", hour=_hour, minute=_minute)
     app.apscheduler.add_job(  # pylint: disable=no-member
@@ -52,7 +53,8 @@ def init_app(operation):
     app = Flask(__name__)
 
     # log configuration
-    app.logger.addHandler(setup_log(Config))
+    # disable=no-member Dynamic variable pylint is not recognized
+    app.logger.addHandler(setup_log(Config()))  # pylint: disable=no-member
 
     # Load configuration items
 
@@ -66,10 +68,12 @@ def init_app(operation):
     # Open session function
     Session(app)
 
+    # Variables OPERATION need to be modified within the function and imported in other modules
     global OPERATION  # pylint: disable=global-statement
     OPERATION = operation
 
     # Register Blueprint
+    # disable=import-outside-toplevel Avoid circular import problems,so import inside the function
     from packageship.application import apps  # pylint: disable=import-outside-toplevel
     for blue, api in apps.blue_point:
         api.init_app(app)
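For context, the add_job calls that the new pylint comments annotate follow the usual flask_apscheduler pattern; a runnable sketch with placeholder schedule values (in the real code _hour and _minute come from the configuration file read by ReadConfig, and update_pkg_info is the imported task):

    # flask_apscheduler attaches itself to the app as app.apscheduler,
    # which is why pylint needs the no-member suppression above.
    from flask import Flask
    from flask_apscheduler import APScheduler

    def update_pkg_info():  # stand-in for the real timed task
        print("refreshing package data")

    app = Flask(__name__)
    scheduler = APScheduler()
    scheduler.init_app(app)
    scheduler.start()

    _hour, _minute = 3, 0  # placeholder values
    app.apscheduler.add_job(func=update_pkg_info, id="update_package_data",
                            trigger="cron", hour=_hour, minute=_minute)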
@@ -1,24 +0,0 @@
diff --git a/packageship/application/apps/package/function/packages.py b/packageship/application/apps/package/function/packages.py
index eb96087..d36fc34 100644
--- a/packageship/application/apps/package/function/packages.py
+++ b/packageship/application/apps/package/function/packages.py
@@ -313,7 +313,8 @@ def _sub_pack(src_name, table_name):
             pro_info = res[pro_obj.sub_name]["provides"]
             if pro_obj.sub_pro_name in pro_info:
                 pro_info[pro_obj.sub_pro_name]["requiredby"].update(
-                    {pro_obj.sub_reqby_name: pro_obj.sub_reqby_name})
+                    {pro_obj.sub_reqby_name: pro_obj.sub_reqby_name}
+                    if pro_obj.sub_reqby_name else {})
             else:
                 pro_info.update(
                     {
@@ -368,7 +369,8 @@ def _sub_pack(src_name, table_name):
             req_info = sub_pkg_info["requires"]
             if req_obj.sub_req_name in req_info:
                 req_info[req_obj.sub_req_name]["providedby"].update(
-                    {req_obj.sub_proby_name: req_obj.sub_proby_name})
+                    {req_obj.sub_proby_name: req_obj.sub_proby_name}
+                    if req_obj.sub_proby_name else {})
             else:
                 req_info.update(
                     {
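Both hunks apply the same guard: update with an empty dict when the name is None, so no None entry lands in the mapping. In isolation, with toy data:

    # update({...} if key else {}) skips inserting a None key instead of
    # recording a {None: None} entry in the "requiredby" mapping.
    requiredby = {}
    for sub_reqby_name in ("pkgA", None, "pkgB"):
        requiredby.update(
            {sub_reqby_name: sub_reqby_name} if sub_reqby_name else {})
    print(requiredby)  # {'pkgA': 'pkgA', 'pkgB': 'pkgB'}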
@@ -1,62 +0,0 @@
diff --git a/packageship/application/apps/package/function/self_depend.py b/packageship/application/apps/package/function/self_depend.py
index 1ec4c28..b06b950 100644
--- a/packageship/application/apps/package/function/self_depend.py
+++ b/packageship/application/apps/package/function/self_depend.py
@@ -106,16 +106,20 @@ class SelfDepend():
         """
         if packtype == 'source':
             response_code, subpack_list = self.search_db.get_sub_pack([packname])
-            if subpack_list:
-                for subpack_tuple, dbname in subpack_list:
-                    self.source_dicts.append_src(packname, dbname, subpack_tuple.search_version)
-                    if dbname != 'NOT FOUND':
-                        self.binary_dict.append_bin(key=subpack_tuple.subpack_name,
-                                                    src=packname,
-                                                    version=subpack_tuple.search_version,
-                                                    dbname=dbname)
-            else:
-                return ResponseCode.PACK_NAME_NOT_FOUND
+            if not subpack_list:
+                return ResponseCode.PACK_NAME_NOT_FOUND
+
+            for subpack_tuple, dbname in subpack_list:
+                self.source_dicts.append_src(packname, dbname, subpack_tuple.search_version)
+                if dbname == 'NOT FOUND':
+                    continue
+
+                if subpack_tuple.subpack_name and subpack_tuple.subpack_name \
+                        not in self.binary_dict.dictionary:
+                    self.binary_dict.append_bin(key=subpack_tuple.subpack_name,
+                                                src=packname,
+                                                version=subpack_tuple.search_version,
+                                                dbname=dbname)
 
         else:
             response_code, dbname, source_name, version = \
@@ -178,15 +182,17 @@ class SelfDepend():
             self.search_subpack_list.remove(None)
         _, result_list = self.search_db.get_sub_pack(self.search_subpack_list)
         for subpack_tuple, dbname in result_list:
-            if dbname != 'NOT FOUND':
-                if subpack_tuple.subpack_name and subpack_tuple.subpack_name \
-                        not in self.binary_dict.dictionary:
-                    self.binary_dict.append_bin(key=subpack_tuple.subpack_name,
-                                                src=subpack_tuple.search_name,
-                                                version=subpack_tuple.sub_pack_version,
-                                                dbname=dbname,
-                                                parent_node=[subpack_tuple.search_name, 'Subpack'])
-                    self.search_install_list.append(subpack_tuple.subpack_name)
+            if dbname == 'NOT FOUND':
+                continue
+
+            if subpack_tuple.subpack_name and subpack_tuple.subpack_name \
+                    not in self.binary_dict.dictionary:
+                self.binary_dict.append_bin(key=subpack_tuple.subpack_name,
+                                            src=subpack_tuple.search_name,
+                                            version=subpack_tuple.sub_pack_version,
+                                            dbname=dbname,
+                                            parent_node=[subpack_tuple.search_name, 'Subpack'])
+                self.search_install_list.append(subpack_tuple.subpack_name)
         self.search_subpack_list.clear()
 
     def query_build(self, selfbuild):
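Both hunks replace nested `if found: ... else: return` shapes with guard clauses (early return or continue), keeping the happy path at one indentation level. A compact model of the refactored control flow, with toy data and stand-in names:

    # Guard clauses: bail out early on the empty result, skip unusable
    # rows with continue, and leave the real work unindented.
    def collect(subpack_list, seen):
        if not subpack_list:           # guard: nothing found at all
            return None
        for subpack_name, dbname in subpack_list:
            if dbname == 'NOT FOUND':  # guard: skip unresolvable rows
                continue
            if subpack_name and subpack_name not in seen:
                seen[subpack_name] = dbname
        return seen

    print(collect([("pkgA", "os-db"), ("pkgB", "NOT FOUND")], {}))
    # {'pkgA': 'os-db'}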
@@ -1,211 +0,0 @@
diff -Naru pkgship-1.1.0/packageship/application/apps/package/function/build_depend.py pkg/packageship/application/apps/package/function/build_depend.py
--- pkgship-1.1.0/packageship/application/apps/package/function/build_depend.py	2020-10-13 13:57:13.529049796 +0800
+++ pkg/packageship/application/apps/package/function/build_depend.py	2020-10-13 13:58:37.670278333 +0800
@@ -89,9 +89,9 @@
             build_list,
             not_fd_com_build,
             pk_v
-        ) = self.search_db.get_build_depend(pkg_list, self.__already_pk_val)
+        ) = self.search_db.get_build_depend(pkg_list, pk_value=self.__already_pk_val)
 
-        self.__already_pk_val += pk_v
+        self.__already_pk_val = pk_v
         self.not_found_components.update(not_fd_com_build)
         if not build_list:
             return res_status if res_status == ResponseCode.DIS_CONNECTION_DB else \
@@ -102,8 +102,8 @@
 
         code, res_dict, not_fd_com_install = \
             InstallDepend(self.db_list).query_install_depend(search_list,
-                                                             self.history_dicts,
-                                                             self.__already_pk_val)
+                                                             history_pk_val=self.__already_pk_val,
+                                                             history_dicts=self.history_dicts)
         self.not_found_components.update(not_fd_com_install)
         if not res_dict:
             return code
@@ -206,8 +206,8 @@
             not_fd_com,
             pk_v
         ) = self.search_db.get_build_depend(pkg_name_li,
-                                            self.__already_pk_val)
-        self.__already_pk_val += pk_v
+                                            pk_value=self.__already_pk_val)
+        self.__already_pk_val = pk_v
         self.not_found_components.update(not_fd_com)
         if not bin_info_lis:
             return
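The call-site change above is the heart of this patch: the old positional call passed (history_dicts, pk_val) into a signature whose parameters are ordered (history_pk_val, history_dicts), silently swapping the two. Keyword arguments make the binding explicit regardless of parameter order; a minimal sketch with stand-in values:

    # Positional arguments land by position; keywords land by name.
    def query_install_depend(search_list, history_pk_val=None, history_dicts=None):
        return history_pk_val, history_dicts

    # old style: values end up in the wrong slots
    swapped = query_install_depend(["gcc"], {"seen": 1}, [101])
    # patched style: order at the call site no longer matters
    correct = query_install_depend(["gcc"],
                                   history_pk_val=[101],
                                   history_dicts={"seen": 1})
    print(swapped)   # ({'seen': 1}, [101])  -- the bug
    print(correct)   # ([101], {'seen': 1})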
diff -Naru pkgship-1.1.0/packageship/application/apps/package/function/install_depend.py pkg/packageship/application/apps/package/function/install_depend.py
--- pkgship-1.1.0/packageship/application/apps/package/function/install_depend.py	2020-10-13 13:57:13.529049796 +0800
+++ pkg/packageship/application/apps/package/function/install_depend.py	2020-10-13 13:58:37.680278477 +0800
@@ -68,7 +68,7 @@
                 self.__search_list.append(binary)
             else:
                 LOGGER.logger.warning("There is a NONE in input value: %s", str(binary_list))
-        self.__already_pk_value += history_pk_val if history_pk_val else []
+        self.__already_pk_value = history_pk_val if history_pk_val else []
         while self.__search_list:
             self.__query_single_install_dep(history_dicts)
         return ResponseCode.SUCCESS, self.binary_dict.dictionary, self.not_found_components
@@ -82,14 +82,11 @@
             response_code: response code
         Raises:
         """
-        result_list, not_found_components, pk_val = map(
-            set,
-            self.__search_db.get_install_depend(self.__search_list,
-                                                self.__already_pk_value)
-        )
-
+        res_list, not_found_components, pk_val = self.__search_db.get_install_depend(self.__search_list,
+                                                                                     pk_value=self.__already_pk_value)
+        result_list = set(res_list)
         self.not_found_components.update(not_found_components)
-        self.__already_pk_value += pk_val
+        self.__already_pk_value = pk_val
         for search in self.__search_list:
             if search not in self.binary_dict.dictionary:
                 self.binary_dict.init_key(key=search, parent_node=[])
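The `+=` to `=` switch is the "change the status recording method" half of the patch. A toy model of why, under the assumption (consistent with the pk_value keyword now being passed in) that get_install_depend() returns the complete key list, input keys included:

    # Assumed contract: the query receives the already-seen keys and
    # returns the full updated list, so appending its result again
    # would duplicate every previously recorded key.
    def get_install_depend(search, pk_value):
        return pk_value + [len(s) for s in search]  # toy key generation

    already = [1, 2]
    already = get_install_depend(["gcc"], pk_value=already)  # assign, not +=
    print(already)  # [1, 2, 3]; += would have yielded [1, 2, 1, 2, 3]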
diff -Naru pkgship-1.1.0/packageship/application/apps/package/function/searchdb.py pkg/packageship/application/apps/package/function/searchdb.py
--- pkgship-1.1.0/packageship/application/apps/package/function/searchdb.py	2020-10-13 13:57:13.529049796 +0800
+++ pkg/packageship/application/apps/package/function/searchdb.py	2020-10-13 13:58:37.680278477 +0800
@@ -94,7 +94,7 @@
 
         for db_name, data_base in self.db_object_dict.items():
             try:
-                req_set = self._get_requires(search_set, data_base, _tp='install')
+                req_set = self._get_requires(search_set, data_base, search_type='install')
 
                 if not req_set:
                     continue
@@ -104,7 +104,7 @@
                  pk_v,
                  not_fd_com) = self._get_provides_req_info(req_set,
                                                            data_base,
-                                                           pk_val)
+                                                           pk_value=pk_val)
                 pk_val += pk_v
                 res_list, get_list = self._comb_install_list(depend_set,
                                                              req_pk_dict,
@@ -121,7 +121,7 @@
             if not search_set:
                 result_list.extend(
                     self._get_install_pro_in_other_database(provides_not_found,
-                                                            db_name)
+                                                            database_name=db_name)
                 )
         return result_list, set(provides_not_found.keys()), pk_val
 
@@ -215,13 +215,13 @@
 
         return ret_list, get_list
 
-    def _get_install_pro_in_other_database(self, not_found_binary, _db_name=None):
+    def _get_install_pro_in_other_database(self, not_found_binary, database_name=None):
         """
         Description: Binary package name data not found in
             the current database, go to other databases to try
         Args:
             not_found_binary: not_found_build These data cannot be found in the current database
-            _db_name:current database name
+            database_name:current database name
         Returns:
             result_list :[return_tuple1,return_tuple2] package information
         Raises:
@@ -242,7 +242,7 @@
         search_set = {k for k, _ in not_found_binary.items()}
 
         for db_name, data_base in self.db_object_dict.items():
-            if db_name == _db_name:
+            if db_name == database_name:
                 continue
 
             parm_tuple = namedtuple("in_tuple", 'req_name')
@@ -362,7 +362,7 @@
         for db_name, data_base in self.db_object_dict.items():
 
             try:
-                req_set = self._get_requires(s_name_set, data_base, _tp='build')
+                req_set = self._get_requires(s_name_set, data_base, search_type='build')
 
                 if not req_set:
                     continue
@@ -384,7 +384,7 @@
             s_name_set.symmetric_difference_update(set(get_list))
             if not s_name_set:
                 build_list.extend(
-                    self._get_binary_in_other_database(provides_not_found, _db_name=db_name)
+                    self._get_binary_in_other_database(provides_not_found, database_name=db_name)
                 )
         return ResponseCode.SUCCESS, build_list, set(provides_not_found.keys()), pk_val
 
@@ -483,13 +483,13 @@
 
         return ret_list, get_list
 
-    def _get_binary_in_other_database(self, not_found_binary, _db_name=None):
+    def _get_binary_in_other_database(self, not_found_binary, database_name=None):
         """
         Description: Binary package name data not found in
             the current database, go to other databases to try
         Args:
             not_found_binary: not_found_build These data cannot be found in the current database
-            _db_name:current database name
+            database_name:current database name
         Returns:
             result_list :[return_tuple1,return_tuple2] package information
         Raises:
@@ -513,7 +513,7 @@
 
         for db_name, data_base in self.db_object_dict.items():
 
-            if db_name == _db_name:
+            if db_name == database_name:
                 continue
 
             in_tuple = namedtuple("in_tuple", 'req_name')
@@ -600,20 +600,20 @@
 
     # Common methods for install and build
     @staticmethod
-    def _get_requires(search_set, data_base, _tp=None):
+    def _get_requires(search_set, data_base, search_type=None):
         """
         Description: Query the dependent components of the current package
         Args:
             search_set: The package name to be queried
             data_base:current database object
-            _tp:type options build or install
+            search_type: type options build or install
         Returns:
             req_set:List Package information and corresponding component information
         Raises:
             AttributeError: The object does not have this property
             SQLAlchemyError: sqlalchemy error
         """
-        if _tp == 'build':
+        if search_type == 'build':
             sql_com = text("""
                 SELECT DISTINCT
                 src_requires.NAME AS req_name,
@@ -623,7 +623,7 @@
                 ( SELECT pkgKey, NAME, version, src_name FROM src_pack WHERE {} ) src
                 LEFT JOIN src_requires ON src.pkgKey = src_requires.pkgKey;
                 """.format(literal_column('name').in_(search_set)))
-        elif _tp == 'install':
+        elif search_type == 'install':
             sql_com = text("""
                 SELECT DISTINCT
                 bin_requires.NAME AS req_name,
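_get_requires() builds its query as textual SQL, splicing the IN clause in by formatting a literal_column('name').in_(...) expression into the string. A runnable sketch of the same query shape against an in-memory SQLite database; note it substitutes an expanding bind parameter for the string-formatting step, which is the parameterized equivalent:

    # Textual SQL with an expanding bind parameter: IN :names renders as
    # IN (?, ?) with one slot per element of the supplied list.
    from sqlalchemy import bindparam, create_engine, text

    engine = create_engine("sqlite://")
    with engine.connect() as conn:
        conn.execute(text("CREATE TABLE src_pack (pkgKey INTEGER, name TEXT)"))
        conn.execute(text("INSERT INTO src_pack VALUES (1, 'gcc'), (2, 'make')"))
        search_set = {"gcc", "make"}
        sql_com = text(
            "SELECT pkgKey, name FROM src_pack WHERE name IN :names"
        ).bindparams(bindparam("names", expanding=True))
        print(conn.execute(sql_com, {"names": list(search_set)}).fetchall())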
diff -Naru pkgship-1.1.0/packageship/application/apps/package/function/self_depend.py pkg/packageship/application/apps/package/function/self_depend.py
--- pkgship-1.1.0/packageship/application/apps/package/function/self_depend.py	2020-10-13 13:57:13.529049796 +0800
+++ pkg/packageship/application/apps/package/function/self_depend.py	2020-10-13 13:58:37.690278620 +0800
@@ -143,7 +143,7 @@
         self.result_tmp.clear()
         _, self.result_tmp, not_fd_com = \
             install_depend(self.db_list).query_install_depend(self.search_install_list,
-                                                              self.binary_dict.dictionary)
+                                                              history_dicts=self.binary_dict.dictionary)
         self.not_found_components.update(not_fd_com)
         self.search_install_list.clear()
         for key, values in self.result_tmp.items():
@@ -1,12 +0,0 @@
diff -Naru pkgship-1.1.0/packageship/application/apps/package/function/searchdb.py pkgship/packageship/application/apps/package/function/searchdb.py
--- pkgship-1.1.0/packageship/application/apps/package/function/searchdb.py	2020-09-25 17:28:16.230216100 +0800
+++ pkgship/packageship/application/apps/package/function/searchdb.py	2020-09-25 17:30:48.456873100 +0800
@@ -909,6 +909,8 @@
             current_app.logger.error(attr_error)
         except SQLAlchemyError as sql_error:
             current_app.logger.error(sql_error)
+        if not result_list:
+            return ResponseCode.PACK_NAME_NOT_FOUND, result_list
         return_tuple = namedtuple(
             'return_tuple', 'subpack_name sub_pack_version search_version search_name')
         for search_name in search_set:
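The added guard in isolation: return the "not found" code before building result tuples when the query matched nothing (names below are stand-ins for the real response codes):

    from collections import namedtuple

    def get_sub_pack(result_list):
        if not result_list:  # the new early exit
            return 'PACK_NAME_NOT_FOUND', result_list
        return_tuple = namedtuple(
            'return_tuple', 'subpack_name sub_pack_version search_version search_name')
        return 'SUCCESS', [return_tuple(*row) for row in result_list]

    print(get_sub_pack([]))                                    # guard path
    print(get_sub_pack([('pkgA-sub', '1.0', '1.0', 'pkgA')]))  # normal path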
@@ -1,134 +0,0 @@
diff --git a/packageship/application/apps/lifecycle/function/gitee.py b/packageship/application/apps/lifecycle/function/gitee.py
index 4ac077f..8ca4ccf 100644
--- a/packageship/application/apps/lifecycle/function/gitee.py
+++ b/packageship/application/apps/lifecycle/function/gitee.py
@@ -8,6 +8,7 @@ from json import JSONDecodeError
 from retrying import retry
 import requests
 from requests.exceptions import HTTPError
+from requests.exceptions import RequestException
 from sqlalchemy.exc import SQLAlchemyError
 from packageship.libs.dbutils import DBHelper
 from packageship.libs.configutils.readconfig import ReadConfig
@@ -42,6 +43,8 @@ class Gitee():
             "patch_files_path")
         self.table_name = table_name
         self.producer_consumer = ProducerConsumer()
+        self._issue_url = None
+        self.total_page = 0
 
     def query_issues_info(self, issue_id=""):
         """
@@ -53,55 +56,58 @@ class Gitee():
         Raises:
 
         """
-        issue_url = self.api_url + \
-            "/{}/{}/issues/{}".format(self.owner, self.repo, issue_id)
+        self._issue_url = self.api_url + \
+            "/{}/{}/issues/{}".format(self.owner, self.repo, issue_id)
         try:
-            response = requests.get(
-                issue_url, params={"state": "all", "per_page": 100})
-        except Error as error:
+            response = self._request_issue(0)
+        except (HTTPError, RequestException) as error:
             LOGGER.logger.error(error)
             return None
-        if response.status_code != 200:
-            return None
-        total_page = 1 if issue_id else int(response.headers['total_page'])
+
+        self.total_page = 1 if issue_id else int(
+            response.headers['total_page'])
         total_count = int(response.headers['total_count'])
+
         if total_count > 0:
-            issue_list = self._query_per_page_issue_info(total_page, issue_url)
+            issue_list = self._query_per_page_issue_info()
             if not issue_list:
                 LOGGER.logger.error(
                     "An error occurred while querying {}".format(self.repo))
                 return None
             self._save_issues(issue_list)
 
-    def _query_per_page_issue_info(self, total_page, issue_url):
+    @retry(stop_max_attempt_number=3, stop_max_delay=1000)
+    def _request_issue(self, page):
+        try:
+            response = requests.get(self._issue_url,
+                                    params={"state": "all", "per_page": 100, "page": page})
+        except RequestException as error:
+            raise RequestException(error)
+        if response.status_code != 200:
+            _msg = "There is an exception with the remote service [%s]," \
+                   "Please try again later.The HTTP error code is:%s" % (self._issue_url, str(
+                       response.status_code))
+            raise HTTPError(_msg)
+        return response
+
+    def _query_per_page_issue_info(self):
         """
         Description: View the issue details
         Args:
             total_page: total page
-            issue_url: issue url
 
         Returns:
 
         """
         issue_content_list = []
-        for i in range(1, total_page + 1):
-
-            @retry(stop_max_attempt_number=3, stop_max_delay=1000)
-            def request_issue(page, issue_url):
-                try:
-                    response = requests.get(issue_url,
-                                            params={"state": "all", "per_page": 100, "page": page})
-                except HTTPError:
-                    raise HTTPError('Network request error')
-                return response
-
+        for i in range(1, self.total_page + 1):
             try:
-                response = request_issue(i, issue_url)
-                if response.status_code != 200:
-                    LOGGER.logger.warning(response.content.decode("utf-8"))
-                    continue
+                response = self._request_issue(i)
                 issue_content_list.extend(
                     self.parse_issues_content(response.json()))
+            except (HTTPError, RequestException) as error:
+                LOGGER.logger.error(error)
+                continue
             except (JSONDecodeError, Error) as error:
                 LOGGER.logger.error(error)
         return issue_content_list
@@ -114,12 +120,9 @@ class Gitee():
         try:
             def _save(issue_module):
                 with DBHelper(db_name='lifecycle') as database:
-
                     exist_issues = database.session.query(PackagesIssue).filter(
                         PackagesIssue.issue_id == issue_module['issue_id']).first()
                     if exist_issues:
-
-                        # Save the issue
                         for key, val in issue_module.items():
                             setattr(exist_issues, key, val)
                     else:
@@ -130,11 +133,11 @@ class Gitee():
                 with DBHelper(db_name='lifecycle') as database:
                     database.add(package_module)
 
+            # Save the issue
             for issue_item in issue_list:
-                self.producer_consumer.put(
-                    (copy.deepcopy(issue_item), _save))
+                self.producer_consumer.put((copy.deepcopy(issue_item), _save))
 
-            # The number of various issues in the update package
+            # The number of various issues in the update package
             self.pkg_info.defect = self.defect
             self.pkg_info.feature = self.feature
             self.pkg_info.cve = self.cve
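The core of the gitee.py change is hoisting the per-page request out of the loop into a single retried method, and turning a non-200 status into an exception so it is retried and logged like a network failure. The decorator's contract, restated as a standalone function with the same retrying arguments as the patch (retrying re-invokes the callable on any exception, stopping after 3 attempts or 1000 ms, whichever comes first):

    import requests
    from requests.exceptions import HTTPError, RequestException
    from retrying import retry

    @retry(stop_max_attempt_number=3, stop_max_delay=1000)
    def request_issue(issue_url, page):
        try:
            response = requests.get(issue_url, params={"state": "all",
                                                       "per_page": 100,
                                                       "page": page})
        except RequestException as error:
            raise RequestException(error)   # retried by the decorator
        if response.status_code != 200:
            # a bad status also triggers a retry instead of being
            # silently skipped, as the old loop body did
            raise HTTPError("There is an exception with the remote service "
                            "[%s], The HTTP error code is: %s"
                            % (issue_url, response.status_code))
        return response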
BIN pkgship-2.1.0.tar.gz (new file; binary file not shown)

pkgship.spec (280 lines changed)
@@ -1,75 +1,92 @@
 Name: pkgship
-Version: 1.1.0
+Version: 2.1.0
-Release: 14
+Release: 4
 Summary: Pkgship implements rpm package dependence ,maintainer, patch query and so no.
 License: Mulan 2.0
-URL: https://gitee.com/openeuler/openEuler-Advisor
+URL: https://gitee.com/openeuler/pkgship
-Source0: https://gitee.com/openeuler/openEuler-Advisor/pkgship-%{version}.tar.gz
+Source0: https://gitee.com/openeuler/pkgship-%{version}.tar.gz
 
-# Modify the query logic of package information, reduce redundant queries and align dnf query results,
-# extract multiplexing functions, add corresponding docString, and clear pylint
-Patch0: 0001-solve-installation-dependency-query-error.patch
-
-# Fix the problem of continuous spaces in message information in log records
-Patch1: 0002-fix-the-problem-of-continuous-spaces.patch
-
-# When initializing logging, modify the incoming class object to an instance of the class,
-# ensure the execution of internal functions,and read configuration file content
-Patch2: 0003-fix-log_level-configuration-item-not-work.patch
-
-# Fix the error when executing query commands
-Patch3: 0004-fix-the-error-when-executing-query-commands.patch
-
-# Add the judgment of whether the subpack_name attribute exists, fix the code indentation problem,
-# and reduce the judgment branch of the old code.
-Patch4: 0005-fix-the-error-when-source-package-has-no-sub-packages.patch
-
-# Solve the problem of data duplication, increase the maximum queue length judgment,
-# and avoid occupying too much memory
-Patch5: 0006-fix-memory_caused-service-crash-and-data-duplication-issue.patch
-
-# Fix the problem of function parameters
-Patch6: 0007-correct-the-parameter-transfer-method-and-change-the-status-recording-method.patch
-
-# Fix the selfbuild error message
-Patch7: 0008-fix-selfbuild-error-message.patch
-
-# Optimize-log-records-when-obtaining-issue-content
-Patch8: 0009-optimize-log-records-when-obtaining-issue-content.patch
 BuildArch: noarch
 
-BuildRequires: python3-flask-restful python3-flask python3 python3-pyyaml python3-sqlalchemy
-BuildRequires: python3-prettytable python3-requests python3-flask-session python3-flask-script python3-marshmallow
-BuildRequires: python3-Flask-APScheduler python3-pandas python3-retrying python3-xlrd python3-XlsxWriter
-BuildRequires: python3-concurrent-log-handler
-Requires: python3-pip python3-flask-restful python3-flask python3 python3-pyyaml
-Requires: python3-sqlalchemy python3-prettytable python3-requests python3-concurrent-log-handler
-Requires: python3-flask-session python3-flask-script python3-marshmallow python3-uWSGI
-Requires: python3-pandas python3-dateutil python3-XlsxWriter python3-xlrd python3-Flask-APScheduler python3-retrying
+BuildRequires: shadow
+BuildRequires: python3-flask-restful python3-flask python3 python3-pyyaml python3-redis
+BuildRequires: python3-prettytable python3-requests python3-retrying python3-coverage
+BuildRequires: python3-marshmallow python3-uWSGI python3-gevent python3-Flask-Limiter
+BuildRequires: python3-elasticsearch
+Requires: shadow
+Requires: python3-flask-restful python3-flask python3 python3-pyyaml python3-redis
+Requires: python3-prettytable python3-requests python3-retrying python3-coverage
+Requires: python3-marshmallow python3-uWSGI python3-gevent python3-Flask-Limiter
+Requires: python3-elasticsearch
 
 %description
 Pkgship implements rpm package dependence ,maintainer, patch query and so no.
 
 %prep
-%autosetup -n pkgship-%{version} -p1
+%autosetup -n pkgship-%{version}
 
 %build
 %py3_build
+current_path=`pwd`
+cd $current_path'/packageship'
+version_=%{version}
+release_=%{release}
+version_file=version.yaml
+if [ -f "$version_file" ];then
+    rm -rf $version_file
+fi
+touch $version_file
+echo "create version.yaml successfully."
+echo "Version: $version_" >> $version_file
+echo "Release: $release_" >> $version_file
 
 %install
 %py3_install
 
 %check
-# The apscheduler cannot catch the local time, so a time zone must be assigned before running the test case.
-export TZ=Asia/Shanghai
-# change log_path to solve default log_path permission denied problem
-log_path=`pwd`/tmp/
-sed -i "/\[LOG\]/a\log_path=$log_path" test/common_files/package.ini
-%{__python3} -m unittest test/init_test.py
-%{__python3} -m unittest test/read_test.py
-%{__python3} -m unittest test/write_test.py
-rm -rf $log_path
+%{__python3} -m unittest test/coverage_count.py
+
+%pre
+user=pkgshipuser
+group=pkgshipuser
+
+# create group if not exists
+egrep -w "^$group" /etc/group >& /dev/null
+if [ $? -ne 0 ]
+then
+    groupadd $group
+fi
+
+# create user if not exists
+egrep -w "^$user" /etc/passwd >& /dev/null
+if [ $? -ne 0 ]
+then
+    useradd -g $group $user
+fi
+
+# create dir or file if not exists
+function create_dir_file(){
+    if [ $3 = "d" ];then
+        if [ ! -d "$1" ];then
+            mkdir -p -m $2 $1
+        fi
+    elif [ $3 = "f" ];then
+        if [ -f $1 ];then
+            rm -rf $1
+        fi
+        touch $1
+        chmod $2 $1
+    fi
+    chown -R $user:$group $1
+}
+
+create_dir_file /opt/pkgship/ 750 d
+create_dir_file /var/log/pkgship 750 d
+create_dir_file /var/log/pkgship-operation 700 d
+create_dir_file /etc/logrotate.d/pkgship 644 f
 
 %post
@@ -78,12 +95,163 @@ rm -rf $log_path
 
 %files
 %doc README.md
-%{python3_sitelib}/*
-%attr(0755,root,root) %config %{_sysconfdir}/pkgship/*
-%attr(0755,root,root) %{_bindir}/pkgshipd
-%attr(0755,root,root) %{_bindir}/pkgship
+%attr(0750,pkgshipuser,pkgshipuser) %{python3_sitelib}/*
+%attr(0755,pkgshipuser,pkgshipuser) %config %{_sysconfdir}/pkgship/*
+%attr(0755,pkgshipuser,pkgshipuser) %{_bindir}/pkgshipd
+%attr(0755,pkgshipuser,pkgshipuser) %{_bindir}/pkgship
+%attr(0750,root,root) /etc/pkgship/auto_install_pkgship_requires.sh
+%attr(0640,pkgshipuser,pkgshipuser) /etc/pkgship/package.ini
+%attr(0644,pkgshipuser,pkgshipuser) /etc/pkgship/conf.yaml
+%attr(0640,pkgshipuser,pkgshipuser) /lib/systemd/system/pkgship.service
 
 %changelog
+* Tue Mar 2 2021 Yiru Wang <wangyiru1@huawei.com> - 2.1.0-4
+- change pkgship-operation permission to 700 for get excute permission while creating files
+- delete /home/pkgusers/log and /home/pkgusers/uswgi, which moved to /opt/pkgship/
+
+* Mon Mar 1 2021 Yiru Wang <wangyiru1@huawei.com> - 2.1.0-3
+- change pkgship's files owner and permisson
+- change pkgship's database from sqlite to elasticsearch
+- modify pkgship's BuildRequires and Requires
+
+* Thu Jan 14 2021 Shenmei Tu <tushenmei@huawei.com>
+- Add unit test for all src packages interface
+
+* Tue Jan 5 2021 Shenmei Tu <tushenmei@huawei.com>
+- Analyze bedepend and selfbuild dependency result for command line
+
+* Thu Dec 17 2020 Yiru Wang <wangyiru1@huawei.com>
+- Add the basic schema file for pkgship based on elasticsearch
+
+* Wed Dec 23 2020 Pengju Jiang <jiangpengju2@huawei.com>
+- Four kinds of dependent zip download batch upload - Write the parsed data to CSV part of the code for uploading
+
+* Tue Dec 22 2020 Shenmei Tu <tushenmei@huawei.com>
+- Analyze install and build dependency result for command line
+
+* Mon Dec 21 2020 Chengqiang Bao <baochengqiang1@huawei.com>
+- SingleGraph interface should be modified in response to the modification of Level and Batch
+
+* Mon Dec 21 2020 Pengju Jiang <jiangpengju2@huawei.com>
+- Four kinds of dependent zip download batch upload - be_depend data parsing
+
+* Tue Dec 17 2020 Pengju Jiang <jiangpengju2@huawei.com>
+- Four kinds of dependent zip download batch upload - build dependent data parsing
+
+* Thu Dec 17 2020 Chengqiang Bao <baochengqiang1@huawei.com>
+- Add not_found_packages in output result for be depend interface
+
+* Thu Dec 17 2020 Chengqiang Bao <baochengqiang1@huawei.com>
+- Add level and batch query for dependinfo bedepend,installdepend,builddepend interface
+
+* Thu Dec 17 2020 Chengqiang Bao <baochengqiang1@huawei.com>
+- Add not_found_packages in output result for be depend interface
+
+* Tue Dec 15 2020 Shenmei Tu <tushenmei@huawei.com>
+- Add batch query for self depend interface and dependinfo self depend interface
+
+* Mon Dec 14 2020 Chengqiang Bao <baochengqiang1@huawei.com>
+- Add level and batch query for build depend interface
+
+* Mon Dec 14 2020 Shenmei Tu <tushenmei@huawei.com>
+- Add not_found_packages in output result for install depend interface
+
+* Fri Dec 11 2020 Shaowei Cheng <chenshaowei3@huawei.com>
+- Echo effect optimization,constants file extraction
+
+* Tue Dec 8 2020 Pengju Jiang <jiangpengju2@huawei.com>
+- Four kinds of dependent zip download batch upload - dependent data parsing
+
+* Fri Dec 4 2020 Shaowei Cheng <chenshaowei3@huawei.com>
+- Echo effect optimization
+
+* Thu Dec 03 2020 Chengqiang Bao <baochengqiang1@huawei.com>
+- Add level and batch query for be depend interface
+
+* Mon Nov 30 2020 Pengju Jiang <jiangpengju2@huawei.com>
+- Four kinds of dependent zip download batch upload - dependent data parsing
+
+* Mon Nov 30 2020 Shenmei Tu <tushenmei@huawei.com>
+- Add level and batch query for install depend interface
+
+* Mon Nov 30 2020 Pengju Jiang <jiangpengju2@huawei.com>
+- Modify the address of the database after successful initialization
+
+* Sat Nov 28 2020 Pengju Jiang <jiangpengju2@huawei.com>
+- Test case refactoring-upload in batches 5
+
+* Sat Nov 28 2020 Shenmei Tu <tushenmei@huawei.com>
+- Test case refactoring-upload in batches 4
+
+* Fri Nov 27 2020 Shenmei Tu <tushenmei@huawei.com>
+- Test case refactoring-upload in batches 3
+
+* Thu Nov 26 2020 Chengqiang Bao <baochengqiang1@huawei.com>
+- Test case refactoring-upload in batches 2
+
+* Wed Nov 25 2020 Shenmei Tu <tushenmei@huawei.com>
+- Test case refactoring-upload in batches 1
+
+* Mon Nov 23 2020 Shenmei Tu <tushenmei@huawei.com>
+- Modification of add_sig_info interface bug, adding test cases for this interface
+
+* Mon Nov 18 2020 Pengju Jiang <jiangpengju2@huawei.com>
+- Upload zip file download in batches-basic code
+
+* Tue Nov 10 2020 Shenmei Tu <tushenmei@huawei.com>
+- New requirement: add filelist query interface
+
+* Wed Nov 4 2020 Chengqiang Bao <baochengqiang1@huawei.com>
+- pkgship add license to all files
+
+* Wed Nov 4 2020 Shaowei Cheng <chenshaowei3@huawei.com>
+- Solve the problem that the release time value cannot be obtained
+
+* Tue Nov 3 2020 Shaowei Cheng <chenshaowei3@huawei.com>
+- When the dependency graph in pkgship is aimed at the source code
+  package display, the build type package will be used as its next dependency
+
+* Tue Nov 3 2020 Yiru Wang <wangyiru1@huawei.com>
+- Add the license file in the root directory of pkgship
+
+* Tue Nov 3 2020 Xinxing Li <lixinxing6@huawei.com>
+- Add loading status and modify issue-list interface
+
+* Sat Oct 31 2020 Chengqiang Bao <baochengqiang1@huawei.com>
+- The bedepend interface adds exception capture and modifies the accuracy
+  of query results in special scenarios
+
+* Sat Oct 31 2020 Chengqiang Bao <baochengqiang1@huawei.com>
+- The web function adds an interface for obtaining installation dependent
+  results, an interface for obtaining compile dependent results, and an
+  interface for obtaining graphics.
+
+* Thu Oct 29 2020 Shenmei Tu<tushenmei@huawei.com>
+- New requirement: save "sig" information in the database
+
+* Thu Oct 29 2020 Pengju Jiang <jiangpengju2@huawei.com>
+- Unit test refactoring, unit test of three interfaces
+
+* Wed Oct 28 2020 Shaowei Cheng <chenshaowei3@huawei.com>
+- Improve the /lifeCycle/issueTrace interface in pkgship
+
+* Wed Oct 28 2020 Pengju Jiang <jiangpengju2@huawei.com>
+- Unit test reconstruction, basic framework submission
+
+* Wed Oct 28 2020 Zhengtang Gong <gongzhengtang@huawei.com>
+- pkgship initialization adds filelist data import, and replaces the
+  previous local sqlite file import method with the form of repo source
+
+* Thu Oct 22 2020 Pengju Jiang <jiangpengju2@huawei.com>
+- Solve the problem of crash when calling get_all_package_info and sing_pack,
+  and the problem of function return value error
+
+* Wed Oct 21 2020 Zhengtang Gong <gongzhengtang@huawei.com>
+- Modify the files involved in the configuration file
+
+* Wed Oct 21 2020 Shaowei Cheng <chenshaowei3@huawei.com>
+- Bug fix, add parameter checks of pagenum, pagesize
+
 * Tue Oct 13 2020 ZhangTao <zhangtao307@huawei.com> 1.1.0-14
 - correct-the-parameter-transfer-method-and-change-the-status-recording-method.