From d532b1e5aa064c7f365d74efaa4203e6886f16f8 Mon Sep 17 00:00:00 2001 From: gongzt Date: Wed, 15 Jul 2020 14:32:13 +0800 Subject: [PATCH 01/19] =?UTF-8?q?=E6=9C=AC=E5=9C=B0=E6=B5=8B=E8=AF=95?= =?UTF-8?q?=E7=9A=84=E9=85=8D=E7=BD=AE=E6=96=87=E4=BB=B6?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- packageship/packageship/system_config.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/packageship/packageship/system_config.py b/packageship/packageship/system_config.py index 14de44a9..a6ab994c 100644 --- a/packageship/packageship/system_config.py +++ b/packageship/packageship/system_config.py @@ -1,8 +1,8 @@ #!/usr/bin/python3 -""" -Description:System-level file configuration, mainly configure +''' +System-level file configuration, mainly configure the address of the operating environment, commonly used variables, etc. -""" +''' import os import sys @@ -16,18 +16,19 @@ else: # system configuration file path -SYS_CONFIG_PATH = os.path.join('/', 'etc', 'pkgship', 'package.ini') +SYS_CONFIG_PATH = os.path.join(BASE_PATH, 'package.ini') + # data file after successful data import DATABASE_FILE_INFO = os.path.join( - '/', 'var', 'run', 'database_file_info.yaml') + BASE_PATH, 'pkgship_dbs', 'database_file_info.yaml') # If the path of the imported database is not specified in the configuration file, the # configuration in the system is used by default -DATABASE_FOLDER_PATH = os.path.join('/', 'var', 'run', 'pkgship_dbs') +DATABASE_FOLDER_PATH = os.path.join(BASE_PATH, 'pkgship_dbs') # If the directory of log storage is not configured, # it will be stored in the following directory specified by the system by default -LOG_FOLDER_PATH = os.path.join('/', 'var', 'log', 'pkgship') +LOG_FOLDER_PATH = os.path.join(BASE_PATH, 'logs') -- Gitee From 18df4e81348ea2cdcd5c0a10894ec6dc177115a4 Mon Sep 17 00:00:00 2001 From: gongzt Date: Wed, 15 Jul 2020 19:19:38 +0800 Subject: [PATCH 02/19] Database initialization business logic changes --- .../application/initsystem/data_import.py | 645 ++++++------------ .../application/initsystem/datamerge.py | 377 ---------- .../packageship/application/models/package.py | 133 ++-- .../application/models/temporarydb.py | 86 --- 4 files changed, 305 insertions(+), 936 deletions(-) delete mode 100644 packageship/packageship/application/initsystem/datamerge.py delete mode 100644 packageship/packageship/application/models/temporarydb.py diff --git a/packageship/packageship/application/initsystem/data_import.py b/packageship/packageship/application/initsystem/data_import.py index 94d76dad..66ebfe94 100644 --- a/packageship/packageship/application/initsystem/data_import.py +++ b/packageship/packageship/application/initsystem/data_import.py @@ -11,17 +11,10 @@ from sqlalchemy.exc import SQLAlchemyError, InternalError from packageship.libs.dbutils.sqlalchemy_helper import DBHelper from packageship.libs.exception import ContentNoneException from packageship.libs.exception import DatabaseRepeatException -from packageship.libs.exception import DataMergeException from packageship.libs.exception import Error from packageship.libs.configutils.readconfig import ReadConfig from packageship.libs.log import Log -from packageship.application.models.package import bin_pack, src_pack, pack_requires, pack_provides -from packageship.application.initsystem.datamerge import MergeData -from packageship.application.models.temporarydb import src_package -from packageship.application.models.temporarydb import src_requires 
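Patch 01 above (its UTF-8 subject decodes to "configuration file for local testing") swaps the absolute /etc and /var locations in system_config.py for paths joined onto BASE_PATH, so the service can run from a local checkout. A minimal sketch of how those joins resolve; the hunk does not show how BASE_PATH itself is derived, so the dirname line below is only an assumption for illustration:

    # Sketch of the BASE_PATH-relative layout introduced by patch 01.
    # The BASE_PATH derivation is assumed; only the os.path.join calls mirror the patch.
    import os

    BASE_PATH = os.path.dirname(os.path.abspath(__file__))  # assumed

    SYS_CONFIG_PATH = os.path.join(BASE_PATH, 'package.ini')
    DATABASE_FOLDER_PATH = os.path.join(BASE_PATH, 'pkgship_dbs')
    DATABASE_FILE_INFO = os.path.join(BASE_PATH, 'pkgship_dbs', 'database_file_info.yaml')
    LOG_FOLDER_PATH = os.path.join(BASE_PATH, 'logs')

    # e.g. with BASE_PATH = /opt/packageship/packageship, SYS_CONFIG_PATH points at
    # /opt/packageship/packageship/package.ini rather than /etc/pkgship/package.ini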
-from packageship.application.models.temporarydb import bin_package -from packageship.application.models.temporarydb import bin_requiresment -from packageship.application.models.temporarydb import bin_provides +from packageship.application.models.package import src_pack, bin_pack, bin_requires, src_requires, bin_provides, maintenance_info from packageship import system_config LOGGER = Log(__name__) @@ -48,26 +41,19 @@ class InitDataBase(): if self.config_file_path: # yaml configuration file content self.config_file_datas = self.__read_config_file() - self._read_config = ReadConfig() self.db_type = self._read_config.get_database('dbtype') - + self.sql = None + self._database = None + self.mainter_infos = dict() if self.db_type is None: self.db_type = 'mysql' if self.db_type not in ['mysql', 'sqlite']: LOGGER.logger.error("database type configuration error") raise Error('database type configuration error') - self._src_requires_dicts = dict() - self._bin_package_dicts = dict() - self._bin_package_name = dict() - self._bin_requires_dicts = dict() - self._bin_provides_dicts = dict() - self._src_packages = dict() - self._src_package_names = dict() self._sqlite_db = None - self.requires = dict() def __read_config_file(self): """ @@ -124,10 +110,14 @@ class InitDataBase(): # Create a database maintained by benchmark information if self.db_type == 'mysql': MysqlDatabaseOperations( - 'maintenance.information').create_datum_database() + db_name='maintenance.information', + tables=['maintenance_info'], + is_datum=True).create_database() else: SqliteDatabaseOperations( - 'maintenance.information').create_datum_database() + db_name='maintenance.information', + tables=['maintenance_info'], + is_datum=True).create_database() for database in self.config_file_datas: if not database.get('dbname'): @@ -154,15 +144,18 @@ class InitDataBase(): """ db_name = database.get('dbname') - self._sqlite_db = SqliteDatabaseOperations(db_name=db_name) - + tables = ['src_pack', 'bin_pack', + 'bin_requires', 'src_requires', 'bin_provides'] if self.db_type == 'mysql': - creatadatabase = MysqlDatabaseOperations(db_name) + creatadatabase = MysqlDatabaseOperations( + db_name=db_name, tables=tables) if not creatadatabase.create_database(): raise SQLAlchemyError("failed to create database or table") return db_name + self._sqlite_db = SqliteDatabaseOperations( + db_name=db_name, tables=tables) - sqltedb_file = self._sqlite_db.create_sqlite_database() + sqltedb_file = self._sqlite_db.create_database() if sqltedb_file is None: raise SQLAlchemyError( "failed to create database or table") @@ -179,35 +172,25 @@ class InitDataBase(): ContentNoneException: Exception with empty content TypeError: Data type error SQLAlchemyError: Abnormal database operation - DataMergeException: Error in data integration IOError: An error occurred while deleting the database information file """ try: - db_file = None # 1. create a database and related tables in the database db_name = self._create_database(database) # 2. 
get the data of binary packages and source packages - src_package_paths = database.get('src_db_file') - bin_package_paths = database.get('bin_db_file') + src_db_file = database.get('src_db_file') + bin_db_file = database.get('bin_db_file') - if src_package_paths is None or bin_package_paths is None: + if src_db_file is None or bin_db_file is None: raise ContentNoneException( - 'The configured database file path is empty') - if not isinstance(src_package_paths, list) \ - or not isinstance(bin_package_paths, list): - raise TypeError( - 'The source code and binary path types in the initialization file are abnormal') - # 3. Obtain temporary source package files and binary package files - db_file = self.file_merge( - src_package_paths, bin_package_paths) - # 4. dependencies between combined data - self.data_relationship(db_file) - # 5. save data - self.save_data(db_name) - - except (SQLAlchemyError, ContentNoneException, - DataMergeException, TypeError, Error) as error_msg: + 'The path to the sqlite file in the database initialization configuration \ + is incorrect ') + self._get_maintenance_info() + # # 3. Obtain temporary source package files and binary package files + self.__save_data(src_db_file, bin_db_file, db_name) + + except (SQLAlchemyError, ContentNoneException, TypeError, Error) as error_msg: # Delete the specified database try: if self.db_type == 'mysql': @@ -227,378 +210,255 @@ class InitDataBase(): } InitDataBase.__updata_settings_file( database_content=database_content) - finally: - try: - if os.path.exists(db_file): - os.remove(db_file) - except (IOError, UnboundLocalError) as error_msg: - LOGGER.logger.error(error_msg) - def _src_package_relation(self, src_package_data): + @staticmethod + def __columns(cursor): """ - Description: Mapping of data relations of source packages + Description: functional description:Returns all the column names + queried by the current cursor Args: - src_package_data: Source package data - Returns: + cursor: Cursor + Returns: + The first columns Raises: """ + return [col[0] for col in cursor.description] - _src_package_name = src_package_data.name - _src_package = { - "name": src_package_data.name, - "version": src_package_data.version, - "license": src_package_data.rpm_license, - "sourceURL": src_package_data.url, - "Maintaniner": src_package_data.maintaniner - } - if _src_package_name not in self._src_packages.keys(): - self._src_packages[_src_package_name] = _src_package - else: - # Determine the version number - if src_package_data.version > \ - self._src_packages[_src_package_name]['version']: - - self._src_packages[_src_package_name] = _src_package - # Delete previous version - for key in [names[0] for names in self._src_package_names.items() - if _src_package_name == names[1]]: - del self._src_package_names[key] - - self._src_package_names[src_package_data.pkgKey] = _src_package_name - - def _src_requires_relation(self, src_requires_data): + def __get_data(self): """ - Description: Source package dependent package data relationship mapping + According to different sql statements, query related table data + Args: - src_requires_data: Source package dependent package data + Returns: Raises: """ + if self.sql is None: + return None + try: + src_packages_data = self._database.session.execute(self.sql) + columns = InitDataBase.__columns( + src_packages_data.cursor) + return [dict(zip(columns, row)) for row in src_packages_data.fetchall()] + except SQLAlchemyError as sql_error: + LOGGER.logger.error(sql_error) + return None - _src_package_name = 
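The new InitDataBase.__columns and __get_data helpers above convert a raw session.execute result into a list of dictionaries by zipping the column names from cursor.description with each fetched row. The same pattern can be reproduced with the standard-library sqlite3 module and an in-memory table so it runs on its own; the table and values below are illustrative:

    # Stand-alone illustration of the cursor.description + zip pattern used by __get_data.
    import sqlite3

    conn = sqlite3.connect(':memory:')
    conn.execute("create table packages (pkgKey integer, name text, version text)")
    conn.execute("insert into packages values (1, 'zlib', '1.2.11')")

    cursor = conn.execute("select * from packages")
    columns = [col[0] for col in cursor.description]     # ['pkgKey', 'name', 'version']
    rows = [dict(zip(columns, row)) for row in cursor.fetchall()]
    print(rows)   # [{'pkgKey': 1, 'name': 'zlib', 'version': '1.2.11'}]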
self._src_package_names.get( - src_requires_data.pkgKey) - if _src_package_name: - if _src_package_name not in self._src_requires_dicts.keys(): - self._src_requires_dicts[_src_package_name] = [] - self._src_requires_dicts[_src_package_name].append({ - 'name': src_requires_data.name - }) - - def _bin_package_relation(self, bin_package_data): + def __save_data(self, src_db_file, bin_db_file, db_name): """ - Description: Binary package relationship mapping problem + Description: integration of multiple data files Args: - bin_package_data: Binary package data + src_package_paths: Source package database file + bin_package_paths: Binary package database file Returns: - + Path of the generated temporary database file Raises: """ + with DBHelper(db_name=src_db_file, db_type='sqlite:///', import_database=True) \ + as database: + self._database = database + # Save data related to source package + self._save_src_packages(db_name) + self._save_src_requires(db_name) - _bin_pkg_key = bin_package_data.pkgKey - self._bin_package_name[bin_package_data.name] = _bin_pkg_key - - src_package_name = bin_package_data.src_pack_name - _bin_package = { - 'name': bin_package_data.name, - 'version': bin_package_data.version, - 'relation': True - } - if src_package_name not in self._bin_package_dicts.keys(): - self._bin_package_dicts[src_package_name] = [] - - # Determine whether the version number is consistent with the source code package - # If they are the same, an association relationship is established. - for index, bin_package_item in enumerate(self._bin_package_dicts[src_package_name]): - if bin_package_item.get('name') == bin_package_data.name: - if bin_package_item.get('version') < bin_package_data.version: - self._bin_package_dicts[src_package_name][index]['relation'] = False + with DBHelper(db_name=bin_db_file, db_type='sqlite:///', import_database=True)\ + as database: + self._database = database + # Save binary package related data + self._save_bin_packages(db_name) + self._save_bin_requires(db_name) + self._save_bin_provides(db_name) - self._bin_package_dicts[src_package_name].append(_bin_package) - - def _bin_requires_relation(self, bin_requires_data): + def _save_src_packages(self, db_name): """ - Description: Binary package dependency package relationship mapping problem + Save the source package data + Args: - bin_requires_data: Binary package dependency package data + db_name: Saved database name Returns: Raises: """ + # Query all source packages + self.sql = " select * from packages " + packages_datas = self.__get_data() + if packages_datas is None: + raise ContentNoneException( + 'There is no relevant data in the source package provided') + try: + with DBHelper(db_name=db_name) as database: + database.batch_add(packages_datas, src_pack) + except SQLAlchemyError as sql_error: + LOGGER.logger.error(sql_error) - _bin_pkg_key = bin_requires_data.pkgKey - if _bin_pkg_key: - if _bin_pkg_key not in self._bin_requires_dicts: - self._bin_requires_dicts[_bin_pkg_key] = [] - - self._bin_requires_dicts[_bin_pkg_key].append({ - 'name': bin_requires_data.name - }) - - def _bin_provides_relation(self, bin_provides_data): + def _save_src_requires(self, db_name): """ - Description: Binary package provided by the relationship mapping problem + Args: - bin_provides_data: Component data provided by the binary package + Returns: Raises: """ + # Query all source packages + self.sql = " select * from requires " + requires_datas = self.__get_data() + if requires_datas is None: + LOGGER.logger.warning( + '{db_name}: The 
package data that the source package depends on is\ + empty'.format(db_name=db_name)) + try: + with DBHelper(db_name=db_name) as database: + database.batch_add(requires_datas, src_requires) + except SQLAlchemyError as sql_error: + LOGGER.logger.error(sql_error) - _bin_pkg_key = bin_provides_data.pkgKey - if _bin_pkg_key: - if _bin_pkg_key not in self._bin_provides_dicts: - self._bin_provides_dicts[_bin_pkg_key] = [] - self._bin_provides_dicts[_bin_pkg_key].append({ - 'name': bin_provides_data.name - }) - - def data_relationship(self, db_file): + def _save_bin_packages(self, db_name): """ - Description: dependencies between combined data + Save binary package data Args: - db_file: Temporary database file + Returns: Raises: - Error information - """ - self._bin_provides_dicts = dict() - self._bin_requires_dicts = dict() - self._bin_package_name = dict() - self._bin_package_dicts = dict() - self._src_requires_dicts = dict() - self._src_packages = dict() - self._src_package_names = dict() + """ + self.sql = " select * from packages " + bin_packaegs = self.__get_data() + if bin_packaegs is None: + raise ContentNoneException( + '{db_name}:There is no relevant data in the provided \ + binary package '.format(db_name=db_name)) + for index, bin_package_item in enumerate(bin_packaegs): + try: + src_package_name = bin_package_item.get('rpm_sourcerpm').split( + '-' + bin_package_item.get('version'))[0] + except AttributeError as exception_msg: + src_package_name = None + LOGGER.logger.warning(exception_msg) + finally: + bin_packaegs[index]['src_name'] = src_package_name try: - with DBHelper(db_name=db_file, db_type='sqlite:///') as database: - # source package data - for src_package_item in database.session.query(src_package).all(): - self._src_package_relation(src_package_item) - - # combine all dependent packages of source packages - for src_requires_item in database.session.query(src_requires).all(): - self._src_requires_relation(src_requires_item) - - # combine all binary packages - for bin_package_item in database.session.query(bin_package).all(): - self._bin_package_relation(bin_package_item) + with DBHelper(db_name=db_name) as database: + database.batch_add(bin_packaegs, bin_pack) + except SQLAlchemyError as sql_error: + LOGGER.logger.error(sql_error) - # combine all dependent packages under the current binary package - for bin_requires_item in database.session.query( - bin_requiresment).all(): - self._bin_requires_relation(bin_requires_item) - - # combine the packages provided by the current binary package - - for bin_provides_item in database.session.query(bin_provides).all(): - self._bin_provides_relation(bin_provides_item) - - except Error as error_msg: - LOGGER.logger.error(error_msg) - - def file_merge(self, src_package_paths, bin_package_paths): + def _save_bin_requires(self, db_name): """ - Description: integration of multiple data files + Args: - src_package_paths: Source package database file - bin_package_paths: Binary package database file + Returns: - Path of the generated temporary database file + Raises: - DataMergeException: Abnormal data integration + """ - _db_file = os.path.join( - self._sqlite_db.database_file_folder, 'temporary_database') - - if os.path.exists(_db_file): - os.remove(_db_file) - # create a temporary sqlite database - with DBHelper(db_name=_db_file, db_type='sqlite:///') as database: - tables = ['src_package', 'src_requires', - 'bin_package', 'bin_requiresment', 'bin_provides'] - database.create_table(tables) - - _src_package_key = 1 - # load all source 
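_save_bin_packages above recovers the owning source package name from each binary row's rpm_sourcerpm field by splitting on '-' plus the version string. A worked example of that expression with an illustrative package:

    # rpm_sourcerpm normally looks like '<srcname>-<version>-<release>.src.rpm'.
    rpm_sourcerpm = 'zlib-1.2.11-17.oe1.src.rpm'   # illustrative value
    version = '1.2.11'

    src_name = rpm_sourcerpm.split('-' + version)[0]
    print(src_name)   # zlib

    # If rpm_sourcerpm is missing (None), .split raises AttributeError, which is
    # why the patch wraps the expression in try/except and stores src_name = None.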
package files and import the files into it - for src_file in src_package_paths: - load_sqlite_data = MergeData(db_file=src_file) - - # Combine data from all source packages - - _src_package_key, src_merge_result = load_sqlite_data.src_file_merge( - src_package_key=_src_package_key, db_file=_db_file) - if not src_merge_result: - raise DataMergeException( - 'abnormal multi-file database integration') - - # load binary package file - _bin_package_key = 1 - for bin_file in bin_package_paths: - load_sqlite_data = MergeData(db_file=bin_file) - - # Combine all binary package data - _bin_package_key, bin_merge_result = load_sqlite_data.bin_file_merge( - bin_package_key=_bin_package_key, db_file=_db_file) - if not bin_merge_result: - raise DataMergeException( - 'abnormal multi-file database integration') - return _db_file + self.sql = " select * from requires " + requires_datas = self.__get_data() + if requires_datas is None: + raise ContentNoneException( + '{db_name}:There is no relevant data in the provided binary \ + dependency package'.format(db_name=db_name)) + try: + with DBHelper(db_name=db_name) as database: + database.batch_add(requires_datas, bin_requires) + except SQLAlchemyError as sql_error: + LOGGER.logger.error(sql_error) - def __exists_repeat_database(self): + def _save_bin_provides(self, db_name): """ - Description: Determine if the same database name exists + Args: Returns: - True if there are duplicate databases, false otherwise + Raises: """ - db_names = [name.get('dbname') - for name in self.config_file_datas] - - if len(set(db_names)) != len(self.config_file_datas): - return True - - return False + self.sql = " select * from provides " + provides_datas = self.__get_data() + if provides_datas is None: + raise ContentNoneException( + '{db_name}:There is no relevant data in the provided \ + binary component'.format(db_name=db_name)) + try: + with DBHelper(db_name=db_name) as database: + database.batch_add(provides_datas, bin_provides) + except SQLAlchemyError as sql_error: + LOGGER.logger.error(sql_error) - def _save_bin_package(self, src_packages): + def _get_maintenance_info(self): """ - Description: Save binary package data + Description: Obtain the information of the maintainer Args: - src_packages: Source package data + Returns: - Binary package data + Maintainer related information Raises: - + SQLAlchemyError: An error occurred while executing the sql statement """ - bin_packages = [] - for package_data in src_packages: - try: - bin_package_datas = self._bin_package_dicts.pop( - package_data.name) - except KeyError: - bin_package_datas = None - - if bin_package_datas: - for bin_package_item in bin_package_datas: - bin_package_dict = { - 'name': bin_package_item.get('name'), - 'version': bin_package_item.get('version'), - 'srcIDkey': None - } - if bin_package_item.get('relation'): - bin_package_dict['srcIDkey'] = package_data.id - bin_packages.append(bin_package_dict) - - # source package dependency package - src_requires_datas = self._src_requires_dicts.get( - package_data.name) - if src_requires_datas: - for src_requires_item in src_requires_datas: - requires_name = src_requires_item.get('name') - if requires_name: - if requires_name not in self.requires.keys(): - self.requires[requires_name] = [] - self.requires[requires_name].append({ - 'name': src_requires_item.get('name'), - 'srcIDkey': package_data.id, - 'depProIDkey': None, - 'binIDkey': None - }) - - # organization independent binary package - - for bin_packs in self._bin_package_dicts.values(): - for bin_pack_item 
in bin_packs: - bin_packages.append({ - 'name': bin_pack_item.get('name'), - 'version': bin_pack_item.get('version'), - 'srcIDkey': None - }) - return bin_packages - - def _save_bin_provides(self, bin_packages): - """ - Description: Save package data provided by binary + try: + if not hasattr(self, 'mainter_infos'): + self.mainter_infos = dict() + with DBHelper(db_name='maintenance.information') as database: + for info in database.session.query(maintenance_info).all(): + if info.name not in self.mainter_infos.keys(): + self.mainter_infos[info.name] = [] + self.mainter_infos[info.name].append({ + 'version': info.version, + 'maintaniner': info.maintaniner, + 'maintainlevel': info.maintainlevel + }) + except SQLAlchemyError as sql_error: + LOGGER.logger.error(sql_error) + + def _get_mainter_info(self, src_package_name, version): + ''' + Get the maintainer information of the source package + Args: - bin_packages: Binary package data + src_package_name: Source package name + version: Source package version number Returns: - Package data provided by binary + Maintainer's name Raises: - """ - bin_provides_list = [] - for bin_pack_entity in bin_packages: - - # Get the pkgKey of the current binary package - pkg_key = self._bin_package_name.get(bin_pack_entity.name) - - if self._bin_requires_dicts.get(pkg_key): - for bin_requires_item in self._bin_requires_dicts.get(pkg_key): - requires_name = bin_requires_item.get('name') - if requires_name: - if requires_name not in self.requires.keys(): - self.requires[requires_name] = [] - self.requires[requires_name].append({ - 'name': bin_requires_item.get('name'), - 'binIDkey': bin_pack_entity.id, - 'depProIDkey': None, - 'srcIDkey': None - }) - - if self._bin_provides_dicts.get(pkg_key): - for bin_provides_item in self._bin_provides_dicts.get(pkg_key): - bin_provides_list.append({ - 'name': bin_provides_item.get('name'), - 'binIDkey': bin_pack_entity.id - }) - return bin_provides_list + ''' + maintenance_infos = self.mainter_infos.get(src_package_name) + maintaniner = None + if maintenance_infos: + for maintenance_item in maintenance_infos: + if maintenance_item.get('version') == version: + maintaniner = (maintenance_item.get( + 'maintaniner'), maintenance_item.get('maintainlevel')) + break + return maintaniner - def save_data(self, db_name): + def __exists_repeat_database(self): """ - Description: save related package data + Description: Determine if the same database name exists Args: - db_name: The name of the database - Returns: + Returns: + True if there are duplicate databases, false otherwise Raises: """ + db_names = [name.get('dbname') + for name in self.config_file_datas] - with DBHelper(db_name=db_name) as data_base: - # Add source package data - data_base.batch_add( - [src_package_item[1] for src_package_item in self._src_packages.items()], src_pack) - - self.requires = dict() - - # Save dependency data of binary packages and source packages - - data_base.batch_add(self._save_bin_package( - data_base.session.query(src_pack).all()), bin_pack) - - # Save all packages and dependent packages provided by the binary package - - data_base.batch_add(self._save_bin_provides( - data_base.session.query(bin_pack).all()), pack_provides) - - all_requires = [] - for provides_item in data_base.session.query(pack_provides).all(): - if provides_item.name in self.requires.keys(): - for requires_item in self.requires[provides_item.name]: - requires_item['depProIDkey'] = provides_item.id - all_requires.append(requires_item) + if len(set(db_names)) != 
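The _get_maintenance_info and _get_mainter_info methods above cache the maintenance.information table in a dictionary keyed by source package name, then match on version during import. A condensed sketch of that lookup, with hypothetical sample data in place of the real table:

    # Condensed sketch of the maintainer cache; the entries are hypothetical.
    mainter_infos = {
        'zlib': [
            {'version': '1.2.11', 'maintaniner': 'alice', 'maintainlevel': '2'},
            {'version': '1.2.8', 'maintaniner': 'bob', 'maintainlevel': '3'},
        ],
    }

    def get_mainter_info(src_package_name, version):
        for item in mainter_infos.get(src_package_name, []):
            if item.get('version') == version:
                return item.get('maintaniner'), item.get('maintainlevel')
        return None, None   # patch 04 below normalises the miss case the same way

    print(get_mainter_info('zlib', '1.2.11'))   # ('alice', '2')
    print(get_mainter_info('glibc', '2.28'))    # (None, None)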
len(self.config_file_datas): + return True - data_base.batch_add(all_requires, pack_requires) + return False @staticmethod def __updata_settings_file(**Kwargs): @@ -669,7 +529,8 @@ class InitDataBase(): if del_result: try: - file_read = open(system_config.DATABASE_FILE_INFO, 'r', encoding='utf-8') + file_read = open( + system_config.DATABASE_FILE_INFO, 'r', encoding='utf-8') _databases = yaml.load( file_read.read(), Loader=yaml.FullLoader) for database in _databases: @@ -693,7 +554,7 @@ class MysqlDatabaseOperations(): drop_database_sql: Delete the SQL statement of the database """ - def __init__(self, db_name): + def __init__(self, db_name, tables=None, is_datum=False): """ Description: Class instance initialization Args: @@ -704,6 +565,8 @@ class MysqlDatabaseOperations(): DEFAULT CHARACTER SET utf8mb4; '''.format(db_name=self.db_name) self.drop_database_sql = '''drop DATABASE if exists `{db_name}` '''.format( db_name=self.db_name) + self.tables = tables + self.is_datum = is_datum def create_database(self): """ @@ -720,9 +583,10 @@ class MysqlDatabaseOperations(): try: # create database - data_base.session.execute(self.drop_database_sql) + if not self.is_datum: + data_base.session.execute(self.drop_database_sql) data_base.session.execute(self.create_database_sql) - except SQLAlchemyError as exception_msg: + except (SQLAlchemyError, InternalError) as exception_msg: LOGGER.logger.error(exception_msg) return False else: @@ -766,9 +630,8 @@ class MysqlDatabaseOperations(): """ try: with DBHelper(db_name=self.db_name) as database: - tables = ['src_pack', 'bin_pack', 'pack_provides', - 'pack_requires'] - database.create_table(tables) + if self.tables: + database.create_table(self.tables) except SQLAlchemyError as exception_msg: LOGGER.logger.error(exception_msg) @@ -776,52 +639,6 @@ class MysqlDatabaseOperations(): else: return True - def create_datum_database(self): - """ - Description: Create a benchmark database to save the maintainer's information - Args: - - Returns: - True if successful, otherwise false - Raises: - SQLAlchemyError: An exception occurred while creating the database - """ - with DBHelper(db_name='mysql') as data_base: - # create database - try: - data_base.session.execute(self.create_database_sql) - except SQLAlchemyError as exception_msg: - LOGGER.logger.error(exception_msg) - return False - else: - # create tables - return self.__create_datum_tables() - - def __create_datum_tables(self): - """ - Description: Create a data table of maintainer information - rgs: - - Returns: - True if successful, otherwise false - Raises: - SQLAlchemyError: An exception occurred while creating the database - Error: Error information - """ - try: - with DBHelper(db_name=self.db_name) as database: - tables = ['maintenance_info'] - database.create_table(tables) - except InternalError as exists_table_err: - LOGGER.logger.error(exists_table_err) - return True - except (SQLAlchemyError, Error) as exception_msg: - LOGGER.logger.error(exception_msg) - return False - - else: - return True - class SqliteDatabaseOperations(): """ @@ -831,7 +648,7 @@ class SqliteDatabaseOperations(): database_file_folder: Database folder path """ - def __init__(self, db_name, **kwargs): + def __init__(self, db_name, tables=None, is_datum=False, ** kwargs): """ Description: Class instance initialization Args: @@ -844,6 +661,8 @@ class SqliteDatabaseOperations(): self._database_file_path() else: self.database_file_folder = kwargs.get('database_path') + self.tables = tables + self.is_datum = is_datum def 
_database_file_path(self): """ @@ -866,7 +685,7 @@ class SqliteDatabaseOperations(): LOGGER.logger.error(makedirs_error) self.database_file_folder = None - def create_sqlite_database(self): + def create_database(self): """ Description: create sqlite database and table Args: @@ -888,14 +707,13 @@ class SqliteDatabaseOperations(): os.remove(_db_file + '.db') # create a sqlite database - with DBHelper(db_name=_db_file) as database: - tables = ['src_pack', 'bin_pack', - 'pack_requires', 'pack_provides'] - try: - database.create_table(tables) - except SQLAlchemyError as create_table_err: - LOGGER.logger.error(create_table_err) - return None + if (self.is_datum and not os.path.exists(_db_file + '.db')) or not self.is_datum: + with DBHelper(db_name=_db_file) as database: + try: + database.create_table(self.tables) + except (SQLAlchemyError, InternalError) as create_table_err: + LOGGER.logger.error(create_table_err) + return None return _db_file @@ -920,32 +738,3 @@ class SqliteDatabaseOperations(): return False else: return True - - def create_datum_database(self): - """ - Description: create sqlite database and table - Args: - - Returns: - After successful generation, return the database file address, - otherwise return none - Raises: - FileNotFoundError: The specified database folder does not exist - SQLAlchemyError: An error occurred while generating the database - """ - if self.database_file_folder is None: - raise FileNotFoundError('Database folder does not exist') - - _db_file = os.path.join( - self.database_file_folder, self.db_name) - - if not os.path.exists(_db_file + '.db'): - # create a sqlite database - with DBHelper(db_name=_db_file) as database: - tables = ['maintenance_info'] - try: - database.create_table(tables) - except SQLAlchemyError as create_table_err: - LOGGER.logger.error(create_table_err) - return None - return _db_file diff --git a/packageship/packageship/application/initsystem/datamerge.py b/packageship/packageship/application/initsystem/datamerge.py deleted file mode 100644 index 85a6be7e..00000000 --- a/packageship/packageship/application/initsystem/datamerge.py +++ /dev/null @@ -1,377 +0,0 @@ -#!/usr/bin/python3 -""" -Description: Integration of multiple sqlite file data, including reading - sqlite database and inserting data -Class: MergeData -""" -from sqlalchemy.exc import SQLAlchemyError -from packageship.application.models.temporarydb import src_package -from packageship.application.models.temporarydb import src_requires -from packageship.application.models.temporarydb import bin_package -from packageship.application.models.temporarydb import bin_requiresment -from packageship.application.models.temporarydb import bin_provides -from packageship.application.models.package import maintenance_info -from packageship.libs.dbutils import DBHelper -from packageship.libs.log import Log - -LOGGER = Log(__name__) - - -class MergeData(): - """ - Description: Load data from sqlite database - Attributes: - db_file: Database file - db_type: Connected database type - datum_database: Base database name - """ - - def __init__(self, db_file): - """ - Description: Class instance initialization - Args: - db_file: Database file - """ - self.db_file = db_file - self.db_type = 'sqlite:///' - self.datum_database = 'maintenance.information' - self.src_requires_dicts = dict() - self.src_package_datas = [] - self.bin_provides_dicts = dict() - self.bin_package_datas = [] - self.mainter_infos = dict() - self.bin_requires_dicts = dict() - - @staticmethod - def __columns(cursor): - """ - 
Description: functional description:Returns all the column names - queried by the current cursor - Args: - cursor: Cursor - - Returns: - The first columns - Raises: - - """ - return [col[0] for col in cursor.description] - - def get_package_data(self): - """ - Description: get binary package or source package data - Args: - - Returns: - All source package data queried - Raises: - SQLAlchemyError: An error occurred while executing the sql statement - """ - try: - with DBHelper(db_name=self.db_file, db_type=self.db_type, import_database=True) \ - as database: - src_packages_data = database.session.execute( - "select pkgKey,name,version,rpm_license,url,rpm_sourcerpm from packages") - columns = MergeData.__columns( - src_packages_data.cursor) - return [dict(zip(columns, row)) for row in src_packages_data.fetchall()] - except SQLAlchemyError as sql_error: - LOGGER.logger.error(sql_error) - return None - - def get_requires_data(self): - """ - Description: get dependent package data of binary package or source package - Args: - - Returns: - All dependent data queried - Raises: - SQLAlchemyError: An error occurred while executing the sql statement - """ - try: - with DBHelper(db_name=self.db_file, db_type=self.db_type, import_database=True) \ - as database: - requires = database.session.execute( - "select pkgKey,name from requires") - columns = MergeData.__columns(requires.cursor) - return [dict(zip(columns, row)) for row in requires.fetchall()] - except SQLAlchemyError as sql_error: - LOGGER.logger.error(sql_error) - return None - - def get_provides(self): - """ - Description: get the dependency package provided by the binary package - Args: - - Returns: - Query the component data provided by all binary packages - Raises: - SQLAlchemyError: An error occurred while executing the sql statement - """ - try: - with DBHelper(db_name=self.db_file, db_type=self.db_type, import_database=True) \ - as database: - requires = database.session.execute( - "select pkgKey,name from provides") - columns = MergeData.__columns(requires.cursor) - return [dict(zip(columns, row)) for row in requires.fetchall()] - except SQLAlchemyError as sql_error: - LOGGER.logger.error(sql_error) - return None - - def get_maintenance_info(self): - """ - Description: Obtain the information of the maintainer - Args: - - Returns: - Maintainer related information - Raises: - SQLAlchemyError: An error occurred while executing the sql statement - """ - try: - if not hasattr(self, 'mainter_infos'): - self.mainter_infos = dict() - with DBHelper(db_name=self.datum_database) as database: - for info in database.session.query(maintenance_info).all(): - if info.name not in self.mainter_infos.keys(): - self.mainter_infos[info.name] = [] - self.mainter_infos[info.name].append({ - 'version': info.version, - 'maintaniner': info.maintaniner - }) - except SQLAlchemyError as sql_error: - LOGGER.logger.error(sql_error) - - def src_file_merge(self, src_package_key, db_file): - """ - Description: Source code related data integration - Args: - src_package_key: The relevant key value of the source package - db_file: Database file - Returns: - Key value after successful data combination - (0, False) or (src_package_key, True) - Raises: - SQLAlchemyError: An error occurred while executing the sql statement - """ - self.get_maintenance_info() - - self.__compose_src_package() - - self.__compose_src_rquires() - - # Combination of relationships between source packages and dependent packages - src_requires_data = [] - for src_package_item in 
self.src_package_datas: - src_package_key += 1 - requires = self.src_requires_dicts.get( - src_package_item.get('pkgKey')) - if requires: - for src_requires_item in requires: - src_requires_item['pkgKey'] = src_package_key - src_requires_data.append(src_requires_item) - src_package_item['pkgKey'] = src_package_key - - try: - with DBHelper(db_name=db_file, db_type=self.db_type) as data_base: - data_base.batch_add(self.src_package_datas, src_package) - data_base.batch_add(src_requires_data, src_requires) - except SQLAlchemyError as sql_error: - LOGGER.logger.error(sql_error) - return (0, False) - else: - return (src_package_key, True) - - def __compose_src_package(self): - """ - Description: Combine source package data - Args: - - Returns: - - Raises: - - """ - if getattr(self, 'src_package_datas', None) is None: - self.src_package_datas = [] - - for src_package_item in self.get_package_data(): - src_package_name = src_package_item.get('name') - if src_package_name: - # Find the maintainer information of the current data - maintenance_infos = self.mainter_infos.get(src_package_name) - maintenance = [] - version = src_package_item.get('version') - if self.mainter_infos.get(src_package_name): - for maintenance_item in maintenance_infos: - if maintenance_item.get('version') == version: - maintenance.append(maintenance_item) - - self.src_package_datas.append( - { - "name": src_package_item.get('name'), - "version": version, - "rpm_license": src_package_item.get('rpm_license'), - "url": src_package_item.get('url'), - "pkgKey": src_package_item.get('pkgKey'), - 'maintaniner': - maintenance[0].get('maintaniner') if maintenance and len( - maintenance) > 0 else None - } - ) - - def __compose_src_rquires(self): - """ - Description: Combine source package dependent package data - Args: - - Returns: - - Raises: - - """ - if getattr(self, 'src_requires_dicts', None) is None: - self.src_requires_dicts = dict() - - for src_requires_item in self.get_requires_data(): - pkg_key = src_requires_item.get('pkgKey') - if pkg_key: - if pkg_key not in self.src_requires_dicts.keys(): - self.src_requires_dicts[pkg_key] = [] - self.src_requires_dicts[pkg_key].append( - { - 'name': src_requires_item.get('name'), - 'pkgKey': pkg_key - } - ) - - def __compose_bin_package(self): - """ - Description: Combine binary package data - Args: - - Returns: - - Raises: - AttributeError - """ - if getattr(self, 'bin_package_datas', None) is None: - self.bin_package_datas = [] - - for bin_package_item in self.get_package_data(): - try: - src_package_name = bin_package_item.get('rpm_sourcerpm').split( - '-' + bin_package_item.get('version'))[0] - except AttributeError as exception_msg: - src_package_name = None - LOGGER.logger.warning(exception_msg) - else: - self.bin_package_datas.append( - { - "name": bin_package_item.get('name'), - "version": bin_package_item.get('version'), - "license": bin_package_item.get('rpm_license'), - "sourceURL": bin_package_item.get('url'), - "src_pack_name": src_package_name, - "pkgKey": bin_package_item.get('pkgKey') - } - ) - - def __compose_bin_requires(self): - """ - Description: Combining binary dependent package data - Args: - - Returns: - - Raises: - """ - if getattr(self, 'bin_requires_dicts', None) is None: - self.bin_requires_dicts = dict() - - for bin_requires_item in self.get_requires_data(): - pkg_key = bin_requires_item.get('pkgKey') - if pkg_key: - if pkg_key not in self.bin_requires_dicts.keys(): - self.bin_requires_dicts[pkg_key] = [] - self.bin_requires_dicts[pkg_key].append({ - 'name': 
bin_requires_item.get('name'), - 'pkgKey': 0 - }) - - def __compose_bin_provides(self): - """ - Description: Combine binary package data - Args: - - Returns: - - Raises: - - """ - if getattr(self, 'bin_provides_dicts', None) is None: - self.bin_provides_dicts = dict() - - for bin_provides_item in self.get_provides(): - pkg_key = bin_provides_item.get('pkgKey') - if pkg_key: - if pkg_key not in self.bin_provides_dicts.keys(): - self.bin_provides_dicts[pkg_key] = [] - self.bin_provides_dicts[pkg_key].append({ - 'name': bin_provides_item.get('name'), - 'pkgKey': 0 - }) - - def bin_file_merge(self, bin_package_key, db_file): - """ - Description: Binary package related data integration - Args: - bin_package_key: Primary key of binary package - db_file: Database file - Returns: - Key value after successful data combination - (0, False) or (bin_package_key, True) - Raises: - SQLAlchemyError: An error occurred while executing the sql statement - """ - self.__compose_bin_package() - # binary package dependent package integration - - self.__compose_bin_requires() - - self.__compose_bin_provides() - - # integrate the id data of the binary package - bin_requires_datas = [] - bin_provides_datas = [] - for bin_package_item in self.bin_package_datas: - bin_package_key += 1 - # dependent packages - requires = self.bin_requires_dicts.get( - bin_package_item.get('pkgKey')) - if requires: - for bin_requires_item in requires: - bin_requires_item['pkgKey'] = bin_package_key - bin_requires_datas.append(bin_requires_item) - - provides = self.bin_provides_dicts.get( - bin_package_item.get('pkgKey')) - if provides: - for bin_provides_item in provides: - bin_provides_item['pkgKey'] = bin_package_key - bin_provides_datas.append(bin_provides_item) - bin_package_item['pkgKey'] = bin_package_key - # save binary package related data - try: - with DBHelper(db_name=db_file, db_type=self.db_type) as data_base: - data_base.batch_add(self.bin_package_datas, bin_package) - data_base.batch_add(bin_requires_datas, bin_requiresment) - data_base.batch_add(bin_provides_datas, bin_provides) - except SQLAlchemyError as sql_error: - LOGGER.logger.error(sql_error) - return (0, False) - else: - return (bin_package_key, True) diff --git a/packageship/packageship/application/models/package.py b/packageship/packageship/application/models/package.py index c0f8acb3..eddd9e3a 100644 --- a/packageship/packageship/application/models/package.py +++ b/packageship/packageship/application/models/package.py @@ -2,33 +2,43 @@ """ Description: Database entity model mapping """ -from sqlalchemy import Column, Integer, String, ForeignKey -from sqlalchemy.orm import relationship +from sqlalchemy import Column, Integer, String, ForeignKey, Boolean from packageship.libs.dbutils.sqlalchemy_helper import DBHelper class src_pack(DBHelper.BASE): # pylint: disable=C0103,R0903 """ - Description: functional description:Source package model + Source package model """ __tablename__ = 'src_pack' - id = Column(Integer, primary_key=True) - - name = Column(String(500), nullable=True) - - version = Column(String(200), nullable=True) - - license = Column(String(500), nullable=True) - - sourceURL = Column(String(200), nullable=True) - - downloadURL = Column(String(200), nullable=True) - - Maintaniner = Column(String(50), nullable=True) - - MaintainLevel = Column(String(20), nullable=True) + pkgKey = Column(Integer, primary_key=True) + pkgId = Column(String(500), nullable=True) + name = Column(String(200), nullable=True) + arch = Column(String(200), nullable=True) + 
version = Column(String(500), nullable=True) + epoch = Column(String(200), nullable=True) + release = Column(String(500), nullable=True) + summary = Column(String(500), nullable=True) + description = Column(String(500), nullable=True) + url = Column(String(500), nullable=True) + time_file = Column(Integer, nullable=True) + time_build = Column(Integer, nullable=True) + rpm_license = Column(String(500), nullable=True) + rpm_vendor = Column(String(500), nullable=True) + rpm_group = Column(String(500), nullable=True) + rpm_buildhost = Column(String(500), nullable=True) + rpm_sourcerpm = Column(String(500), nullable=True) + rpm_header_start = Column(Integer, nullable=True) + rpm_header_end = Column(Integer, nullable=True) + rpm_packager = Column(String(500), nullable=True) + size_package = Column(Integer, nullable=True) + size_installed = Column(Integer, nullable=True) + size_archive = Column(Integer, nullable=True) + location_href = Column(String(500), nullable=True) + location_base = Column(String(500), nullable=True) + checksum_type = Column(String(500), nullable=True) class bin_pack(DBHelper.BASE): # pylint: disable=C0103,R0903 @@ -37,48 +47,81 @@ class bin_pack(DBHelper.BASE): # pylint: disable=C0103,R0903 """ __tablename__ = 'bin_pack' - id = Column(Integer, primary_key=True) - + pkgKey = Column(Integer, primary_key=True) + pkgId = Column(String(500), nullable=True) name = Column(String(500), nullable=True) - - version = Column(String(200), nullable=True) - - srcIDkey = Column(Integer, ForeignKey('src_pack.id')) - - src_pack = relationship('src_pack', backref="bin_pack") - - -class pack_requires(DBHelper.BASE): # pylint: disable=C0103,R0903 + arch = Column(String(500), nullable=True) + version = Column(String(500), nullable=True) + epoch = Column(String(500), nullable=True) + release = Column(String(500), nullable=True) + summary = Column(String(500), nullable=True) + description = Column(String(500), nullable=True) + url = Column(String(500), nullable=True) + time_file = Column(Integer, nullable=True) + time_build = Column(Integer, nullable=True) + rpm_license = Column(String(500), nullable=True) + rpm_vendor = Column(String(500), nullable=True) + rpm_group = Column(String(500), nullable=True) + rpm_buildhost = Column(String(500), nullable=True) + rpm_sourcerpm = Column(String(500), nullable=True) + rpm_header_start = Column(Integer, nullable=True) + rpm_header_end = Column(Integer, nullable=True) + rpm_packager = Column(String(500), nullable=True) + size_package = Column(Integer, nullable=True) + size_installed = Column(Integer, nullable=True) + size_archive = Column(Integer, nullable=True) + location_href = Column(String(500), nullable=True) + location_base = Column(String(500), nullable=True) + checksum_type = Column(String(500), nullable=True) + src_name = Column(String(500), nullable=True) + + +class bin_requires(DBHelper.BASE): # pylint: disable=C0103,R0903 """ - functional description: + Binary package dependent package entity model """ - __tablename__ = 'pack_requires' + __tablename__ = 'bin_requires' id = Column(Integer, primary_key=True) + name = Column(String(200), nullable=True) + flags = Column(String(200), nullable=True) + epoch = Column(String(200), nullable=True) + version = Column(String(500), nullable=True) + release = Column(String(200), nullable=True) + pkgKey = Column(Integer, nullable=True) + pre = Column(String(20), nullable=True) - name = Column(String(500), nullable=True) - - # depProIDkey = Column(Integer, ForeignKey( - # 'pack_provides.id'), nullable=True) - 
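The rebuilt models keep the repodata pkgKey and component name columns, so a dependency can be resolved by matching a bin_requires name against the bin_provides rows defined just below and joining back to bin_pack on pkgKey. This is only a sketch of how the schema can be queried, not the project's actual resolution code, and the table contents are invented:

    # Illustrative join across the new name/pkgKey based tables, using in-memory sqlite.
    import sqlite3

    conn = sqlite3.connect(':memory:')
    conn.executescript("""
        create table bin_pack     (pkgKey integer, name text);
        create table bin_provides (pkgKey integer, name text);
        create table bin_requires (pkgKey integer, name text);
        insert into bin_pack     values (1, 'zlib'), (2, 'gzip');
        insert into bin_provides values (1, 'libz.so.1()(64bit)');
        insert into bin_requires values (2, 'libz.so.1()(64bit)');
    """)

    rows = conn.execute("""
        select r.name as component, p.name as provided_by
        from bin_requires r
        join bin_provides pr on pr.name = r.name
        join bin_pack p on p.pkgKey = pr.pkgKey
        where r.pkgKey = 2
    """).fetchall()
    print(rows)   # [('libz.so.1()(64bit)', 'zlib')]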
depProIDkey = Column(Integer) - srcIDkey = Column(Integer, ForeignKey('src_pack.id'), nullable=True) +class src_requires(DBHelper.BASE): # pylint: disable=C0103,R0903 + """ + Source entity package dependent package entity model + """ + __tablename__ = 'src_requires' - binIDkey = Column(Integer, ForeignKey('bin_pack.id'), nullable=True) + id = Column(Integer, primary_key=True) + name = Column(String(200), nullable=True) + flags = Column(String(200), nullable=True) + epoch = Column(String(200), nullable=True) + version = Column(String(500), nullable=True) + release = Column(String(200), nullable=True) + pkgKey = Column(Integer, nullable=True) + pre = Column(String(20), nullable=True) -class pack_provides(DBHelper.BASE): # pylint: disable=C0103,R0903 +class bin_provides(DBHelper.BASE): # pylint: disable=C0103,R0903 """ - functional description: + Component entity model provided by binary package """ - __tablename__ = 'pack_provides' + __tablename__ = 'bin_provides' id = Column(Integer, primary_key=True) - - name = Column(String(500), nullable=True) - - binIDkey = Column(Integer, ForeignKey('bin_pack.id')) + name = Column(String(200), nullable=True) + flags = Column(String(200), nullable=True) + epoch = Column(String(200), nullable=True) + version = Column(String(500), nullable=True) + release = Column(String(200), nullable=True) + pkgKey = Column(Integer, nullable=True) class maintenance_info(DBHelper.BASE): # pylint: disable=C0103,R0903 diff --git a/packageship/packageship/application/models/temporarydb.py b/packageship/packageship/application/models/temporarydb.py deleted file mode 100644 index 07a2dd17..00000000 --- a/packageship/packageship/application/models/temporarydb.py +++ /dev/null @@ -1,86 +0,0 @@ -#!/usr/bin/python3 -""" -Description: Database entity model mapping -""" -from sqlalchemy import Column, Integer, String -from packageship.libs.dbutils.sqlalchemy_helper import DBHelper - - -class src_package(DBHelper.BASE): # pylint: disable=C0103,R0903 - """ - Description: Temporary source package model - """ - - __tablename__ = 'src_package' - - pkgKey = Column(Integer, primary_key=True) - - name = Column(String(500), nullable=True) - - version = Column(String(200), nullable=True) - - rpm_license = Column(String(500), nullable=True) - - url = Column(String(200), nullable=True) - - maintaniner = Column(String(100), nullable=True) - - -class bin_package(DBHelper.BASE): # pylint: disable=C0103,R0903 - """ - Description: Temporary binary package model - """ - __tablename__ = 'bin_package' - - pkgKey = Column(Integer, primary_key=True) - - name = Column(String(500), nullable=True) - - version = Column(String(200), nullable=True) - - rpm_license = Column(String(500), nullable=True) - - url = Column(String(500), nullable=True) - - rpm_sourcerpm = Column(String(500), nullable=True) - - src_pack_name = Column(String(200), nullable=True) - - -class src_requires(DBHelper.BASE): # pylint: disable=C0103,R0903 - """ - Description: Temporary source package depends on package model - """ - __tablename__ = 'src_requires' - - id = Column(Integer, primary_key=True) - - pkgKey = Column(Integer) - - name = Column(String(500), nullable=True) - - -class bin_requiresment(DBHelper.BASE): # pylint: disable=C0103,R0903 - """ - Description: Dependency package model for temporary binary packages - """ - __tablename__ = 'bin_requiresment' - - id = Column(Integer, primary_key=True) - - pkgKey = Column(Integer) - - name = Column(String(500), nullable=True) - - -class bin_provides(DBHelper.BASE): # pylint: 
disable=C0103,R0903 - """ - Description: Provided package model for temporary binary packages - """ - __tablename__ = 'bin_provides' - - id = Column(Integer, primary_key=True) - - pkgKey = Column(Integer) - - name = Column(String(500), nullable=True) -- Gitee From 736ed328af91bd2e1522da547362ef1463c5d206 Mon Sep 17 00:00:00 2001 From: gongzt Date: Wed, 15 Jul 2020 19:37:36 +0800 Subject: [PATCH 03/19] =?UTF-8?q?=E9=85=8D=E7=BD=AE=E6=96=87=E4=BB=B6?= =?UTF-8?q?=E8=B7=AF=E5=BE=84=E9=97=AE=E9=A2=98?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- packageship/packageship/package.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packageship/packageship/package.ini b/packageship/packageship/package.ini index 46444d84..c63d9a7b 100644 --- a/packageship/packageship/package.ini +++ b/packageship/packageship/package.ini @@ -1,7 +1,7 @@ [SYSTEM] ; Configuration file path for data initialization -init_conf_path=/etc/pkgship/conf.yaml +init_conf_path=D:\\Project\\database\\conf.yaml ; Whether the system is in debug mode debug=false -- Gitee From b38a2607bb7cdd6606c360559b417238dd29ca3d Mon Sep 17 00:00:00 2001 From: gongzt Date: Wed, 15 Jul 2020 20:12:00 +0800 Subject: [PATCH 04/19] =?UTF-8?q?=E7=BB=B4=E6=8A=A4=E4=BA=BA=E4=BF=A1?= =?UTF-8?q?=E6=81=AF=E6=9F=A5=E8=AF=A2=E4=BC=98=E5=8C=96?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../application/initsystem/data_import.py | 106 ++++++++++-------- .../packageship/application/models/package.py | 4 +- 2 files changed, 64 insertions(+), 46 deletions(-) diff --git a/packageship/packageship/application/initsystem/data_import.py b/packageship/packageship/application/initsystem/data_import.py index 66ebfe94..cbeded63 100644 --- a/packageship/packageship/application/initsystem/data_import.py +++ b/packageship/packageship/application/initsystem/data_import.py @@ -14,7 +14,12 @@ from packageship.libs.exception import DatabaseRepeatException from packageship.libs.exception import Error from packageship.libs.configutils.readconfig import ReadConfig from packageship.libs.log import Log -from packageship.application.models.package import src_pack, bin_pack, bin_requires, src_requires, bin_provides, maintenance_info +from packageship.application.models.package import src_pack +from packageship.application.models.package import bin_pack +from packageship.application.models.package import bin_requires +from packageship.application.models.package import src_requires +from packageship.application.models.package import bin_provides +from packageship.application.models.package import maintenance_info from packageship import system_config LOGGER = Log(__name__) @@ -24,6 +29,7 @@ class InitDataBase(): """ Description: Database initialization, generate multiple databases and data based on configuration files + Attributes: config_file_path: configuration file path config_file_datas: initialize the configuration content of the database @@ -33,6 +39,7 @@ class InitDataBase(): def __init__(self, config_file_path=None): """ Description: Class instance initialization + Args: config_file_path: Configuration file path """ @@ -57,10 +64,8 @@ class InitDataBase(): def __read_config_file(self): """ - Description: Read the contents of the configuration file load each - node data in the yaml configuration file as - a list to return - Args: + Read the contents of the configuration file load each + node data in the yaml configuration file as a list to return Returns: 
Initialize the contents of the database configuration file @@ -88,10 +93,7 @@ class InitDataBase(): def init_data(self): """ - Description: Initialization of the database - Args: - - Returns: + Initialization of the database Raises: IOError: An error occurred while deleting the database information file @@ -118,7 +120,7 @@ class InitDataBase(): db_name='maintenance.information', tables=['maintenance_info'], is_datum=True).create_database() - + self._get_maintenance_info() for database in self.config_file_datas: if not database.get('dbname'): continue @@ -134,7 +136,8 @@ class InitDataBase(): def _create_database(self, database): """ - Description: create related databases + create related databases + Args: database: Initialize the configuration content of the database Returns: @@ -163,7 +166,8 @@ class InitDataBase(): def _init_data(self, database): """ - Description: data initialization operation + data initialization operation + Args: database: Initialize the configuration content of the database Returns: @@ -186,7 +190,6 @@ class InitDataBase(): raise ContentNoneException( 'The path to the sqlite file in the database initialization configuration \ is incorrect ') - self._get_maintenance_info() # # 3. Obtain temporary source package files and binary package files self.__save_data(src_db_file, bin_db_file, db_name) @@ -198,7 +201,6 @@ class InitDataBase(): database.get('dbname')) else: self._sqlite_db.drop_database() - except (IOError, Error) as exception_msg: LOGGER.logger.error(exception_msg) else: @@ -214,11 +216,11 @@ class InitDataBase(): @staticmethod def __columns(cursor): """ - Description: functional description:Returns all the column names - queried by the current cursor + functional description:Returns all the column names + queried by the current cursor + Args: cursor: Cursor - Returns: The first columns Raises: @@ -250,7 +252,8 @@ class InitDataBase(): def __save_data(self, src_db_file, bin_db_file, db_name): """ - Description: integration of multiple data files + integration of multiple data files + Args: src_package_paths: Source package database file bin_package_paths: Binary package database file @@ -290,7 +293,13 @@ class InitDataBase(): packages_datas = self.__get_data() if packages_datas is None: raise ContentNoneException( - 'There is no relevant data in the source package provided') + '{db_name}:There is no relevant data in the source \ + package provided '.format(db_name=db_name)) + for index, src_package_item in enumerate(packages_datas): + maintaniner, maintainlevel = self._get_mainter_info( + src_package_item.get('name'), src_package_item.get('version')) + packages_datas[index]['maintaniner'] = maintaniner + packages_datas[index]['maintainlevel'] = maintainlevel try: with DBHelper(db_name=db_name) as database: database.batch_add(packages_datas, src_pack) @@ -299,9 +308,10 @@ class InitDataBase(): def _save_src_requires(self, db_name): """ + Save the dependent package data of the source package Args: - + db_name:Name database Returns: Raises: @@ -323,8 +333,9 @@ class InitDataBase(): def _save_bin_packages(self, db_name): """ Save binary package data - Args: + Args: + db_name:Name database Returns: Raises: @@ -353,9 +364,10 @@ class InitDataBase(): def _save_bin_requires(self, db_name): """ + Save the dependent package data of the binary package Args: - + db_name:Name database Returns: Raises: @@ -375,9 +387,10 @@ class InitDataBase(): def _save_bin_provides(self, db_name): """ + Save the component data provided by the binary package Args: - + db_name:Name 
database Returns: Raises: @@ -398,7 +411,6 @@ class InitDataBase(): def _get_maintenance_info(self): """ Description: Obtain the information of the maintainer - Args: Returns: Maintainer related information @@ -440,12 +452,13 @@ class InitDataBase(): maintaniner = (maintenance_item.get( 'maintaniner'), maintenance_item.get('maintainlevel')) break + if maintaniner is None: + maintaniner = (None, None) return maintaniner def __exists_repeat_database(self): """ - Description: Determine if the same database name exists - Args: + Determine if the same database name exists Returns: True if there are duplicate databases, false otherwise @@ -463,7 +476,8 @@ class InitDataBase(): @staticmethod def __updata_settings_file(**Kwargs): """ - Description: update some configuration files related to the database in the system + update some configuration files related to the database in the system + Args: **Kwargs: data related to configuration file nodes database_name: Name database @@ -492,7 +506,8 @@ class InitDataBase(): @staticmethod def delete_settings_file(): """ - Description: Delete the configuration file of the database + Delete the configuration file of the database + Args: Returns: @@ -512,7 +527,8 @@ class InitDataBase(): def delete_db(self, db_name): """ - Description: elete the database + delete the database + Args: db_name: The name of the database Returns: @@ -547,7 +563,8 @@ class InitDataBase(): class MysqlDatabaseOperations(): """ - Description: Related to database operations, creating databases, creating tables + Related to database operations, creating databases, creating tables + Attributes: db_name: The name of the database create_database_sql: SQL statement to create a database @@ -556,7 +573,8 @@ class MysqlDatabaseOperations(): def __init__(self, db_name, tables=None, is_datum=False): """ - Description: Class instance initialization + Class instance initialization + Args: db_name: Database name """ @@ -570,8 +588,7 @@ class MysqlDatabaseOperations(): def create_database(self): """ - Description: create a database - Args: + create a mysql database Returns: True if successful, otherwise false @@ -596,7 +613,8 @@ class MysqlDatabaseOperations(): @classmethod def drop_database(cls, db_name): """ - Description: Delete the database according to the specified name + Delete the database according to the specified name + Args: db_name: The name of the database to be deleted Returns: @@ -620,8 +638,7 @@ class MysqlDatabaseOperations(): def __create_tables(self): """ - Description: Create the specified data table - Args: + Create the specified data table Returns: True if successful, otherwise false @@ -642,7 +659,8 @@ class MysqlDatabaseOperations(): class SqliteDatabaseOperations(): """ - Description: sqlite database related operations + sqlite database related operations + Attributes: db_name: Name database database_file_folder: Database folder path @@ -650,7 +668,8 @@ class SqliteDatabaseOperations(): def __init__(self, db_name, tables=None, is_datum=False, ** kwargs): """ - Description: Class instance initialization + Class instance initialization + Args: db_name: Database name kwargs: data related to configuration file nodes @@ -666,8 +685,8 @@ class SqliteDatabaseOperations(): def _database_file_path(self): """ - Description: Database file path - Args: + Database file path + Returns: Raises: @@ -687,8 +706,7 @@ class SqliteDatabaseOperations(): def create_database(self): """ - Description: create sqlite database and table - Args: + create sqlite database and table Returns: After 
successful generation, return the database file address, @@ -719,15 +737,13 @@ class SqliteDatabaseOperations(): def drop_database(self): """ - Description: Delete the specified sqlite database - Args: + Delete the specified sqlite database Returns: Return true after successful deletion, otherwise return false Raises: IOError: An io exception occurred while deleting the specified database file """ - try: db_path = os.path.join( self.database_file_folder, self.db_name + '.db') diff --git a/packageship/packageship/application/models/package.py b/packageship/packageship/application/models/package.py index eddd9e3a..725f4af7 100644 --- a/packageship/packageship/application/models/package.py +++ b/packageship/packageship/application/models/package.py @@ -2,7 +2,7 @@ """ Description: Database entity model mapping """ -from sqlalchemy import Column, Integer, String, ForeignKey, Boolean +from sqlalchemy import Column, Integer, String from packageship.libs.dbutils.sqlalchemy_helper import DBHelper @@ -39,6 +39,8 @@ class src_pack(DBHelper.BASE): # pylint: disable=C0103,R0903 location_href = Column(String(500), nullable=True) location_base = Column(String(500), nullable=True) checksum_type = Column(String(500), nullable=True) + maintaniner = Column(String(100), nullable=True) + maintainlevel = Column(String(100), nullable=True) class bin_pack(DBHelper.BASE): # pylint: disable=C0103,R0903 -- Gitee From ba680dfc8e83db0be246c8ae312d1f60229efd6e Mon Sep 17 00:00:00 2001 From: gongzt Date: Thu, 30 Jul 2020 13:55:54 +0800 Subject: [PATCH 05/19] Package life cycle information --- .../packageship/application/__init__.py | 6 + .../packageship/application/apps/__init__.py | 4 +- .../application/apps/lifecycle/__init__.py | 20 +++ .../apps/lifecycle/function/__init__.py | 15 ++ .../apps/lifecycle/function/base.py | 40 +++++ .../apps/lifecycle/function/download_yaml.py | 154 ++++++++++++++++++ .../apps/lifecycle/function/pypi.py | 84 ++++++++++ .../application/apps/lifecycle/url.py | 0 .../application/apps/lifecycle/view.py | 0 .../application/initsystem/data_import.py | 104 ++++-------- .../packageship/application/models/package.py | 43 ++++- .../packageship/application/settings.py | 2 + packageship/packageship/selfpkg.py | 2 + 13 files changed, 400 insertions(+), 74 deletions(-) create mode 100644 packageship/packageship/application/apps/lifecycle/__init__.py create mode 100644 packageship/packageship/application/apps/lifecycle/function/__init__.py create mode 100644 packageship/packageship/application/apps/lifecycle/function/base.py create mode 100644 packageship/packageship/application/apps/lifecycle/function/download_yaml.py create mode 100644 packageship/packageship/application/apps/lifecycle/function/pypi.py create mode 100644 packageship/packageship/application/apps/lifecycle/url.py create mode 100644 packageship/packageship/application/apps/lifecycle/view.py diff --git a/packageship/packageship/application/__init__.py b/packageship/packageship/application/__init__.py index bc3a6316..8b65f98d 100644 --- a/packageship/packageship/application/__init__.py +++ b/packageship/packageship/application/__init__.py @@ -4,6 +4,7 @@ """ from flask import Flask from flask_session import Session +from flask_apscheduler import APScheduler from packageship.application.settings import Config from packageship.libs.log import setup_log @@ -23,6 +24,11 @@ def init_app(operation): app.config.from_object(Config) + # Register a scheduled task + scheduler = APScheduler() + scheduler.init_app(app) + scheduler.start() + # Open 
session function Session(app) diff --git a/packageship/packageship/application/apps/__init__.py b/packageship/packageship/application/apps/__init__.py index 6a86c78c..81bcadda 100644 --- a/packageship/packageship/application/apps/__init__.py +++ b/packageship/packageship/application/apps/__init__.py @@ -3,9 +3,11 @@ Blueprint collection trying to page """ from packageship.application.apps.package import package, api as package_api +from packageship.application.apps.lifecycle import lifecycle, api as life_cycle_api blue_point = [ - (package, package_api) + (package, package_api), + (lifecycle, life_cycle_api) ] __all__ = ['blue_point'] diff --git a/packageship/packageship/application/apps/lifecycle/__init__.py b/packageship/packageship/application/apps/lifecycle/__init__.py new file mode 100644 index 00000000..b6a458e7 --- /dev/null +++ b/packageship/packageship/application/apps/lifecycle/__init__.py @@ -0,0 +1,20 @@ +#!/usr/bin/python3 +""" + +""" +from flask.blueprints import Blueprint +from flask_restful import Api +from packageship.application.apps.lifecycle.url import urls +from packageship import application + +lifecycle = Blueprint('lifecycle', __name__) + +# init restapi +api = Api() + +for view, url, operation in urls: + if application.OPERATION and application.OPERATION in operation.keys(): + api.add_resource(view, url) + + +__all__ = ['lifecycle', 'api'] diff --git a/packageship/packageship/application/apps/lifecycle/function/__init__.py b/packageship/packageship/application/apps/lifecycle/function/__init__.py new file mode 100644 index 00000000..3bd2ad10 --- /dev/null +++ b/packageship/packageship/application/apps/lifecycle/function/__init__.py @@ -0,0 +1,15 @@ +#!/usr/bin/python3 +""" +Registration of timed tasks +""" +from packageship.selfpkg import app +from .download_yaml import update_pkg_info + + +def start_tasks(): + """ + Start of timing tasks, used to register timing tasks that need to be executed + + """ + app.apscheduler.add_job( # pylint: disable=no-member + func=update_pkg_info, id="update_package_data", trigger="interval", seconds=30) diff --git a/packageship/packageship/application/apps/lifecycle/function/base.py b/packageship/packageship/application/apps/lifecycle/function/base.py new file mode 100644 index 00000000..1d42b32a --- /dev/null +++ b/packageship/packageship/application/apps/lifecycle/function/base.py @@ -0,0 +1,40 @@ +#!/usr/bin/python3 +""" +General approach to version control tools +""" +import datetime as date +from dateutil.relativedelta import relativedelta +from packageship.libs.log import Log + + +class Base(): + """ + Public method to get project tags and download yaml file + """ + + def __init__(self): + self.headers = { + 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW 64; rv:50.0) Gecko/20100101 \ + Firefox / 50.0 '} + self.log = Log(__name__) + + def format_date(self, date_time, month=0, format_str='%Y-%m-%d'): + """ + Date formatting operations + + """ + _date = date.datetime.strptime( + date_time, format_str) + _date = _date + relativedelta(month=month) + return _date + + def pkg_status(self, end_date): + """ + Get package status information according to the last validity period of the package + + """ + now_date = date.datetime.now() + maintainer_status = 'Available' + if (end_date - now_date).days < 0: + maintainer_status = "Overdue" + return maintainer_status diff --git a/packageship/packageship/application/apps/lifecycle/function/download_yaml.py b/packageship/packageship/application/apps/lifecycle/function/download_yaml.py new file 
mode 100644 index 00000000..5734db6c --- /dev/null +++ b/packageship/packageship/application/apps/lifecycle/function/download_yaml.py @@ -0,0 +1,154 @@ +#!/usr/bin/python3 +""" +Dynamically obtain the content of the yaml file \ +that saves the package information, periodically \ +obtain the content and save it in the database +""" +from concurrent.futures import ThreadPoolExecutor +import requests +import yaml +from sqlalchemy.exc import SQLAlchemyError +from requests.exceptions import HTTPError +from packageship.application.models.package import packages +from packageship.application.models.package import packages_issue +from packageship.libs.dbutils import DBHelper +from packageship.libs.exception import Error, ContentNoneException +from .base import Base + + +class ParseYaml(): + """ + Description: Download the contents of the yaml file + + Attributes: + base: base class instance + pkg: Specific package data + _table_name: The name of the data table to be operated + _owner: The address of the corporate warehouse + _repo: The address of the source code repository + openeuler_advisor_url: Get the warehouse address of the yaml file + _yaml_content: The content of the yaml file + """ + + def __init__(self, pkg_info, base, table_name): + self.base = base + self.pkg = pkg_info + self._table_name = table_name + self._owner = "src-openeuler" + self._repo = self.pkg.name + self.openeuler_advisor_url = \ + 'https://gitee.com/openeuler/openEuler-Advisor/raw/master/upstream-info/{name}.yaml'\ + .format(name=pkg_info.name) + self._yaml_content = None + + def update_pkg_info(self): + """ + Download the contents of the yaml file + + """ + if self._openeuler_advisor_exists_yaml(): + self._save_to_database() + else: + msg = "The yaml information of the %s package has not been\ + obtained yet" % self.pkg.name + self.base.log.logger.warning(msg) + + def _read_yaml_content(self, url): + """ + + """ + try: + response = requests.get( + url, headers=self.base.headers) + if response.status_code == 200: + self._yaml_content = yaml.safe_load(response.content) + + except HTTPError as error: + self.base.log.logger.error(error) + + def _openeuler_advisor_exists_yaml(self): + """ + Determine whether there is a yaml file with the current \ + package name under the openeuler-advisor project + + """ + self._read_yaml_content(self.openeuler_advisor_url) + if self._yaml_content: + return True + return False + + def _save_to_database(self): + """ + Save the acquired yaml file information to the database + + Raises: + ContentNoneException: The added entity content is empty + Error: An error occurred during data addition + """ + self._parse_warehouse_info() + tags = self._yaml_content.get('git_tag', None) + self._parse_tags_content(tags) + # TODO 获取issue列表数据,然后进行保存 + # Save data to the database + try: + with DBHelper(db_name="lifecycle") as database: + database.add(self.pkg) + database.batch_add([], packages_issue) + except (Error, ContentNoneException, SQLAlchemyError) as error: + self.base.log.logger.error(error) + + def _parse_warehouse_info(self): + """ + Parse the warehouse information in the yaml file + + """ + if self._yaml_content: + self.pkg.version_control = self._yaml_content.get( + 'version_control') + self.pkg.src_repo = self._yaml_content.get('src_repo') + self.pkg.tag_prefix = self._yaml_content.get('tag_prefix') + + def _parse_tags_content(self, tags): + """ + Parse the obtained tags content + + """ + try: + # Integrate tags information into key-value pairs + _tags = [(tag.split()[0], tag.split()[1]) for tag 
in tags] + _tags = sorted(_tags, key=lambda x: x[0], reverse=True) + self.pkg.latest_version = _tags[0][1] + self.pkg.latest_version_time = _tags[0][0] + _end_time = self.base.format_date( + self.pkg.latest_version_time, month=6) + if self.pkg.latest_version != self.pkg.version: + _end_time = self.base.format_date( + self.pkg.latest_version_time, month=3) + self.pkg.maintainer_status = self.base.pkg_status( + _end_time) + self.pkg.end_time = _end_time.strftime("%Y-%m-%d") + + except (IndexError,) as index_error: + self.base.log.logger.error(index_error) + + +def update_pkg_info(): + """ + Update the information of the upstream warehouse in the source package + + """ + try: + base_control = Base() + pool = ThreadPoolExecutor(max_workers=10) + with DBHelper(db_name="lifecycle") as database: + for table_name in filter(lambda x: x.endswith("_pkg"), database.engine.table_names()): + cls_model = type("packages", (packages, DBHelper.BASE), { + '__tablename__': table_name}) + # Query a specific table + for package_item in database.session.query(cls_model).all(): + parse_yaml = ParseYaml( + pkg_info=package_item, base=base_control, table_name=table_name) + pool.submit(parse_yaml.update_pkg_info) + pool.shutdown() + except SQLAlchemyError as error_msg: + base_control.log.logger.error(error_msg) diff --git a/packageship/packageship/application/apps/lifecycle/function/pypi.py b/packageship/packageship/application/apps/lifecycle/function/pypi.py new file mode 100644 index 00000000..6ef12d07 --- /dev/null +++ b/packageship/packageship/application/apps/lifecycle/function/pypi.py @@ -0,0 +1,84 @@ +#!/usr/bin/python3 +import json +import requests +from requests.exceptions import HTTPError +from packageship.libs.exception import Error +from .base import Base + + +class Pypi(Base): + """ + github version management tool related information acquisition + + + """ + + def __init__(self, pkg_info, base): + super(Pypi, self).__init__() + self._base = base + self.pkg = pkg_info + self.url = "https://pypi.org/pypi/{src_repos}/json".format( + src_repos=self.pkg.src_repo) + self.response_dict = None + + def _resp_to_json(self, response): + """ + Parse the response content and get tags + """ + try: + self.response_dict = json.loads(response.text) + except (ValueError, Error) as val_error: + self.log.logger.error(val_error) + + def _parse_tags_content(self): + """ + Parse the obtained tags content + + """ + try: + self.pkg.latest_version = self.response_dict['info']['version'] + self.pkg.latest_version_time = self.response_dict[ + "releases"][self.pkg.latest_version][-1]["upload_time"] + if self.pkg.latest_version_time: + _end_date = self.format_date( + self.pkg.latest_version_time.split('T')[0], month=6) + + if self.pkg.latest_version != self.pkg.version: + _end_date = self.format_date(self._get_publish_info(), month=3) + + self.pkg.maintainer_status = self.pkg_status(_end_date) + + except KeyError as key_error: + self.log.logger.error(key_error) + + def _get_publish_info(self): + """ + + """ + try: + _publish_date = self.response_dict["releases"][self.pkg.version][-1]["upload_time"] + if _publish_date: + _publish_date = _publish_date.split('T')[0] + return _publish_date + except KeyError as key_error: + self.log.logger.error(key_error) + + def update_pkg_info(self): + """ + Get the yaml file storing the current package information according to the package name + + """ + self._get_tags() + + def _get_tags(self): + """ + Get information about project release + """ + try: + response = requests.get(self.url, 
headers=self._base.headers) + if response.status_code == 200: + self._resp_to_json(response) + if self.response_dict: + self._parse_tags_content() + except HTTPError as error: + self.log.logger.error(error) diff --git a/packageship/packageship/application/apps/lifecycle/url.py b/packageship/packageship/application/apps/lifecycle/url.py new file mode 100644 index 00000000..e69de29b diff --git a/packageship/packageship/application/apps/lifecycle/view.py b/packageship/packageship/application/apps/lifecycle/view.py new file mode 100644 index 00000000..e69de29b diff --git a/packageship/packageship/application/initsystem/data_import.py b/packageship/packageship/application/initsystem/data_import.py index 9ca67a08..0537a56f 100644 --- a/packageship/packageship/application/initsystem/data_import.py +++ b/packageship/packageship/application/initsystem/data_import.py @@ -19,7 +19,6 @@ from packageship.application.models.package import bin_pack from packageship.application.models.package import bin_requires from packageship.application.models.package import src_requires from packageship.application.models.package import bin_provides -from packageship.application.models.package import maintenance_info from packageship import system_config LOGGER = Log(__name__) @@ -113,20 +112,6 @@ class InitDataBase(): raise IOError( 'An error occurred while deleting the database configuration file') - # Create a database maintained by benchmark information - if self.db_type == 'mysql': - MysqlDatabaseOperations( - db_name='maintenance.information', - tables=['maintenance_info'], - is_datum=True).create_database() - else: - SqliteDatabaseOperations( - db_name='maintenance.information', - tables=['maintenance_info'], - is_datum=True).create_database() - # Obtain the maintenance information of the previous data of the system - self._get_maintenance_info() - for database in self.config_file_datas: if not database.get('dbname'): LOGGER.logger.error( @@ -200,7 +185,7 @@ class InitDataBase(): raise ContentNoneException( 'The path to the sqlite file in the database initialization configuration \ is incorrect ') - if os.path.exists(src_db_file) or os.path.exists(bin_db_file): + if not os.path.exists(src_db_file) or not os.path.exists(bin_db_file): raise FileNotFoundError("sqlite file {src} or {bin} does not exist, please \ check and try again".format(src=src_db_file, bin=bin_db_file)) # 3. 
Obtain temporary source package files and binary package files @@ -318,12 +303,6 @@ class InitDataBase(): raise ContentNoneException( '{db_name}:There is no relevant data in the source \ package provided '.format(db_name=db_name)) - for index, src_package_item in enumerate(packages_datas): - maintaniner, maintainlevel = self._get_mainter_info( - src_package_item.get('name'), src_package_item.get('version')) - packages_datas[index]['maintaniner'] = maintaniner - packages_datas[index]['maintainlevel'] = maintainlevel - with DBHelper(db_name=db_name) as database: database.batch_add(packages_datas, src_pack) @@ -419,52 +398,6 @@ class InitDataBase(): with DBHelper(db_name=db_name) as database: database.batch_add(provides_datas, bin_provides) - def _get_maintenance_info(self): - """ - Description: Obtain the information of the maintainer - - Returns: - Maintainer related information - Raises: - SQLAlchemyError: An error occurred while executing the sql statement - """ - try: - with DBHelper(db_name='maintenance.information') as database: - for info in database.session.query(maintenance_info).all(): - if info.name not in self.mainter_infos.keys(): - self.mainter_infos[info.name] = [] - self.mainter_infos[info.name].append({ - 'version': info.version, - 'maintaniner': info.maintaniner, - 'maintainlevel': info.maintainlevel - }) - except SQLAlchemyError as sql_error: - LOGGER.logger.error(sql_error) - - def _get_mainter_info(self, src_package_name, version): - ''' - Get the maintainer information of the source package - - Args: - src_package_name: Source package name - version: Source package version number - Returns: - Maintainer's name - Raises: - - ''' - maintenance_infos = self.mainter_infos.get(src_package_name) - maintaniner = None - if maintenance_infos: - for maintenance_item in maintenance_infos: - if maintenance_item.get('version') == version: - maintaniner = (maintenance_item.get( - 'maintaniner'), maintenance_item.get('maintainlevel')) - break - if maintaniner is None: - maintaniner = (None, None) - return maintaniner - def __exists_repeat_database(self): """ Determine if the same database name exists @@ -569,6 +502,25 @@ class InitDataBase(): finally: file_read.close() + def create_life_cycle_db(self, db_name, tables=None): + """ + Create databases and tables related to the package life cycle + + Args: + db_name: The name of the database + tables: Table to be created + """ + database_engine = SqliteDatabaseOperations( + db_name=db_name, + tables=tables, + is_datum=True) + if self.db_type == 'mysql': + database_engine = MysqlDatabaseOperations( + db_name=db_name, + tables=tables, + is_datum=True) + return database_engine.create_database() + class MysqlDatabaseOperations(): """ @@ -612,7 +564,10 @@ class MysqlDatabaseOperations(): if not self.is_datum: data_base.session.execute(self.drop_database_sql) data_base.session.execute(self.create_database_sql) - except (SQLAlchemyError, InternalError) as exception_msg: + except InternalError as internal_error: + LOGGER.logger.info(internal_error) + return True + except SQLAlchemyError as exception_msg: LOGGER.logger.error(exception_msg) return False else: @@ -657,7 +612,9 @@ class MysqlDatabaseOperations(): try: with DBHelper(db_name=self.db_name) as database: if self.tables: - database.create_table(self.tables) + _tables = list(set(self.tables).difference( + set(database.engine.table_names()))) + database.create_table(_tables) except SQLAlchemyError as exception_msg: LOGGER.logger.error(exception_msg) @@ -730,14 +687,17 @@ class 
SqliteDatabaseOperations(): _db_file = os.path.join( self.database_file_folder, self.db_name) - if os.path.exists(_db_file + '.db'): + if not self.is_datum and os.path.exists(_db_file + '.db'): os.remove(_db_file + '.db') # create a sqlite database if (self.is_datum and not os.path.exists(_db_file + '.db')) or not self.is_datum: with DBHelper(db_name=_db_file) as database: try: - database.create_table(self.tables) + if self.tables: + _tables = list(set(self.tables).difference( + set(database.engine.table_names()))) + database.create_table(_tables) except (SQLAlchemyError, InternalError) as create_table_err: LOGGER.logger.error(create_table_err) return None diff --git a/packageship/packageship/application/models/package.py b/packageship/packageship/application/models/package.py index 725f4af7..3f170d41 100644 --- a/packageship/packageship/application/models/package.py +++ b/packageship/packageship/application/models/package.py @@ -2,7 +2,7 @@ """ Description: Database entity model mapping """ -from sqlalchemy import Column, Integer, String +from sqlalchemy import Column, Integer, String, Text from packageship.libs.dbutils.sqlalchemy_helper import DBHelper @@ -141,3 +141,44 @@ class maintenance_info(DBHelper.BASE): # pylint: disable=C0103,R0903 maintaniner = Column(String(100), nullable=True) maintainlevel = Column(String(100), nullable=True) + + +class packages(): # pylint: disable=C0103,R0903 + """ + Source code package version, issuer and other information + """ + id = Column(Integer, primary_key=True) + name = Column(String(500), nullable=True) + url = Column(String(500), nullable=True) + rpm_license = Column(String(500), nullable=True) + version = Column(String(200), nullable=True) + release = Column(String(200), nullable=True) + release_time = Column(String(50), nullable=True) + end_time = Column(String(50), nullable=True) + maintainer_status = Column(String(20), nullable=True, default="Available") + latest_version = Column(String(200), nullable=True) + latest_version_time = Column(String(50), nullable=True) + demand = Column(Integer, default=0) + cve = Column(Integer, default=0) + defect = Column(Integer, default=0) + maintainer = Column(String(200), nullable=True) + maintainlevel = Column(Integer, nullable=True) + feature = Column(String(500), nullable=True) + version_control = Column(String(50), nullable=False) + src_repo = Column(String(500), nullable=False) + tag_prefix = Column(String(20), nullable=True) + + +class packages_issue(DBHelper.BASE): # pylint: disable=C0103,R0903 + """ + Source package issue + """ + __tablename__ = "packages_issue" + id = Column(Integer, primary_key=True) + issue_id = Column(String(50), nullable=True) + issue_url = Column(String(500), nullable=True) + issue_content = Column(Text, nullable=True) + issue_title = Column(String(1000), nullable=True) + issue_status = Column(String(20), nullable=True) + name = Column(String(500), nullable=False) + issue_download = Column(String(500), nullable=False) diff --git a/packageship/packageship/application/settings.py b/packageship/packageship/application/settings.py index bc090439..f6516125 100644 --- a/packageship/packageship/application/settings.py +++ b/packageship/packageship/application/settings.py @@ -19,6 +19,8 @@ class Config(): LOG_LEVEL = 'INFO' + SCHEDULER_API_ENABLED = True + def __init__(self): self._read_config = ReadConfig() diff --git a/packageship/packageship/selfpkg.py b/packageship/packageship/selfpkg.py index b5878168..0df25a46 100644 --- a/packageship/packageship/selfpkg.py +++ 
b/packageship/packageship/selfpkg.py @@ -25,6 +25,8 @@ except Error as error: raise Exception('Service failed to start') else: from packageship.application.app_global import identity_verification + from packageship.application.apps.lifecycle.function import start_tasks + start_tasks() @app.before_request -- Gitee From d5357b13dcef4e556e3fe6ddf4bac39a0b6f6e06 Mon Sep 17 00:00:00 2001 From: gongzt Date: Thu, 30 Jul 2020 14:07:34 +0800 Subject: [PATCH 06/19] Judgment whether the file exists --- packageship/packageship/selfpkg.py | 1 - packageship/packageship/system_config.py | 15 +++++++-------- 2 files changed, 7 insertions(+), 9 deletions(-) diff --git a/packageship/packageship/selfpkg.py b/packageship/packageship/selfpkg.py index 0df25a46..4b9a1028 100644 --- a/packageship/packageship/selfpkg.py +++ b/packageship/packageship/selfpkg.py @@ -3,7 +3,6 @@ Description: Entry for project initialization and service startupc """ import os -from flask_script import Manager from packageship.libs.exception import Error from packageship.libs.configutils.readconfig import ReadConfig diff --git a/packageship/packageship/system_config.py b/packageship/packageship/system_config.py index a6ab994c..14de44a9 100644 --- a/packageship/packageship/system_config.py +++ b/packageship/packageship/system_config.py @@ -1,8 +1,8 @@ #!/usr/bin/python3 -''' -System-level file configuration, mainly configure +""" +Description:System-level file configuration, mainly configure the address of the operating environment, commonly used variables, etc. -''' +""" import os import sys @@ -16,19 +16,18 @@ else: # system configuration file path -SYS_CONFIG_PATH = os.path.join(BASE_PATH, 'package.ini') - +SYS_CONFIG_PATH = os.path.join('/', 'etc', 'pkgship', 'package.ini') # data file after successful data import DATABASE_FILE_INFO = os.path.join( - BASE_PATH, 'pkgship_dbs', 'database_file_info.yaml') + '/', 'var', 'run', 'database_file_info.yaml') # If the path of the imported database is not specified in the configuration file, the # configuration in the system is used by default -DATABASE_FOLDER_PATH = os.path.join(BASE_PATH, 'pkgship_dbs') +DATABASE_FOLDER_PATH = os.path.join('/', 'var', 'run', 'pkgship_dbs') # If the directory of log storage is not configured, # it will be stored in the following directory specified by the system by default -LOG_FOLDER_PATH = os.path.join(BASE_PATH, 'logs') +LOG_FOLDER_PATH = os.path.join('/', 'var', 'log', 'pkgship') -- Gitee From 58e30fdf5a2afb13efbe409818f93e39776abcd8 Mon Sep 17 00:00:00 2001 From: gongzt Date: Tue, 4 Aug 2020 13:50:15 +0800 Subject: [PATCH 07/19] =?UTF-8?q?issue=E4=BF=9D=E5=AD=98=E7=9A=84=E6=96=B9?= =?UTF-8?q?=E6=B3=95=E8=B0=83=E6=95=B4?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../apps/lifecycle/function/__init__.py | 6 ++- .../apps/lifecycle/function/download_yaml.py | 53 +++++++++++++++++-- .../application/apps/lifecycle/url.py | 1 + .../packageship/application/models/package.py | 2 + .../libs/dbutils/sqlalchemy_helper.py | 2 + packageship/packageship/pkgship.py | 2 +- 6 files changed, 60 insertions(+), 6 deletions(-) diff --git a/packageship/packageship/application/apps/lifecycle/function/__init__.py b/packageship/packageship/application/apps/lifecycle/function/__init__.py index 3bd2ad10..a26d2453 100644 --- a/packageship/packageship/application/apps/lifecycle/function/__init__.py +++ b/packageship/packageship/application/apps/lifecycle/function/__init__.py @@ -11,5 +11,7 @@ def start_tasks(): Start of timing 
tasks, used to register timing tasks that need to be executed """ - app.apscheduler.add_job( # pylint: disable=no-member - func=update_pkg_info, id="update_package_data", trigger="interval", seconds=30) + # app.apscheduler.add_job( # pylint: disable=no-member + # func=update_pkg_info, id="update_package_data", trigger="interval", day_of_week=0) + + update_pkg_info() diff --git a/packageship/packageship/application/apps/lifecycle/function/download_yaml.py b/packageship/packageship/application/apps/lifecycle/function/download_yaml.py index 5734db6c..e61506d2 100644 --- a/packageship/packageship/application/apps/lifecycle/function/download_yaml.py +++ b/packageship/packageship/application/apps/lifecycle/function/download_yaml.py @@ -14,6 +14,7 @@ from packageship.application.models.package import packages_issue from packageship.libs.dbutils import DBHelper from packageship.libs.exception import Error, ContentNoneException from .base import Base +from .gitee import Gitee class ParseYaml(): @@ -88,15 +89,61 @@ class ParseYaml(): self._parse_warehouse_info() tags = self._yaml_content.get('git_tag', None) self._parse_tags_content(tags) - # TODO 获取issue列表数据,然后进行保存 # Save data to the database + # issue_list = Gitee(self.pkg, self._table_name, self._owner, + # self._repo).execute_request_content_save() + + issue_list = [ + { + 'issue_id': 'I1PGWQ', + 'issue_url': 'https://gitee.com/openeuler/openEuler-Advisor/issues/I1PGWQ', + 'issue_content': ' get_yaml 接口的返回值类型有str和bool', + 'issue_status': 'open', + 'issue_title': '测试1', + 'name': 'kata-proxy', + 'issue_download': '', + 'issue_type': '需求', + 'related_release': '11' + + }, + { + 'issue_id': 'I1OQW0', + 'issue_url': 'https://gitee.com/openeuler/openEuler-Advisor/issues/I1OQW0', + 'issue_content': '【CI加固】对', + 'issue_status': 'open', + 'issue_title': '测试2', + 'name': 'kata-proxy', + 'issue_download': '', + 'issue_type': '安全', + 'related_release': '223' + } + ] try: with DBHelper(db_name="lifecycle") as database: database.add(self.pkg) - database.batch_add([], packages_issue) + self._save_issues(issue_list, database) except (Error, ContentNoneException, SQLAlchemyError) as error: self.base.log.logger.error(error) + def _save_issues(self, issue_list, database): + """ + Save the obtained issue information + + """ + issue_ids = [issue['issue_id'] for issue in issue_list] + exist_issues = database.session.query(packages_issue).filter( + packages_issue.issue_id.in_(issue_ids)).all() # pylint: disable=protected-access + add_issue_list = [] + for issue_item in issue_list: + issue_model = [ + issue for issue in exist_issues if issue.issue_id == issue_item['issue_id']] + if issue_model: + for key, val in issue_item.items(): + setattr(issue_model[0], key, val) + else: + add_issue_list.append(issue_item) + database.batch_add(add_issue_list, packages_issue) + def _parse_warehouse_info(self): """ Parse the warehouse information in the yaml file @@ -141,7 +188,7 @@ def update_pkg_info(): base_control = Base() pool = ThreadPoolExecutor(max_workers=10) with DBHelper(db_name="lifecycle") as database: - for table_name in filter(lambda x: x.endswith("_pkg"), database.engine.table_names()): + for table_name in filter(lambda x: x != 'packages_issue', database.engine.table_names()): cls_model = type("packages", (packages, DBHelper.BASE), { '__tablename__': table_name}) # Query a specific table diff --git a/packageship/packageship/application/apps/lifecycle/url.py b/packageship/packageship/application/apps/lifecycle/url.py index e69de29b..d3423408 100644 --- 
a/packageship/packageship/application/apps/lifecycle/url.py +++ b/packageship/packageship/application/apps/lifecycle/url.py @@ -0,0 +1 @@ +urls = [] diff --git a/packageship/packageship/application/models/package.py b/packageship/packageship/application/models/package.py index 3f170d41..4ea6b476 100644 --- a/packageship/packageship/application/models/package.py +++ b/packageship/packageship/application/models/package.py @@ -182,3 +182,5 @@ class packages_issue(DBHelper.BASE): # pylint: disable=C0103,R0903 issue_status = Column(String(20), nullable=True) name = Column(String(500), nullable=False) issue_download = Column(String(500), nullable=False) + issue_type = Column(String(50), nullable=True) + related_release = Column(String(500), nullable=True) diff --git a/packageship/packageship/libs/dbutils/sqlalchemy_helper.py b/packageship/packageship/libs/dbutils/sqlalchemy_helper.py index 228aee22..70e3489a 100644 --- a/packageship/packageship/libs/dbutils/sqlalchemy_helper.py +++ b/packageship/packageship/libs/dbutils/sqlalchemy_helper.py @@ -211,6 +211,7 @@ class DBHelper(): self.session.add(entity) except SQLAlchemyError as sql_error: + self.session.rollback() raise Error(sql_error) else: self.session.commit() @@ -246,6 +247,7 @@ class DBHelper(): dicts ) except SQLAlchemyError as sql_error: + self.session.rollback() raise Error(sql_error) else: self.session.commit() diff --git a/packageship/packageship/pkgship.py b/packageship/packageship/pkgship.py index 95b16486..2f7d5bc1 100644 --- a/packageship/packageship/pkgship.py +++ b/packageship/packageship/pkgship.py @@ -621,7 +621,7 @@ class UpdatePackageCommand(PkgshipCommand): Raises: ConnectionError: Request connection error """ - _url = self.write_host + '/packages/findByPackName' + _url = self.write_host + '/packages/packageInfo' try: response = requests.put( _url, data=json.dumps({'sourceName': params.packagename, -- Gitee From 95922985c2f18207c6d5bca4cc511e256499eec6 Mon Sep 17 00:00:00 2001 From: gongzt Date: Sat, 8 Aug 2020 11:15:36 +0800 Subject: [PATCH 08/19] API interface development such as downloading documents --- .../application/apps/lifecycle/__init__.py | 2 +- .../apps/lifecycle/function/__init__.py | 6 +- .../apps/lifecycle/function/download_yaml.py | 32 +-- .../apps/lifecycle/function/pypi.py | 84 ------- .../application/apps/lifecycle/serialize.py | 38 ++++ .../application/apps/lifecycle/url.py | 14 +- .../application/apps/lifecycle/view.py | 206 ++++++++++++++++++ .../apps/package/function/constants.py | 4 +- .../packageship/application/models/package.py | 5 +- packageship/packageship/libs/log/loghelper.py | 29 ++- packageship/packageship/package.ini | 4 + packageship/packageship/pkgship.py | 170 ++++++++++++--- packageship/packageship/system_config.py | 8 +- 13 files changed, 438 insertions(+), 164 deletions(-) delete mode 100644 packageship/packageship/application/apps/lifecycle/function/pypi.py create mode 100644 packageship/packageship/application/apps/lifecycle/serialize.py diff --git a/packageship/packageship/application/apps/lifecycle/__init__.py b/packageship/packageship/application/apps/lifecycle/__init__.py index b6a458e7..d17a06a5 100644 --- a/packageship/packageship/application/apps/lifecycle/__init__.py +++ b/packageship/packageship/application/apps/lifecycle/__init__.py @@ -1,6 +1,6 @@ #!/usr/bin/python3 """ - + Blueprint registration for life cycle """ from flask.blueprints import Blueprint from flask_restful import Api diff --git a/packageship/packageship/application/apps/lifecycle/function/__init__.py 
b/packageship/packageship/application/apps/lifecycle/function/__init__.py index a26d2453..4d4d49b0 100644 --- a/packageship/packageship/application/apps/lifecycle/function/__init__.py +++ b/packageship/packageship/application/apps/lifecycle/function/__init__.py @@ -11,7 +11,7 @@ def start_tasks(): Start of timing tasks, used to register timing tasks that need to be executed """ - # app.apscheduler.add_job( # pylint: disable=no-member - # func=update_pkg_info, id="update_package_data", trigger="interval", day_of_week=0) + app.apscheduler.add_job( # pylint: disable=no-member + func=update_pkg_info, id="update_package_data", trigger="interval", day_of_week=0) - update_pkg_info() + # update_pkg_info() diff --git a/packageship/packageship/application/apps/lifecycle/function/download_yaml.py b/packageship/packageship/application/apps/lifecycle/function/download_yaml.py index e61506d2..d416c13c 100644 --- a/packageship/packageship/application/apps/lifecycle/function/download_yaml.py +++ b/packageship/packageship/application/apps/lifecycle/function/download_yaml.py @@ -14,7 +14,7 @@ from packageship.application.models.package import packages_issue from packageship.libs.dbutils import DBHelper from packageship.libs.exception import Error, ContentNoneException from .base import Base -from .gitee import Gitee +# from .gitee import Gitee class ParseYaml(): @@ -92,32 +92,7 @@ class ParseYaml(): # Save data to the database # issue_list = Gitee(self.pkg, self._table_name, self._owner, # self._repo).execute_request_content_save() - - issue_list = [ - { - 'issue_id': 'I1PGWQ', - 'issue_url': 'https://gitee.com/openeuler/openEuler-Advisor/issues/I1PGWQ', - 'issue_content': ' get_yaml 接口的返回值类型有str和bool', - 'issue_status': 'open', - 'issue_title': '测试1', - 'name': 'kata-proxy', - 'issue_download': '', - 'issue_type': '需求', - 'related_release': '11' - - }, - { - 'issue_id': 'I1OQW0', - 'issue_url': 'https://gitee.com/openeuler/openEuler-Advisor/issues/I1OQW0', - 'issue_content': '【CI加固】对', - 'issue_status': 'open', - 'issue_title': '测试2', - 'name': 'kata-proxy', - 'issue_download': '', - 'issue_type': '安全', - 'related_release': '223' - } - ] + issue_list = [] try: with DBHelper(db_name="lifecycle") as database: database.add(self.pkg) @@ -188,7 +163,8 @@ def update_pkg_info(): base_control = Base() pool = ThreadPoolExecutor(max_workers=10) with DBHelper(db_name="lifecycle") as database: - for table_name in filter(lambda x: x != 'packages_issue', database.engine.table_names()): + for table_name in filter(lambda x: x != 'packages_issue', + database.engine.table_names()): cls_model = type("packages", (packages, DBHelper.BASE), { '__tablename__': table_name}) # Query a specific table diff --git a/packageship/packageship/application/apps/lifecycle/function/pypi.py b/packageship/packageship/application/apps/lifecycle/function/pypi.py deleted file mode 100644 index 6ef12d07..00000000 --- a/packageship/packageship/application/apps/lifecycle/function/pypi.py +++ /dev/null @@ -1,84 +0,0 @@ -#!/usr/bin/python3 -import json -import requests -from requests.exceptions import HTTPError -from packageship.libs.exception import Error -from .base import Base - - -class Pypi(Base): - """ - github version management tool related information acquisition - - - """ - - def __init__(self, pkg_info, base): - super(Pypi, self).__init__() - self._base = base - self.pkg = pkg_info - self.url = "https://pypi.org/pypi/{src_repos}/json".format( - src_repos=self.pkg.src_repo) - self.response_dict = None - - def _resp_to_json(self, response): - 
""" - Parse the response content and get tags - """ - try: - self.response_dict = json.loads(response.text) - except (ValueError, Error) as val_error: - self.log.logger.error(val_error) - - def _parse_tags_content(self): - """ - Parse the obtained tags content - - """ - try: - self.pkg.latest_version = self.response_dict['info']['version'] - self.pkg.latest_version_time = self.response_dict[ - "releases"][self.pkg.latest_version][-1]["upload_time"] - if self.pkg.latest_version_time: - _end_date = self.format_date( - self.pkg.latest_version_time.split('T')[0], month=6) - - if self.pkg.latest_version != self.pkg.version: - _end_date = self.format_date(self._get_publish_info(), month=3) - - self.pkg.maintainer_status = self.pkg_status(_end_date) - - except KeyError as key_error: - self.log.logger.error(key_error) - - def _get_publish_info(self): - """ - - """ - try: - _publish_date = self.response_dict["releases"][self.pkg.version][-1]["upload_time"] - if _publish_date: - _publish_date = _publish_date.split('T')[0] - return _publish_date - except KeyError as key_error: - self.log.logger.error(key_error) - - def update_pkg_info(self): - """ - Get the yaml file storing the current package information according to the package name - - """ - self._get_tags() - - def _get_tags(self): - """ - Get information about project release - """ - try: - response = requests.get(self.url, headers=self._base.headers) - if response.status_code == 200: - self._resp_to_json(response) - if self.response_dict: - self._parse_tags_content() - except HTTPError as error: - self.log.logger.error(error) diff --git a/packageship/packageship/application/apps/lifecycle/serialize.py b/packageship/packageship/application/apps/lifecycle/serialize.py new file mode 100644 index 00000000..5ed854e9 --- /dev/null +++ b/packageship/packageship/application/apps/lifecycle/serialize.py @@ -0,0 +1,38 @@ +#!/usr/bin/python3 +""" +Description: marshmallow serialize +""" +from marshmallow import Schema +from marshmallow import fields +from marshmallow import validate +from packageship.application.models.package import packages_issue, packages + + +class IssueSchema(Schema): + """ + Description: IssueSchema serialize + """ + # openeuler 20 + tableName = fields.Str( + required=True, validate=validate.Length(min=1, max=200)) + # repo + packageName = fields.Str(validate=validate.Length( + max=200), required=False, allow_none=True) + page = fields.Integer(required=True) + per_page = fields.Integer(required=True) + + +class IssueDownloadSchema(Schema): + + class Meta: + model = packages_issue + fields = ('issue_id', 'issue_url', 'issue_content', + 'issue_title', 'issue_status', 'name', 'issue_type', 'related_release') + + +class PackagesDownloadSchema(Schema): + class Meta: + model = packages + fields = ('name', 'url', 'rpm_license', 'version', 'release', 'release_time', + 'end_time', 'maintainer_status', 'latest_version', 'latest_version_time', + 'demand', 'cve', 'defect', 'maintainer', 'maintainlevel', 'feature') diff --git a/packageship/packageship/application/apps/lifecycle/url.py b/packageship/packageship/application/apps/lifecycle/url.py index d3423408..387bacda 100644 --- a/packageship/packageship/application/apps/lifecycle/url.py +++ b/packageship/packageship/application/apps/lifecycle/url.py @@ -1 +1,13 @@ -urls = [] +#!/usr/bin/python3 +""" +Life cycle of url giant whale collection +""" +from . 
import view + + +urls = [ + (view.DownloadFile, '/lifeCycle/download/', {'query': ('GET')}), + (view.MaintainerView, '/lifeCycle/maintainer', {'query': ('GET')}), + (view.TableColView, '/packages/tablecol', {'query': ('GET')}), + +] diff --git a/packageship/packageship/application/apps/lifecycle/view.py b/packageship/packageship/application/apps/lifecycle/view.py index e69de29b..7c5a8242 100644 --- a/packageship/packageship/application/apps/lifecycle/view.py +++ b/packageship/packageship/application/apps/lifecycle/view.py @@ -0,0 +1,206 @@ +#!/usr/bin/python3 +""" +Life cycle related api interface +""" +import io +import pandas as pd +from flask import request +from flask import jsonify, make_response +from flask import current_app +from flask_restful import Resource +from sqlalchemy.exc import DisconnectionError, SQLAlchemyError +from packageship.libs.exception import Error +from packageship.application.apps.package.function.constants import ResponseCode +from packageship.libs.dbutils.sqlalchemy_helper import DBHelper +from packageship.application.models.package import packages_issue, packages +from .serialize import IssueDownloadSchema, PackagesDownloadSchema + + +def meta_model(table_name): + """ + The mapping relationship of the orm model + """ + model = type("packages", (packages, DBHelper.BASE), { + '__tablename__': table_name}) + return model + + +class DownloadFile(Resource): + """ + Download the content of the issue or the excel file of the package content + """ + + def _download_excel(self, file_type, table_name=None): + """ + Download excel file + """ + file_name = 'packages.xlsx' + if file_type == 'packages': + download_content = self.__get_packages_content(table_name) + else: + file_name = 'issues.xlsx' + download_content = self.__get_issues_content() + if download_content is None: + return jsonify(ResponseCode.response_json(ResponseCode.SERVICE_ERROR)) + pd_dataframe = self.__to_dataframe(download_content) + + _response = self.__bytes_save(pd_dataframe) + return self.__set_response_header(_response, file_name) + + def __bytes_save(self, data_frame): + """ + Save the file content in the form of a binary file stream + """ + try: + bytes_io = io.BytesIO() + writer = pd.ExcelWriter( # pylint: disable=abstract-class-instantiated + bytes_io, engine='xlsxwriter') + data_frame.to_excel(writer, sheet_name='结果汇总', index=False) + writer.save() + writer.close() + bytes_io.seek(0) + _response = make_response(bytes_io.getvalue()) + bytes_io.close() + return _response + except (IOError, Error) as io_error: + current_app.logger.error(io_error) + return make_response() + + def __set_response_header(self, response, file_name): + """ + Set http response header information + """ + response.headers['Content-Type'] = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet" + response.headers["Cache-Control"] = "no-cache" + response.headers['Content-Disposition'] = 'attachment; filename={file_name}'.format( + file_name=file_name) + return response + + def __get_packages_content(self, table_name): + """ + Get package list information + """ + try: + with DBHelper(db_name='lifecycle') as database: + # Query all package data in the specified table + _model = meta_model(table_name) + _packageinfos = database.session.query(_model).all() + packages_dicts = PackagesDownloadSchema( + many=True).dump(_packageinfos) + return packages_dicts + + except (SQLAlchemyError, DisconnectionError) as error: + current_app.logger.error(error) + return None + + def __get_issues_content(self): + """ + Get the 
list of issues + """ + try: + with DBHelper(db_name='lifecycle') as database: + _issues = database.session.query(packages_issue).all() + idsues_dicts = IssueDownloadSchema(many=True).dump(_issues) + return idsues_dicts + except (SQLAlchemyError, DisconnectionError) as error: + current_app.logger.error(error) + return None + + def __to_dataframe(self, datas): + """ + Convert the obtained information into pandas content format + """ + + data_frame = pd.DataFrame(datas) + return data_frame + + def get(self, file_type): + """ + Download package collection information and isse list information + + """ + if file_type not in ['packages', 'issues']: + return jsonify(ResponseCode.response_json(ResponseCode.PARAM_ERROR)) + + table_name = request.args.get('table_name', None) + response = self._download_excel(file_type, table_name) + return response + + +class MaintainerView(Resource): + """ + Maintainer name collection + """ + + def __query_maintainers(self, table_name): + """ + Query the names of all maintainers in the specified table + """ + try: + with DBHelper(db_name='lifecycle') as database: + model = meta_model(table_name) + maintainers = database.session.query( + model.maintainer).group_by(model.maintainer).all() + return [maintainer_item[0] for maintainer_item in maintainers] + except (SQLAlchemyError, DisconnectionError) as error: + current_app.logger.error(error) + return [] + + def get(self): + """ + Get the list of maintainers + """ + table_name = request.args.get('table_name', None) + if not table_name: + return jsonify(ResponseCode.response_json(ResponseCode.PARAM_ERROR)) + # Group query of the names of all maintainers in the current table + maintainers = self.__query_maintainers(table_name) + return jsonify(ResponseCode.response_json(ResponseCode.SUCCESS, maintainers)) + + +class TableColView(Resource): + """ + The default column of the package shows the interface + """ + + def __columns_names(self): + """ + Mapping of column name and title + """ + columns = [ + ('name', 'Name', True), + ('version', 'Version', True), + ('release', 'Release', True), + ('url', 'Url', True), + ('linense', 'License', False), + ('feature', 'Feature', False), + ('maintainer', 'Maintainer', True), + ('maintainlevel', 'Maintenance Level', True), + ('release_time', 'Release Time', False), + ('end_of_lifecycle', 'End of Life Cycle', True), + ('maintainer_status', 'Maintain Status', True), + ('latest_version', 'Latest Version', False), + ('latest_version_time', 'Latest Version Release Time', False), + ('issue', 'Issue', True)] + return columns + + def __columns_mapping(self): + """ + + """ + columns = list() + for column in self.__columns_names(): + columns.append({ + 'column_name': column[0], + 'label': column[1], + 'default_selected': column[2] + }) + return columns + + def get(self): + """ + Get the default display column of the package + + """ + table_mapping_columns = self.__columns_mapping() + return jsonify(ResponseCode.response_json(ResponseCode.SUCCESS, table_mapping_columns)) diff --git a/packageship/packageship/application/apps/package/function/constants.py b/packageship/packageship/application/apps/package/function/constants.py index 7efe4127..d201bcd2 100644 --- a/packageship/packageship/application/apps/package/function/constants.py +++ b/packageship/packageship/application/apps/package/function/constants.py @@ -42,6 +42,7 @@ class ResponseCode(): FILE_NOT_FOUND = "4006" # Database operation module error status code DELETE_DB_ERROR = "40051" + SERVICE_ERROR = "50000" CONFIGFILE_PATH_EMPTY = "50001" 
FAILED_CREATE_DATABASE_TABLE = "50002" TYPE_ERROR = "50003" @@ -64,7 +65,8 @@ class ResponseCode(): TYPE_ERROR: "The source code and binary path types in the initialization file are abnormal", DATA_MERGE_ERROR: "abnormal multi-file database integration", FILE_NOT_FIND_ERROR: "system initialization configuration file does not exist", - DIS_CONNECTION_DB: "Unable to connect to the database, check the database configuration"} + DIS_CONNECTION_DB: "Unable to connect to the database, check the database configuration", + SERVICE_ERROR: "An exception occurred in the system, please try again later"} @classmethod def response_json(cls, code, data=None): diff --git a/packageship/packageship/application/models/package.py b/packageship/packageship/application/models/package.py index 4ea6b476..1413b516 100644 --- a/packageship/packageship/application/models/package.py +++ b/packageship/packageship/application/models/package.py @@ -147,6 +147,7 @@ class packages(): # pylint: disable=C0103,R0903 """ Source code package version, issuer and other information """ + __table_args__ = {'extend_existing': True} id = Column(Integer, primary_key=True) name = Column(String(500), nullable=True) url = Column(String(500), nullable=True) @@ -164,8 +165,8 @@ class packages(): # pylint: disable=C0103,R0903 maintainer = Column(String(200), nullable=True) maintainlevel = Column(Integer, nullable=True) feature = Column(String(500), nullable=True) - version_control = Column(String(50), nullable=False) - src_repo = Column(String(500), nullable=False) + version_control = Column(String(50), nullable=True) + src_repo = Column(String(500), nullable=True) tag_prefix = Column(String(20), nullable=True) diff --git a/packageship/packageship/libs/log/loghelper.py b/packageship/packageship/libs/log/loghelper.py index 190e43a7..92294feb 100644 --- a/packageship/packageship/libs/log/loghelper.py +++ b/packageship/packageship/libs/log/loghelper.py @@ -24,11 +24,13 @@ def setup_log(config=None): _level = 'INFO' logging.basicConfig(level=_level) path = READCONFIG.get_config('LOG', 'log_path') - if path is None: - log_name = READCONFIG.get_config('LOG', 'log_name') - if log_name is None: - log_name = 'log_info.log' + log_name = READCONFIG.get_config('LOG', 'log_name') + if not log_name: + log_name = 'log_info.log' + if not path: path = os.path.join(LOG_FOLDER_PATH, log_name) + else: + path = os.path.join(path, log_name) if not os.path.exists(path): try: os.makedirs(os.path.split(path)[0]) @@ -53,17 +55,20 @@ class Log(): def __init__(self, name=__name__, path=None): self.__name = name - self.__path = path + self.__file_handler = None - if self.__path is None: + + log_name = READCONFIG.get_config('LOG', 'log_name') + if not log_name: + log_name = 'log_info.log' + if path: + self.__path = os.path.join(LOG_FOLDER_PATH, path) + else: self.__path = READCONFIG.get_system('log_path') - log_name = READCONFIG.get_config('LOG', 'log_name') - if log_name is None: - log_name = 'log_info.log' - if self.__path is None: + if not self.__path: self.__path = os.path.join(LOG_FOLDER_PATH, log_name) - else: - self.__path = os.path.join(LOG_FOLDER_PATH, path) + else: + self.__path = os.path.join(self.__path, log_name) if not os.path.exists(self.__path): try: diff --git a/packageship/packageship/package.ini b/packageship/packageship/package.ini index f1e0875e..51f47ce9 100644 --- a/packageship/packageship/package.ini +++ b/packageship/packageship/package.ini @@ -25,6 +25,10 @@ write_ip_addr=127.0.0.1 query_ip_addr=127.0.0.1 +; The address of the remote service, 
the command line can directly +; call the remote service to complete the data request +remote_host=https://api.openeuler.org/pkgmanage + [DATABASE] diff --git a/packageship/packageship/pkgship.py b/packageship/packageship/pkgship.py index 290ac3cb..2447d0e2 100644 --- a/packageship/packageship/pkgship.py +++ b/packageship/packageship/pkgship.py @@ -84,9 +84,10 @@ class BaseCommand(): wirte_port = self._read_config.get_system('write_port') write_ip = self._read_config.get_system('write_ip_addr') - + if not all([write_ip, wirte_port]): + raise Error( + "The system does not configure the relevant port and ip correctly") _write_host = self.__http + write_ip + ":" + wirte_port - setattr(self, 'write_host', _write_host) def load_read_host(self): @@ -101,10 +102,21 @@ class BaseCommand(): read_port = self._read_config.get_system('query_port') read_ip = self._read_config.get_system('query_ip_addr') + if all([read_ip, read_port]): + _read_host = self.__http + read_ip + ":" + read_port - _read_host = self.__http + read_ip + ":" + read_port + setattr(self, 'read_host', _read_host) - setattr(self, 'read_host', _read_host) + def _set_read_host(self, remote=False): + """ + Set read domain name + """ + if remote: + _host = self._read_config.get_system('remote_host') + self.read_host = _host + if self.read_host is None: + raise Error( + "The system does not configure the relevant port and ip correctly") class PkgshipCommand(BaseCommand): @@ -161,9 +173,10 @@ class PkgshipCommand(BaseCommand): for command_params in self.params: self.parse.add_argument( # pylint: disable=E1101 command_params[0], - type=eval(command_params[1]), # pylint: disable=W0123 + # type=eval(command_params[1]), # pylint: disable=W0123 help=command_params[2], - default=command_params[3]) + default=command_params[3], + action=command_params[4]) @classmethod def parser_args(cls): @@ -361,7 +374,8 @@ class RemoveCommand(PkgshipCommand): super(RemoveCommand, self).__init__() self.parse = PkgshipCommand.subparsers.add_parser( 'rm', help='delete database operation') - self.params = [('db', 'str', 'name of the database operated', '')] + self.params = [ + ('db', 'str', 'name of the database operated', '', 'store')] def register(self): """ @@ -425,7 +439,7 @@ class InitDatabaseCommand(PkgshipCommand): self.parse = PkgshipCommand.subparsers.add_parser( 'init', help='initialization of the database') self.params = [ - ('-filepath', 'str', 'name of the database operated', '')] + ('-filepath', 'str', 'name of the database operated', '', 'store')] def register(self): """ @@ -486,7 +500,8 @@ class UpdateDatabaseCommand(PkgshipCommand): self.parse = PkgshipCommand.subparsers.add_parser( 'updatedb', help='database update operation') - self.params = [('db', 'str', 'name of the database operated', '')] + self.params = [ + ('db', 'str', 'name of the database operated', '', 'store')] def register(self): """ @@ -533,7 +548,8 @@ class AllPackageCommand(PkgshipCommand): 'list', help='get all package data') self.table = self.create_table( ['packagenames', 'database', 'version', 'license']) - self.params = [('-db', 'str', 'name of the database operated', '')] + self.params = [('-db', 'str', 'name of the database operated', '', 'store'), + ('-remote', 'str', 'The address of the remote service', False, 'store_true')] def register(self): """ @@ -558,6 +574,7 @@ class AllPackageCommand(PkgshipCommand): Raises: ConnectionError: Request connection error """ + self._set_read_host(params.remote) _url = self.read_host + \ 
'/packages?dbName={dbName}'.format(dbName=params.db) try: @@ -592,10 +609,11 @@ class UpdatePackageCommand(PkgshipCommand): self.parse = PkgshipCommand.subparsers.add_parser( 'updatepkg', help='update package data') self.params = [ - ('packagename', 'str', 'Source package name', ''), - ('db', 'str', 'name of the database operated', ''), - ('-m', 'str', 'Maintainers name', ''), - ('-l', 'int', 'database priority', 1) + ('packagename', 'str', 'Source package name', '', 'store'), + ('db', 'str', 'name of the database operated', '', 'store'), + ('-m', 'str', 'Maintainers name', '', 'store'), + ('-l', 'int', 'database priority', 1, 'store'), + ('-t', 'str', 'package expiry date', '', 'store') ] def register(self): @@ -624,10 +642,11 @@ class UpdatePackageCommand(PkgshipCommand): _url = self.write_host + '/packages/packageInfo' try: response = requests.put( - _url, data=json.dumps({'sourceName': params.packagename, - 'dbName': params.db, + _url, data=json.dumps({'srcname': params.packagename, + 'tbname': params.db, 'maintainer': params.m, - 'maintainlevel': params.l}), + 'maintainlevel': params.l, + 'end_time': params.t}), headers=self.headers) except ConnErr as conn_error: LOGGER.logger.error(conn_error) @@ -664,7 +683,8 @@ class BuildDepCommand(PkgshipCommand): 'builddep', help='query the compilation dependencies of the specified package') self.collection = True self.params = [ - ('packagename', 'str', 'source package name', ''), + ('packagename', 'str', 'source package name', '', 'store'), + ('-remote', 'str', 'The address of the remote service', False, 'store_true') ] self.collection_params = [ ('-dbs', 'Operational database collection') @@ -698,6 +718,8 @@ class BuildDepCommand(PkgshipCommand): Raises: ConnectionError: Request connection error """ + self._set_read_host(params.remote) + _url = self.read_host + '/packages/findBuildDepend' try: response = requests.post( @@ -741,7 +763,8 @@ class InstallDepCommand(PkgshipCommand): 'installdep', help='query the installation dependencies of the specified package') self.collection = True self.params = [ - ('packagename', 'str', 'source package name', ''), + ('packagename', 'str', 'source package name', '', 'store'), + ('-remote', 'str', 'The address of the remote service', False, 'store_true') ] self.collection_params = [ ('-dbs', 'Operational database collection') @@ -829,6 +852,8 @@ class InstallDepCommand(PkgshipCommand): Raises: ConnectionError: requests connection error """ + self._set_read_host(params.remote) + _url = self.read_host + '/packages/findInstallDepend' try: response = requests.post(_url, data=json.dumps( @@ -877,10 +902,11 @@ class SelfBuildCommand(PkgshipCommand): self.src_package_table = self.create_table([ 'src name', 'version', 'database']) self.params = [ - ('packagename', 'str', 'source package name', ''), - ('-t', 'str', 'Source of data query', 'binary'), - ('-w', 'str', 'whether to include other subpackages of binary', 0), - ('-s', 'str', 'whether it is self-compiled', 0) + ('packagename', 'str', 'source package name', '', 'store'), + ('-t', 'str', 'Source of data query', 'binary', 'store'), + ('-w', 'str', 'whether to include other subpackages of binary', 0, 'store'), + ('-s', 'str', 'whether it is self-compiled', 0, 'store'), + ('-remote', 'str', 'The address of the remote service', False, 'store_true') ] self.collection_params = [ @@ -1020,6 +1046,7 @@ class SelfBuildCommand(PkgshipCommand): Raises: ConnectionError: requests connection error """ + self._set_read_host(params.remote) _url = self.read_host + 
'/packages/findSelfDepend' try: response = requests.post(_url, @@ -1067,9 +1094,10 @@ class BeDependCommand(PkgshipCommand): self.parse = PkgshipCommand.subparsers.add_parser( 'bedepend', help='dependency query for the specified package') self.params = [ - ('packagename', 'str', 'source package name', ''), - ('db', 'str', 'name of the database operated', ''), - ('-w', 'str', 'whether to include other subpackages of binary', 0), + ('packagename', 'str', 'source package name', '', 'store'), + ('db', 'str', 'name of the database operated', '', 'store'), + ('-w', 'str', 'whether to include other subpackages of binary', 0, 'store'), + ('-remote', 'str', 'The address of the remote service', False, 'store_true') ] def register(self): @@ -1095,6 +1123,7 @@ class BeDependCommand(PkgshipCommand): Raises: ConnectionError: requests connection error """ + self._set_read_host(params.remote) _url = self.read_host + '/packages/findBeDepend' try: response = requests.post(_url, data=json.dumps( @@ -1138,8 +1167,9 @@ class SingleCommand(PkgshipCommand): self.parse = PkgshipCommand.subparsers.add_parser( 'single', help='query the information of a single package') self.params = [ - ('packagename', 'str', 'source package name', ''), - ('-db', 'str', 'name of the database operated', '') + ('packagename', 'str', 'source package name', '', 'store'), + ('-db', 'str', 'name of the database operated', '', 'store'), + ('-remote', 'str', 'The address of the remote service', False, 'store_true') ] def register(self): @@ -1195,6 +1225,7 @@ class SingleCommand(PkgshipCommand): Raises: ConnectionError: requests connection error """ + self._set_read_host(params.remote) _url = self.read_host + \ '/packages/packageInfo?dbName={db_name}&sourceName={packagename}' \ .format(db_name=params.db, packagename=params.packagename) @@ -1210,5 +1241,88 @@ class SingleCommand(PkgshipCommand): self.http_error(response) +class IssueCommand(PkgshipCommand): + """ + Description: Get the issue list + Attributes: + parse: Command line parsing example + params: Command line parameters + """ + + def __init__(self): + """ + Description: Class instance initialization + """ + super(IssueCommand, self).__init__() + + self.parse = PkgshipCommand.subparsers.add_parser( + 'issue', help='Query the issue list of the specified package') + self.params = [ + ('packagename', 'str', 'source package name', '', 'store'), + ('-db', 'str', 'name of the database operated', '', 'store'), + ('-remote', 'str', 'The address of the remote service', False, 'store_true') + ] + + def register(self): + """ + Description: Command line parameter injection + + """ + super(IssueCommand, self).register() + self.parse.set_defaults(func=self.do_command) + + def parse_package(self, response_data): + """ + Description: Parse the corresponding data of the package + + Args: + response_data: http response data + """ + show_field_name = ('sourceName', 'dbname', 'version', + 'license', 'maintainer', 'maintainlevel') + print_contents = [] + if response_data.get('code') == ResponseCode.SUCCESS: + package_all = response_data.get('data') + if isinstance(package_all, list): + for package_item in package_all: + for key, value in package_item.items(): + if value is None: + value = '' + if key in show_field_name: + line_content = '%-15s:%s' % (key, value) + print_contents.append(line_content) + print_contents.append('=' * self.columns) + else: + print(response_data.get('msg')) + if print_contents: + for content in print_contents: + self.print_(content=content) + + def do_command(self, params): + 
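# A short illustrative sketch of the fixed-width output format used by
# IssueCommand.parse_package above: every whitelisted field is printed as
# '%-15s:%s', one field per line, followed by a '=' separator whose width
# matches the terminal column count. The sample record and the column value
# of 80 are hypothetical.
_columns = 80
_issue_item = {'sourceName': 'dnf', 'dbname': 'openEuler-20.03',
               'version': '4.2.15', 'license': 'GPLv2'}
for _key, _value in _issue_item.items():
    print('%-15s:%s' % (_key, _value if _value is not None else ''))
print('=' * _columns)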
""" + Description: Action to execute command + Args: + params: command lines params + Returns: + + Raises: + ConnectionError: requests connection error + """ + self._set_read_host(params.remote) + _url = self.read_host + \ + '/packages/issueTrace?dbName={db_name}&sourceName={packagename}' \ + .format(db_name=params.db, packagename=params.packagename) + try: + response = requests.get(_url) + except ConnErr as conn_error: + LOGGER.logger.error(conn_error) + print(str(conn_error)) + else: + if response.status_code == 200: + self.parse_package(json.loads(response.text)) + else: + self.http_error(response) + + if __name__ == '__main__': main() diff --git a/packageship/packageship/system_config.py b/packageship/packageship/system_config.py index 14de44a9..6033fb97 100644 --- a/packageship/packageship/system_config.py +++ b/packageship/packageship/system_config.py @@ -16,18 +16,18 @@ else: # system configuration file path -SYS_CONFIG_PATH = os.path.join('/', 'etc', 'pkgship', 'package.ini') +SYS_CONFIG_PATH = os.path.join(BASE_PATH, 'package.ini') # data file after successful data import DATABASE_FILE_INFO = os.path.join( - '/', 'var', 'run', 'database_file_info.yaml') + BASE_PATH, 'database_file_info.yaml') # If the path of the imported database is not specified in the configuration file, the # configuration in the system is used by default -DATABASE_FOLDER_PATH = os.path.join('/', 'var', 'run', 'pkgship_dbs') +DATABASE_FOLDER_PATH = os.path.join(BASE_PATH, 'pkgship_dbs') # If the directory of log storage is not configured, # it will be stored in the following directory specified by the system by default -LOG_FOLDER_PATH = os.path.join('/', 'var', 'log', 'pkgship') +LOG_FOLDER_PATH = os.path.join(BASE_PATH, 'logs') -- Gitee From 203e87e65ffa8babf25fbd355bd62970e6026603 Mon Sep 17 00:00:00 2001 From: gongzt Date: Sat, 8 Aug 2020 14:04:11 +0800 Subject: [PATCH 09/19] Description of serialization class --- .../application/apps/lifecycle/serialize.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/packageship/packageship/application/apps/lifecycle/serialize.py b/packageship/packageship/application/apps/lifecycle/serialize.py index 5ed854e9..ac6c4cec 100644 --- a/packageship/packageship/application/apps/lifecycle/serialize.py +++ b/packageship/packageship/application/apps/lifecycle/serialize.py @@ -23,15 +23,20 @@ class IssueSchema(Schema): class IssueDownloadSchema(Schema): - - class Meta: + """ + Field serialization for issue file download + """ + class Meta: # pylint: disable=missing-class-docstring model = packages_issue fields = ('issue_id', 'issue_url', 'issue_content', 'issue_title', 'issue_status', 'name', 'issue_type', 'related_release') class PackagesDownloadSchema(Schema): - class Meta: + """ + Field serialization for package file download + """ + class Meta: # pylint: disable=missing-class-docstring model = packages fields = ('name', 'url', 'rpm_license', 'version', 'release', 'release_time', 'end_time', 'maintainer_status', 'latest_version', 'latest_version_time', -- Gitee From 5584342c07899957b902ea7938261f89ba0be387 Mon Sep 17 00:00:00 2001 From: gongzt Date: Sat, 8 Aug 2020 14:09:58 +0800 Subject: [PATCH 10/19] Optimize the acquisition of tags information, increase the storage of iSsue --- .../apps/lifecycle/function/download_yaml.py | 6 +- .../apps/lifecycle/function/gitee.py | 241 ++++++++++++++++++ 2 files changed, 244 insertions(+), 3 deletions(-) create mode 100644 packageship/packageship/application/apps/lifecycle/function/gitee.py diff --git 
a/packageship/packageship/application/apps/lifecycle/function/download_yaml.py b/packageship/packageship/application/apps/lifecycle/function/download_yaml.py index d416c13c..28f50557 100644 --- a/packageship/packageship/application/apps/lifecycle/function/download_yaml.py +++ b/packageship/packageship/application/apps/lifecycle/function/download_yaml.py @@ -14,7 +14,7 @@ from packageship.application.models.package import packages_issue from packageship.libs.dbutils import DBHelper from packageship.libs.exception import Error, ContentNoneException from .base import Base -# from .gitee import Gitee +from .gitee import Gitee class ParseYaml(): @@ -90,8 +90,8 @@ class ParseYaml(): tags = self._yaml_content.get('git_tag', None) self._parse_tags_content(tags) # Save data to the database - # issue_list = Gitee(self.pkg, self._table_name, self._owner, - # self._repo).execute_request_content_save() + issue_list = Gitee(self.pkg, self._table_name, self._owner, + self._repo).execute_request_content_save() issue_list = [] try: with DBHelper(db_name="lifecycle") as database: diff --git a/packageship/packageship/application/apps/lifecycle/function/gitee.py b/packageship/packageship/application/apps/lifecycle/function/gitee.py new file mode 100644 index 00000000..2a7630a2 --- /dev/null +++ b/packageship/packageship/application/apps/lifecycle/function/gitee.py @@ -0,0 +1,241 @@ +#!/usr/bin/python3 +import os +import json +import shutil +import tarfile +import requests +from multiprocessing.dummy import Pool + +from packageship.libs.configutils.readconfig import ReadConfig +from packageship.libs.exception import Error +from .base import Base + + +class Gitee(Base): + """ + gitee version management tool related information acquisition + + """ + + def __init__(self, pkg_info, table_name, owner, repo): + self.table_name = table_name + self.pkg_info = pkg_info + self.owner = owner + self.repo = repo + self._read_config = ReadConfig() + self.url = "https://gitee.com/" + self.api_url = "https://gitee.com/api/v5/repos" + self.enterprise_url = "https://gitee.com/api/v5/enterprise/{}/pull_requests" + self.pool = None + self.issue_id = None + self.patch_files_path = self._read_config.get_system( + "patch_files_path") + + def query_issues_info(self, issue_id=""): + """ + Description: View the issue details of the specified package + Args: + issue_id: Issue id + Returns: + issue_content_list: The issue details of the specified package list + Raises: + + """ + issue_content_list = [] + issue_url = self.api_url + \ + "/{}/{}/issues/{}".format(self.owner, self.repo, issue_id) + try: + response = requests.get( + issue_url, params={"state": "all", "per_page": 100}) + except Error as error: + self.log.logger.error(error) + return None + if response.status_code != 200: + self.log.logger.error(response.content.decode("utf-8")) + return False + total_page = int(response.headers['total_page']) + issue_content = self.parse_issue_content(response.json()) + issue_content_list.extend(issue_content) + if total_page > 1: + for i in range(2, total_page + 1): + response = requests.get( + issue_url, params={"state": "all", "per_page": 100, "page": i}) + issue_content_list.extend( + self.parse_issue_content(response.json())) + return json.dumps(issue_content_list, ensure_ascii=False) + + def parse_issues_content(self, sources): + """ + Description: Parse the response content and get issue content + Args:Issue list + + Returns:list:issue_id, issue_url, issue_content, issue_status, issue_download + Raises: + """ + result_list = [] + if 
isinstance(sources, list): + for source in sources: + issue_content = self.parse_issue_content(source) + if issue_content: + result_list.append(issue_content) + else: + issue_content = self.parse_issue_content(sources) + if issue_content: + result_list.append(issue_content) + return result_list + + def parse_issue_content(self, source): + """ + Description: Parse the response content and get issue content + Args: Source of issue content + + Returns:list:issue_id, issue_url, issue_content, issue_status, issue_download, issue_status + issue_type, related_release + Raises:KeyError + """ + try: + result_dict = {"issue_id": source['number'], "issue_url": source['html_url'], + "issue_title": source['title'].strip(), + "issue_content": source['body'].strip(), + "issue_status": source['state'], "issue_download": "", + "issue_type": source["issue_type"], + "related_release": source["labels"][0]['name'] if source["labels"] else None} + if source["issue_type"] == "缺陷": + self.pkg_info.defect = self.pkg_info.defect + 1 if self.pkg_info.defect else 1 + elif source["issue_type"] == "需求": + self.pkg_info.demand = self.pkg_info.demand + 1 if self.pkg_info.demand else 1 + elif source["issue_type"] == "CVE和安全问题": + self.pkg_info.cve = self.pkg_info.demand + 1 if self.pkg_info.demand else 1 + else: + pass + except KeyError as error: + self.log.logger.error(error) + return None + return result_dict + + def get_url_list_from_operate_logs(self): + """ + Description: Download patch + Args: + + Returns: + + """ + link_list = [] + operate_logs_url = self.enterprise_url.format(self.owner) + try: + response = requests.get(operate_logs_url, + params={"state": "all", "issue_number": "{}".format(self.issue_id)}) + except Error as error: + self.log.logger.error(error) + return None + if response.status_code != 200: + self.log.logger.error(response.content.decode("utf-8")) + return False + for content in response.json(): + # if "Pull Request" in content["content"]: + # issue_content_url = re.search( + # 'href=\"([^# ]*)\"', content["content"]).group(1) + link_list.append(content["diff_url"]) + return list(set(link_list)) + + def get_issue_files(self, urls): + """ + Description: Download the files associated with pr + Args: + urls: issue associates with pr url + Returns: + + """ + # full_urls = [] + # for url in urls: + # full_urls.append(url) + self.pool = Pool(5) + issue_urls = self.pool.map(self.get_files_url, urls) + dirname = os.path.join( + self.patch_files_path, self.table_name, self.pkg_info.name, self.issue_id) + if os.path.exists(dirname): + shutil.rmtree(dirname, ignore_errors=True) + os.makedirs(dirname, exist_ok=True) + os.chdir(dirname) + try: + self.pool.map(self.download_issue_file, issue_urls) + except Error as error: + self.log.logger.error(error) + return None + self.pool.close() + self.pool.join() + # Do we need to pack the file? 
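# Note on the choice at this step: as written, get_issue_files returns the raw
# download directory. Re-enabling the commented-out self.file_to_patch(dirname)
# call below would instead bundle that directory into a
# "<table_name>_<package_name>_<issue_id>.tar.gz" archive under patch_files_path
# and return the archive path. Returning the plain directory keeps the files of
# each issue directly browsable; the tarball variant is more convenient to ship
# as a single download.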
+ # return self.file_to_patch(dirname) + return dirname + + def get_files_url(self, base_url): + """ + Description: Download the files associated with pr + Args: + base_url: issue associates with pr url + + Returns:Get all file links in a given URL + + """ + try: + file_content = requests.get(base_url).json() + except Error as e: + self.log.logger.error(e) + return None + if file_content.status_code != 200: + self.log.logger.error(file_content.content.decode("utf-8")) + return False + urls = [url["raw_url"] for url in file_content] + return urls + + def download_issue_file(self, url): + """ + Description: Download issue file + Args: + url: + + Returns: + + """ + with open("{}.patch".format(self.issue_id), 'wb') as f: + f.write(requests.get(url).text) + + def file_to_patch(self, sourcefile): + """ + Description: Package folder, generate patch + Args: + sourcefile: + Returns:patch_path + + """ + patch_path = os.path.join( + self.patch_files_path, "{}_{}_{}.tar.gz".format(self.table_name, self.pkg_info.name, self.issue_id)) + if os.path.exists(patch_path): + os.remove(patch_path) + try: + with tarfile.open(patch_path, "w:gz") as tar: + tar.add(sourcefile, arcname=os.path.basename( + sourcefile)) + except IOError as error: + self.log.logger.error(error) + return patch_path + + def execute_request_content_save(self): + """ + Description: Make a request for the url address, extract the issue content, and save the pr files associated + with the issue + Args: + + Returns: issue_content_list + + """ + issue_content_list = [] + issue_contents = self.query_issues_info() + for issue in issue_contents: + self.issue_id = issue["issue_id"] + issue_file_urls = self.get_url_list_from_operate_logs() + issue_download = self.get_issue_files(issue_file_urls) + issue["issue_download"] = issue_download + issue_content_list.append(issue) + return issue_content_list -- Gitee From 0ff933231cb486d29dda965fc017a247d6d8a9df Mon Sep 17 00:00:00 2001 From: gongzt Date: Sat, 8 Aug 2020 14:18:55 +0800 Subject: [PATCH 11/19] =?UTF-8?q?=E6=92=A4=E9=94=80issue=E4=BF=9D=E5=AD=98?= =?UTF-8?q?=E7=9A=84=E6=9B=B4=E6=94=B9?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../apps/lifecycle/function/download_yaml.py | 6 +- .../apps/lifecycle/function/gitee.py | 241 ------------------ 2 files changed, 3 insertions(+), 244 deletions(-) delete mode 100644 packageship/packageship/application/apps/lifecycle/function/gitee.py diff --git a/packageship/packageship/application/apps/lifecycle/function/download_yaml.py b/packageship/packageship/application/apps/lifecycle/function/download_yaml.py index 28f50557..d416c13c 100644 --- a/packageship/packageship/application/apps/lifecycle/function/download_yaml.py +++ b/packageship/packageship/application/apps/lifecycle/function/download_yaml.py @@ -14,7 +14,7 @@ from packageship.application.models.package import packages_issue from packageship.libs.dbutils import DBHelper from packageship.libs.exception import Error, ContentNoneException from .base import Base -from .gitee import Gitee +# from .gitee import Gitee class ParseYaml(): @@ -90,8 +90,8 @@ class ParseYaml(): tags = self._yaml_content.get('git_tag', None) self._parse_tags_content(tags) # Save data to the database - issue_list = Gitee(self.pkg, self._table_name, self._owner, - self._repo).execute_request_content_save() + # issue_list = Gitee(self.pkg, self._table_name, self._owner, + # self._repo).execute_request_content_save() issue_list = [] try: with DBHelper(db_name="lifecycle") as 
database: diff --git a/packageship/packageship/application/apps/lifecycle/function/gitee.py b/packageship/packageship/application/apps/lifecycle/function/gitee.py deleted file mode 100644 index 2a7630a2..00000000 --- a/packageship/packageship/application/apps/lifecycle/function/gitee.py +++ /dev/null @@ -1,241 +0,0 @@ -#!/usr/bin/python3 -import os -import json -import shutil -import tarfile -import requests -from multiprocessing.dummy import Pool - -from packageship.libs.configutils.readconfig import ReadConfig -from packageship.libs.exception import Error -from .base import Base - - -class Gitee(Base): - """ - gitee version management tool related information acquisition - - """ - - def __init__(self, pkg_info, table_name, owner, repo): - self.table_name = table_name - self.pkg_info = pkg_info - self.owner = owner - self.repo = repo - self._read_config = ReadConfig() - self.url = "https://gitee.com/" - self.api_url = "https://gitee.com/api/v5/repos" - self.enterprise_url = "https://gitee.com/api/v5/enterprise/{}/pull_requests" - self.pool = None - self.issue_id = None - self.patch_files_path = self._read_config.get_system( - "patch_files_path") - - def query_issues_info(self, issue_id=""): - """ - Description: View the issue details of the specified package - Args: - issue_id: Issue id - Returns: - issue_content_list: The issue details of the specified package list - Raises: - - """ - issue_content_list = [] - issue_url = self.api_url + \ - "/{}/{}/issues/{}".format(self.owner, self.repo, issue_id) - try: - response = requests.get( - issue_url, params={"state": "all", "per_page": 100}) - except Error as error: - self.log.logger.error(error) - return None - if response.status_code != 200: - self.log.logger.error(response.content.decode("utf-8")) - return False - total_page = int(response.headers['total_page']) - issue_content = self.parse_issue_content(response.json()) - issue_content_list.extend(issue_content) - if total_page > 1: - for i in range(2, total_page + 1): - response = requests.get( - issue_url, params={"state": "all", "per_page": 100, "page": i}) - issue_content_list.extend( - self.parse_issue_content(response.json())) - return json.dumps(issue_content_list, ensure_ascii=False) - - def parse_issues_content(self, sources): - """ - Description: Parse the response content and get issue content - Args:Issue list - - Returns:list:issue_id, issue_url, issue_content, issue_status, issue_download - Raises: - """ - result_list = [] - if isinstance(sources, list): - for source in sources: - issue_content = self.parse_issue_content(source) - if issue_content: - result_list.append(issue_content) - else: - issue_content = self.parse_issue_content(sources) - if issue_content: - result_list.append(issue_content) - return result_list - - def parse_issue_content(self, source): - """ - Description: Parse the response content and get issue content - Args: Source of issue content - - Returns:list:issue_id, issue_url, issue_content, issue_status, issue_download, issue_status - issue_type, related_release - Raises:KeyError - """ - try: - result_dict = {"issue_id": source['number'], "issue_url": source['html_url'], - "issue_title": source['title'].strip(), - "issue_content": source['body'].strip(), - "issue_status": source['state'], "issue_download": "", - "issue_type": source["issue_type"], - "related_release": source["labels"][0]['name'] if source["labels"] else None} - if source["issue_type"] == "缺陷": - self.pkg_info.defect = self.pkg_info.defect + 1 if self.pkg_info.defect else 1 - elif 
source["issue_type"] == "需求": - self.pkg_info.demand = self.pkg_info.demand + 1 if self.pkg_info.demand else 1 - elif source["issue_type"] == "CVE和安全问题": - self.pkg_info.cve = self.pkg_info.demand + 1 if self.pkg_info.demand else 1 - else: - pass - except KeyError as error: - self.log.logger.error(error) - return None - return result_dict - - def get_url_list_from_operate_logs(self): - """ - Description: Download patch - Args: - - Returns: - - """ - link_list = [] - operate_logs_url = self.enterprise_url.format(self.owner) - try: - response = requests.get(operate_logs_url, - params={"state": "all", "issue_number": "{}".format(self.issue_id)}) - except Error as error: - self.log.logger.error(error) - return None - if response.status_code != 200: - self.log.logger.error(response.content.decode("utf-8")) - return False - for content in response.json(): - # if "Pull Request" in content["content"]: - # issue_content_url = re.search( - # 'href=\"([^# ]*)\"', content["content"]).group(1) - link_list.append(content["diff_url"]) - return list(set(link_list)) - - def get_issue_files(self, urls): - """ - Description: Download the files associated with pr - Args: - urls: issue associates with pr url - Returns: - - """ - # full_urls = [] - # for url in urls: - # full_urls.append(url) - self.pool = Pool(5) - issue_urls = self.pool.map(self.get_files_url, urls) - dirname = os.path.join( - self.patch_files_path, self.table_name, self.pkg_info.name, self.issue_id) - if os.path.exists(dirname): - shutil.rmtree(dirname, ignore_errors=True) - os.makedirs(dirname, exist_ok=True) - os.chdir(dirname) - try: - self.pool.map(self.download_issue_file, issue_urls) - except Error as error: - self.log.logger.error(error) - return None - self.pool.close() - self.pool.join() - # Do we need to pack the file? 
- # return self.file_to_patch(dirname) - return dirname - - def get_files_url(self, base_url): - """ - Description: Download the files associated with pr - Args: - base_url: issue associates with pr url - - Returns:Get all file links in a given URL - - """ - try: - file_content = requests.get(base_url).json() - except Error as e: - self.log.logger.error(e) - return None - if file_content.status_code != 200: - self.log.logger.error(file_content.content.decode("utf-8")) - return False - urls = [url["raw_url"] for url in file_content] - return urls - - def download_issue_file(self, url): - """ - Description: Download issue file - Args: - url: - - Returns: - - """ - with open("{}.patch".format(self.issue_id), 'wb') as f: - f.write(requests.get(url).text) - - def file_to_patch(self, sourcefile): - """ - Description: Package folder, generate patch - Args: - sourcefile: - Returns:patch_path - - """ - patch_path = os.path.join( - self.patch_files_path, "{}_{}_{}.tar.gz".format(self.table_name, self.pkg_info.name, self.issue_id)) - if os.path.exists(patch_path): - os.remove(patch_path) - try: - with tarfile.open(patch_path, "w:gz") as tar: - tar.add(sourcefile, arcname=os.path.basename( - sourcefile)) - except IOError as error: - self.log.logger.error(error) - return patch_path - - def execute_request_content_save(self): - """ - Description: Make a request for the url address, extract the issue content, and save the pr files associated - with the issue - Args: - - Returns: issue_content_list - - """ - issue_content_list = [] - issue_contents = self.query_issues_info() - for issue in issue_contents: - self.issue_id = issue["issue_id"] - issue_file_urls = self.get_url_list_from_operate_logs() - issue_download = self.get_issue_files(issue_file_urls) - issue["issue_download"] = issue_download - issue_content_list.append(issue) - return issue_content_list -- Gitee From 2ed9ed72f354bfbb83dc2517652b622509b62c2a Mon Sep 17 00:00:00 2001 From: gongzt Date: Sat, 8 Aug 2020 15:17:04 +0800 Subject: [PATCH 12/19] =?UTF-8?q?=E9=85=8D=E7=BD=AE=E6=96=87=E4=BB=B6?= =?UTF-8?q?=E7=9A=84=E6=9B=B4=E6=94=B9=E8=BF=98=E5=8E=9F?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../application/apps/lifecycle/function/__init__.py | 2 +- packageship/packageship/package.ini | 2 +- packageship/packageship/system_config.py | 8 ++++---- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/packageship/packageship/application/apps/lifecycle/function/__init__.py b/packageship/packageship/application/apps/lifecycle/function/__init__.py index 4d4d49b0..45c02af5 100644 --- a/packageship/packageship/application/apps/lifecycle/function/__init__.py +++ b/packageship/packageship/application/apps/lifecycle/function/__init__.py @@ -12,6 +12,6 @@ def start_tasks(): """ app.apscheduler.add_job( # pylint: disable=no-member - func=update_pkg_info, id="update_package_data", trigger="interval", day_of_week=0) + func=update_pkg_info, id="update_package_data", trigger="cron", day_of_week='0') # update_pkg_info() diff --git a/packageship/packageship/package.ini b/packageship/packageship/package.ini index 51f47ce9..093b5995 100644 --- a/packageship/packageship/package.ini +++ b/packageship/packageship/package.ini @@ -1,7 +1,7 @@ [SYSTEM] ; Configuration file path for data initialization -init_conf_path=D:\\Project\\database\\conf.yaml +init_conf_path=/etc/pkgship/conf.yaml ; Whether the system is in debug mode debug=false diff --git a/packageship/packageship/system_config.py 
b/packageship/packageship/system_config.py index 6033fb97..c5952ec1 100644 --- a/packageship/packageship/system_config.py +++ b/packageship/packageship/system_config.py @@ -16,18 +16,18 @@ else: # system configuration file path -SYS_CONFIG_PATH = os.path.join(BASE_PATH, 'package.ini') +SYS_CONFIG_PATH = os.path.join('/', 'etc', 'pkgship', 'package.ini') # data file after successful data import DATABASE_FILE_INFO = os.path.join( - BASE_PATH, 'database_file_info.yaml') + '/', 'var', 'run', 'database_file_info.yaml') # If the path of the imported database is not specified in the configuration file, the # configuration in the system is used by default -DATABASE_FOLDER_PATH = os.path.join(BASE_PATH, 'pkgship_dbs') +DATABASE_FOLDER_PATH = os.path.join('/', 'var', 'run', 'pkgship_dbs') # If the directory of log storage is not configured, # it will be stored in the following directory specified by the system by default -LOG_FOLDER_PATH = os.path.join(BASE_PATH, 'logs') +LOG_FOLDER_PATH = os.path.join('/', 'var', 'log', 'logs') -- Gitee From 55e351a747d91567bffdd0fb984c569e96c1861d Mon Sep 17 00:00:00 2001 From: gongzt Date: Mon, 10 Aug 2020 09:33:19 +0800 Subject: [PATCH 13/19] Data batch import command line development --- packageship/packageship/pkgship.py | 187 ++++++++++++++++++++++++----- 1 file changed, 155 insertions(+), 32 deletions(-) diff --git a/packageship/packageship/pkgship.py b/packageship/packageship/pkgship.py index 2447d0e2..a80d0d45 100644 --- a/packageship/packageship/pkgship.py +++ b/packageship/packageship/pkgship.py @@ -143,7 +143,7 @@ class PkgshipCommand(BaseCommand): ['package name', 'src name', 'version', 'database']) # Calculate the total width of the current terminal - self.columns = int(os.popen('stty size', 'r').read().split()[1]) + # self.columns = int(os.popen('stty size', 'r').read().split()[1]) self.params = [] @staticmethod @@ -199,6 +199,8 @@ class PkgshipCommand(BaseCommand): cls.register_command(SelfBuildCommand()) cls.register_command(BeDependCommand()) cls.register_command(SingleCommand()) + cls.register_command(IssueCommand()) + cls.register_command(ImportCommand()) try: args = cls.parser.parse_args() args.func(args) @@ -549,7 +551,14 @@ class AllPackageCommand(PkgshipCommand): self.table = self.create_table( ['packagenames', 'database', 'version', 'license']) self.params = [('-db', 'str', 'name of the database operated', '', 'store'), - ('-remote', 'str', 'The address of the remote service', False, 'store_true')] + ('-remote', 'str', 'The address of the remote service', + False, 'store_true'), + ('-pkgname', 'str', + 'Package name that needs fuzzy matching', '', 'store'), + ('-maintainner', 'str', 'Maintainer\'s name', '', 'store'), + ('-maintainlevel', 'str', + 'Maintain the level of data', '', 'store'), + ] def register(self): """ @@ -564,6 +573,26 @@ class AllPackageCommand(PkgshipCommand): super(AllPackageCommand, self).register() self.parse.set_defaults(func=self.do_command) + def __parse_package(self, response_data, table_name): + """ + Description: Parse the corresponding data of the package + Args: + response_data: http request response content + Returns: + + Raises: + + """ + if response_data.get('code') == ResponseCode.SUCCESS: + package_all = response_data.get('data') + if isinstance(package_all, list): + for package_item in package_all: + row_data = [package_item.get('name'), table_name, package_item.get( + 'version'), package_item.get('license')] + self.table.add_row(row_data) + else: + print(response_data.get('msg')) + def do_command(self, 
params): """ Description: Action to execute command @@ -576,7 +605,12 @@ class AllPackageCommand(PkgshipCommand): """ self._set_read_host(params.remote) _url = self.read_host + \ - '/packages?dbName={dbName}'.format(dbName=params.db) + '/packages?table_name={table_name}&query_pkg_name={pkg_name}& \ + maintainner={maintainner}&maintainlevel={maintainlevel}'.format( + table_name=params.db, + pkg_name=params.pkgname, + maintainner=params.maintainner, + maintainlevel=params.maintainlevel) try: response = requests.get(_url) except ConnErr as conn_error: @@ -585,7 +619,7 @@ class AllPackageCommand(PkgshipCommand): else: if response.status_code == 200: - self.parse_package(json.loads(response.text)) + self.__parse_package(json.loads(response.text), params.db) if self.table: print(self.table) else: @@ -611,9 +645,9 @@ class UpdatePackageCommand(PkgshipCommand): self.params = [ ('packagename', 'str', 'Source package name', '', 'store'), ('db', 'str', 'name of the database operated', '', 'store'), - ('-m', 'str', 'Maintainers name', '', 'store'), - ('-l', 'int', 'database priority', 1, 'store'), - ('-t', 'str', 'package expiry date', '', 'store') + ('-maintainer', 'str', 'Maintainers name', '', 'store'), + ('-maintainlevel', 'int', 'database priority', 1, 'store'), + ('-endoflife', 'str', 'package expiry date', '', 'store') ] def register(self): @@ -639,14 +673,14 @@ class UpdatePackageCommand(PkgshipCommand): Raises: ConnectionError: Request connection error """ - _url = self.write_host + '/packages/packageInfo' + _url = self.write_host + '/lifeCycle/updatePkgInfo' try: response = requests.put( - _url, data=json.dumps({'srcname': params.packagename, - 'tbname': params.db, - 'maintainer': params.m, - 'maintainlevel': params.l, - 'end_time': params.t}), + _url, data=json.dumps({'pkg_name': params.packagename, + 'table_name': params.db, + 'maintainer': params.maintainer, + 'maintainlevel': params.maintainlevel, + 'end_of_life': params.endoflife}), headers=self.headers) except ConnErr as conn_error: LOGGER.logger.error(conn_error) @@ -1171,6 +1205,8 @@ class SingleCommand(PkgshipCommand): ('-db', 'str', 'name of the database operated', '', 'store'), ('-remote', 'str', 'The address of the remote service', False, 'store_true') ] + self.provides_table = self.create_table(['Symbol', 'Required by']) + self.requires_table = self.create_table(['Symbol', 'Provides by']) def register(self): """ @@ -1185,7 +1221,42 @@ class SingleCommand(PkgshipCommand): super(SingleCommand, self).register() self.parse.set_defaults(func=self.do_command) - def parse_package(self, response_data): + def __parse_package_detail(self, response_data): + """ + + """ + _show_field_name = ('pkg_name', 'version', 'release', 'url', 'license', 'feature', + 'maintainer', 'maintainlevel', 'gitee_url', 'issue', 'summary', + 'description', 'buildrequired') + _package_detail_info = response_data.get('data') + _line_content = [] + if _package_detail_info: + for key, value in _package_detail_info.items(): + if value is None: + value = '' + # buildrequired value + if isinstance(value, list): + value = '、'.join(value) + if key in _show_field_name: + _line_content.append('%-15s:%s' % (key, value)) + for content in _line_content: + self.print_(content=content) + + def __parse_provides(self, subpacks): + """ + + """ + if subpacks and isinstance(subpacks, list): + for _subpack in subpacks: + pass + + def __parse_requires(self, subpacks): + """ + + """ + pass + + def __parse_package(self, response_data): """ Description: Parse the corresponding data of 
the package Args: @@ -1195,25 +1266,17 @@ class SingleCommand(PkgshipCommand): Raises: """ - show_field_name = ('sourceName', 'dbname', 'version', - 'license', 'maintainer', 'maintainlevel') - print_contents = [] + if response_data.get('code') == ResponseCode.SUCCESS: - package_all = response_data.get('data') - if isinstance(package_all, list): - for package_item in package_all: - for key, value in package_item.items(): - if value is None: - value = '' - if key in show_field_name: - line_content = '%-15s:%s' % (key, value) - print_contents.append(line_content) - print_contents.append('=' * self.columns) + self.__parse_package_detail(response_data) + try: + _subpacks = response_data['data']['subpack'] + self.__parse_provides(_subpacks) + self.__parse_requires(_subpacks) + except KeyError as key_error: + LOGGER.logger.error(key_error) else: print(response_data.get('msg')) - if print_contents: - for content in print_contents: - self.print_(content=content) def do_command(self, params): """ @@ -1227,7 +1290,7 @@ class SingleCommand(PkgshipCommand): """ self._set_read_host(params.remote) _url = self.read_host + \ - '/packages/packageInfo?dbName={db_name}&sourceName={packagename}' \ + '/packages/packageInfo?table_name={db_name}&pkg_name={packagename}' \ .format(db_name=params.db, packagename=params.packagename) try: response = requests.get(_url) @@ -1236,7 +1299,7 @@ class SingleCommand(PkgshipCommand): print(str(conn_error)) else: if response.status_code == 200: - self.parse_package(json.loads(response.text)) + self.__parse_package(json.loads(response.text)) else: self.http_error(response) @@ -1324,5 +1387,65 @@ class IssueCommand(PkgshipCommand): self.http_error(response) +class ImportCommand(PkgshipCommand): + """ + Description: Import package information in the life cycle + Attributes: + parse: Command line parsing example + params: Command line parameters + """ + + def __init__(self): + """ + Description: Class instance initialization + """ + super(ImportCommand, self).__init__() + + self.parse = PkgshipCommand.subparsers.add_parser( + 'import', help='Import package information in the life cycle') + self.params = [ + ('tablename', 'str', 'The name of the table to be created', '', 'store'), + ('filepath', 'str', 'Imported sqlite file path', '', 'store'), + ] + + def register(self): + """ + Description: Command line parameter injection + + """ + super(ImportCommand, self).register() + self.parse.set_defaults(func=self.do_command) + + def do_command(self, params): + """ + Description: Action to execute command + Args: + params: command lines params + Returns: + + Raises: + ConnectionError: requests connection error + """ + _url = self.write_host + '/lifeCycle/importdata' + try: + response = requests.post( + _url, + data=json.dumps( + {'filepath': params.filepath, 'tablename': params.tablename}), + headers=self.headers) + except ConnErr as conn_error: + LOGGER.logger.error(conn_error) + print(str(conn_error)) + else: + if response.status_code == 200: + _response_content = json.loads(response.text) + if _response_content.get('code') == ResponseCode.SUCCESS: + print('import success') + else: + print('import failure') + else: + self.http_error(response) + + if __name__ == '__main__': main() -- Gitee From 40c558e31be6d0a4f37c795ad40ba73011634a9f Mon Sep 17 00:00:00 2001 From: gongzt Date: Mon, 10 Aug 2020 09:58:40 +0800 Subject: [PATCH 14/19] Command line query function supports calling remote services --- .../packageship/application/apps/__init__.py | 2 - .../application/apps/lifecycle/__init__.py 
| 20 -- .../apps/lifecycle/function/__init__.py | 17 -- .../apps/lifecycle/function/base.py | 40 ---- .../apps/lifecycle/function/download_yaml.py | 177 --------------- .../application/apps/lifecycle/serialize.py | 43 ---- .../application/apps/lifecycle/url.py | 13 -- .../application/apps/lifecycle/view.py | 206 ------------------ .../application/initsystem/data_import.py | 102 ++++++--- .../packageship/application/models/package.py | 46 +--- packageship/packageship/pkgship.py | 181 +-------------- packageship/packageship/selfpkg.py | 2 - 12 files changed, 75 insertions(+), 774 deletions(-) delete mode 100644 packageship/packageship/application/apps/lifecycle/__init__.py delete mode 100644 packageship/packageship/application/apps/lifecycle/function/__init__.py delete mode 100644 packageship/packageship/application/apps/lifecycle/function/base.py delete mode 100644 packageship/packageship/application/apps/lifecycle/function/download_yaml.py delete mode 100644 packageship/packageship/application/apps/lifecycle/serialize.py delete mode 100644 packageship/packageship/application/apps/lifecycle/url.py delete mode 100644 packageship/packageship/application/apps/lifecycle/view.py diff --git a/packageship/packageship/application/apps/__init__.py b/packageship/packageship/application/apps/__init__.py index 81bcadda..0cb8d57f 100644 --- a/packageship/packageship/application/apps/__init__.py +++ b/packageship/packageship/application/apps/__init__.py @@ -3,11 +3,9 @@ Blueprint collection trying to page """ from packageship.application.apps.package import package, api as package_api -from packageship.application.apps.lifecycle import lifecycle, api as life_cycle_api blue_point = [ (package, package_api), - (lifecycle, life_cycle_api) ] __all__ = ['blue_point'] diff --git a/packageship/packageship/application/apps/lifecycle/__init__.py b/packageship/packageship/application/apps/lifecycle/__init__.py deleted file mode 100644 index d17a06a5..00000000 --- a/packageship/packageship/application/apps/lifecycle/__init__.py +++ /dev/null @@ -1,20 +0,0 @@ -#!/usr/bin/python3 -""" - Blueprint registration for life cycle -""" -from flask.blueprints import Blueprint -from flask_restful import Api -from packageship.application.apps.lifecycle.url import urls -from packageship import application - -lifecycle = Blueprint('lifecycle', __name__) - -# init restapi -api = Api() - -for view, url, operation in urls: - if application.OPERATION and application.OPERATION in operation.keys(): - api.add_resource(view, url) - - -__all__ = ['lifecycle', 'api'] diff --git a/packageship/packageship/application/apps/lifecycle/function/__init__.py b/packageship/packageship/application/apps/lifecycle/function/__init__.py deleted file mode 100644 index 45c02af5..00000000 --- a/packageship/packageship/application/apps/lifecycle/function/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -#!/usr/bin/python3 -""" -Registration of timed tasks -""" -from packageship.selfpkg import app -from .download_yaml import update_pkg_info - - -def start_tasks(): - """ - Start of timing tasks, used to register timing tasks that need to be executed - - """ - app.apscheduler.add_job( # pylint: disable=no-member - func=update_pkg_info, id="update_package_data", trigger="cron", day_of_week='0') - - # update_pkg_info() diff --git a/packageship/packageship/application/apps/lifecycle/function/base.py b/packageship/packageship/application/apps/lifecycle/function/base.py deleted file mode 100644 index 1d42b32a..00000000 --- 
a/packageship/packageship/application/apps/lifecycle/function/base.py +++ /dev/null @@ -1,40 +0,0 @@ -#!/usr/bin/python3 -""" -General approach to version control tools -""" -import datetime as date -from dateutil.relativedelta import relativedelta -from packageship.libs.log import Log - - -class Base(): - """ - Public method to get project tags and download yaml file - """ - - def __init__(self): - self.headers = { - 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW 64; rv:50.0) Gecko/20100101 \ - Firefox / 50.0 '} - self.log = Log(__name__) - - def format_date(self, date_time, month=0, format_str='%Y-%m-%d'): - """ - Date formatting operations - - """ - _date = date.datetime.strptime( - date_time, format_str) - _date = _date + relativedelta(month=month) - return _date - - def pkg_status(self, end_date): - """ - Get package status information according to the last validity period of the package - - """ - now_date = date.datetime.now() - maintainer_status = 'Available' - if (end_date - now_date).days < 0: - maintainer_status = "Overdue" - return maintainer_status diff --git a/packageship/packageship/application/apps/lifecycle/function/download_yaml.py b/packageship/packageship/application/apps/lifecycle/function/download_yaml.py deleted file mode 100644 index d416c13c..00000000 --- a/packageship/packageship/application/apps/lifecycle/function/download_yaml.py +++ /dev/null @@ -1,177 +0,0 @@ -#!/usr/bin/python3 -""" -Dynamically obtain the content of the yaml file \ -that saves the package information, periodically \ -obtain the content and save it in the database -""" -from concurrent.futures import ThreadPoolExecutor -import requests -import yaml -from sqlalchemy.exc import SQLAlchemyError -from requests.exceptions import HTTPError -from packageship.application.models.package import packages -from packageship.application.models.package import packages_issue -from packageship.libs.dbutils import DBHelper -from packageship.libs.exception import Error, ContentNoneException -from .base import Base -# from .gitee import Gitee - - -class ParseYaml(): - """ - Description: Download the contents of the yaml file - - Attributes: - base: base class instance - pkg: Specific package data - _table_name: The name of the data table to be operated - _owner: The address of the corporate warehouse - _repo: The address of the source code repository - openeuler_advisor_url: Get the warehouse address of the yaml file - _yaml_content: The content of the yaml file - """ - - def __init__(self, pkg_info, base, table_name): - self.base = base - self.pkg = pkg_info - self._table_name = table_name - self._owner = "src-openeuler" - self._repo = self.pkg.name - self.openeuler_advisor_url = \ - 'https://gitee.com/openeuler/openEuler-Advisor/raw/master/upstream-info/{name}.yaml'\ - .format(name=pkg_info.name) - self._yaml_content = None - - def update_pkg_info(self): - """ - Download the contents of the yaml file - - """ - if self._openeuler_advisor_exists_yaml(): - self._save_to_database() - else: - msg = "The yaml information of the %s package has not been\ - obtained yet" % self.pkg.name - self.base.log.logger.warning(msg) - - def _read_yaml_content(self, url): - """ - - """ - try: - response = requests.get( - url, headers=self.base.headers) - if response.status_code == 200: - self._yaml_content = yaml.safe_load(response.content) - - except HTTPError as error: - self.base.log.logger.error(error) - - def _openeuler_advisor_exists_yaml(self): - """ - Determine whether there is a yaml file with the current \ - package 
name under the openeuler-advisor project - - """ - self._read_yaml_content(self.openeuler_advisor_url) - if self._yaml_content: - return True - return False - - def _save_to_database(self): - """ - Save the acquired yaml file information to the database - - Raises: - ContentNoneException: The added entity content is empty - Error: An error occurred during data addition - """ - self._parse_warehouse_info() - tags = self._yaml_content.get('git_tag', None) - self._parse_tags_content(tags) - # Save data to the database - # issue_list = Gitee(self.pkg, self._table_name, self._owner, - # self._repo).execute_request_content_save() - issue_list = [] - try: - with DBHelper(db_name="lifecycle") as database: - database.add(self.pkg) - self._save_issues(issue_list, database) - except (Error, ContentNoneException, SQLAlchemyError) as error: - self.base.log.logger.error(error) - - def _save_issues(self, issue_list, database): - """ - Save the obtained issue information - - """ - issue_ids = [issue['issue_id'] for issue in issue_list] - exist_issues = database.session.query(packages_issue).filter( - packages_issue.issue_id.in_(issue_ids)).all() # pylint: disable=protected-access - add_issue_list = [] - for issue_item in issue_list: - issue_model = [ - issue for issue in exist_issues if issue.issue_id == issue_item['issue_id']] - if issue_model: - for key, val in issue_item.items(): - setattr(issue_model[0], key, val) - else: - add_issue_list.append(issue_item) - database.batch_add(add_issue_list, packages_issue) - - def _parse_warehouse_info(self): - """ - Parse the warehouse information in the yaml file - - """ - if self._yaml_content: - self.pkg.version_control = self._yaml_content.get( - 'version_control') - self.pkg.src_repo = self._yaml_content.get('src_repo') - self.pkg.tag_prefix = self._yaml_content.get('tag_prefix') - - def _parse_tags_content(self, tags): - """ - Parse the obtained tags content - - """ - try: - # Integrate tags information into key-value pairs - _tags = [(tag.split()[0], tag.split()[1]) for tag in tags] - _tags = sorted(_tags, key=lambda x: x[0], reverse=True) - self.pkg.latest_version = _tags[0][1] - self.pkg.latest_version_time = _tags[0][0] - _end_time = self.base.format_date( - self.pkg.latest_version_time, month=6) - if self.pkg.latest_version != self.pkg.version: - _end_time = self.base.format_date( - self.pkg.latest_version_time, month=3) - self.pkg.maintainer_status = self.base.pkg_status( - _end_time) - self.pkg.end_time = _end_time.strftime("%Y-%m-%d") - - except (IndexError,) as index_error: - self.base.log.logger.error(index_error) - - -def update_pkg_info(): - """ - Update the information of the upstream warehouse in the source package - - """ - try: - base_control = Base() - pool = ThreadPoolExecutor(max_workers=10) - with DBHelper(db_name="lifecycle") as database: - for table_name in filter(lambda x: x != 'packages_issue', - database.engine.table_names()): - cls_model = type("packages", (packages, DBHelper.BASE), { - '__tablename__': table_name}) - # Query a specific table - for package_item in database.session.query(cls_model).all(): - parse_yaml = ParseYaml( - pkg_info=package_item, base=base_control, table_name=table_name) - pool.submit(parse_yaml.update_pkg_info) - pool.shutdown() - except SQLAlchemyError as error_msg: - base_control.log.logger.error(error_msg) diff --git a/packageship/packageship/application/apps/lifecycle/serialize.py b/packageship/packageship/application/apps/lifecycle/serialize.py deleted file mode 100644 index ac6c4cec..00000000 --- 
a/packageship/packageship/application/apps/lifecycle/serialize.py +++ /dev/null @@ -1,43 +0,0 @@ -#!/usr/bin/python3 -""" -Description: marshmallow serialize -""" -from marshmallow import Schema -from marshmallow import fields -from marshmallow import validate -from packageship.application.models.package import packages_issue, packages - - -class IssueSchema(Schema): - """ - Description: IssueSchema serialize - """ - # openeuler 20 - tableName = fields.Str( - required=True, validate=validate.Length(min=1, max=200)) - # repo - packageName = fields.Str(validate=validate.Length( - max=200), required=False, allow_none=True) - page = fields.Integer(required=True) - per_page = fields.Integer(required=True) - - -class IssueDownloadSchema(Schema): - """ - Field serialization for issue file download - """ - class Meta: # pylint: disable=missing-class-docstring - model = packages_issue - fields = ('issue_id', 'issue_url', 'issue_content', - 'issue_title', 'issue_status', 'name', 'issue_type', 'related_release') - - -class PackagesDownloadSchema(Schema): - """ - Field serialization for package file download - """ - class Meta: # pylint: disable=missing-class-docstring - model = packages - fields = ('name', 'url', 'rpm_license', 'version', 'release', 'release_time', - 'end_time', 'maintainer_status', 'latest_version', 'latest_version_time', - 'demand', 'cve', 'defect', 'maintainer', 'maintainlevel', 'feature') diff --git a/packageship/packageship/application/apps/lifecycle/url.py b/packageship/packageship/application/apps/lifecycle/url.py deleted file mode 100644 index 387bacda..00000000 --- a/packageship/packageship/application/apps/lifecycle/url.py +++ /dev/null @@ -1,13 +0,0 @@ -#!/usr/bin/python3 -""" -Life cycle of url giant whale collection -""" -from . import view - - -urls = [ - (view.DownloadFile, '/lifeCycle/download/', {'query': ('GET')}), - (view.MaintainerView, '/lifeCycle/maintainer', {'query': ('GET')}), - (view.TableColView, '/packages/tablecol', {'query': ('GET')}), - -] diff --git a/packageship/packageship/application/apps/lifecycle/view.py b/packageship/packageship/application/apps/lifecycle/view.py deleted file mode 100644 index 7c5a8242..00000000 --- a/packageship/packageship/application/apps/lifecycle/view.py +++ /dev/null @@ -1,206 +0,0 @@ -#!/usr/bin/python3 -""" -Life cycle related api interface -""" -import io -import pandas as pd -from flask import request -from flask import jsonify, make_response -from flask import current_app -from flask_restful import Resource -from sqlalchemy.exc import DisconnectionError, SQLAlchemyError -from packageship.libs.exception import Error -from packageship.application.apps.package.function.constants import ResponseCode -from packageship.libs.dbutils.sqlalchemy_helper import DBHelper -from packageship.application.models.package import packages_issue, packages -from .serialize import IssueDownloadSchema, PackagesDownloadSchema - - -def meta_model(table_name): - """ - The mapping relationship of the orm model - """ - model = type("packages", (packages, DBHelper.BASE), { - '__tablename__': table_name}) - return model - - -class DownloadFile(Resource): - """ - Download the content of the issue or the excel file of the package content - """ - - def _download_excel(self, file_type, table_name=None): - """ - Download excel file - """ - file_name = 'packages.xlsx' - if file_type == 'packages': - download_content = self.__get_packages_content(table_name) - else: - file_name = 'issues.xlsx' - download_content = self.__get_issues_content() - if 
download_content is None: - return jsonify(ResponseCode.response_json(ResponseCode.SERVICE_ERROR)) - pd_dataframe = self.__to_dataframe(download_content) - - _response = self.__bytes_save(pd_dataframe) - return self.__set_response_header(_response, file_name) - - def __bytes_save(self, data_frame): - """ - Save the file content in the form of a binary file stream - """ - try: - bytes_io = io.BytesIO() - writer = pd.ExcelWriter( # pylint: disable=abstract-class-instantiated - bytes_io, engine='xlsxwriter') - data_frame.to_excel(writer, sheet_name='结果汇总', index=False) - writer.save() - writer.close() - bytes_io.seek(0) - _response = make_response(bytes_io.getvalue()) - bytes_io.close() - return _response - except (IOError, Error) as io_error: - current_app.logger.error(io_error) - return make_response() - - def __set_response_header(self, response, file_name): - """ - Set http response header information - """ - response.headers['Content-Type'] = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet" - response.headers["Cache-Control"] = "no-cache" - response.headers['Content-Disposition'] = 'attachment; filename={file_name}'.format( - file_name=file_name) - return response - - def __get_packages_content(self, table_name): - """ - Get package list information - """ - try: - with DBHelper(db_name='lifecycle') as database: - # Query all package data in the specified table - _model = meta_model(table_name) - _packageinfos = database.session.query(_model).all() - packages_dicts = PackagesDownloadSchema( - many=True).dump(_packageinfos) - return packages_dicts - - except (SQLAlchemyError, DisconnectionError) as error: - current_app.logger.error(error) - return None - - def __get_issues_content(self): - """ - Get the list of issues - """ - try: - with DBHelper(db_name='lifecycle') as database: - _issues = database.session.query(packages_issue).all() - idsues_dicts = IssueDownloadSchema(many=True).dump(_issues) - return idsues_dicts - except (SQLAlchemyError, DisconnectionError) as error: - current_app.logger.error(error) - return None - - def __to_dataframe(self, datas): - """ - Convert the obtained information into pandas content format - """ - - data_frame = pd.DataFrame(datas) - return data_frame - - def get(self, file_type): - """ - Download package collection information and isse list information - - """ - if file_type not in ['packages', 'issues']: - return jsonify(ResponseCode.response_json(ResponseCode.PARAM_ERROR)) - - table_name = request.args.get('table_name', None) - response = self._download_excel(file_type, table_name) - return response - - -class MaintainerView(Resource): - """ - Maintainer name collection - """ - - def __query_maintainers(self, table_name): - """ - Query the names of all maintainers in the specified table - """ - try: - with DBHelper(db_name='lifecycle') as database: - model = meta_model(table_name) - maintainers = database.session.query( - model.maintainer).group_by(model.maintainer).all() - return [maintainer_item[0] for maintainer_item in maintainers] - except (SQLAlchemyError, DisconnectionError) as error: - current_app.logger.error(error) - return [] - - def get(self): - """ - Get the list of maintainers - """ - table_name = request.args.get('table_name', None) - if not table_name: - return jsonify(ResponseCode.response_json(ResponseCode.PARAM_ERROR)) - # Group query of the names of all maintainers in the current table - maintainers = self.__query_maintainers(table_name) - return jsonify(ResponseCode.response_json(ResponseCode.SUCCESS, 
maintainers)) - - -class TableColView(Resource): - """ - The default column of the package shows the interface - """ - - def __columns_names(self): - """ - Mapping of column name and title - """ - columns = [ - ('name', 'Name', True), - ('version', 'Version', True), - ('release', 'Release', True), - ('url', 'Url', True), - ('linense', 'License', False), - ('feature', 'Feature', False), - ('maintainer', 'Maintainer', True), - ('maintainlevel', 'Maintenance Level', True), - ('release_time', 'Release Time', False), - ('end_of_lifecycle', 'End of Life Cycle', True), - ('maintainer_status', 'Maintain Status', True), - ('latest_version', 'Latest Version', False), - ('latest_version_time', 'Latest Version Release Time', False), - ('issue', 'Issue', True)] - return columns - - def __columns_mapping(self): - """ - - """ - columns = list() - for column in self.__columns_names(): - columns.append({ - 'column_name': column[0], - 'label': column[1], - 'default_selected': column[2] - }) - return columns - - def get(self): - """ - Get the default display column of the package - - """ - table_mapping_columns = self.__columns_mapping() - return jsonify(ResponseCode.response_json(ResponseCode.SUCCESS, table_mapping_columns)) diff --git a/packageship/packageship/application/initsystem/data_import.py b/packageship/packageship/application/initsystem/data_import.py index cb326309..03f1e7ea 100644 --- a/packageship/packageship/application/initsystem/data_import.py +++ b/packageship/packageship/application/initsystem/data_import.py @@ -19,6 +19,7 @@ from packageship.application.models.package import bin_pack from packageship.application.models.package import bin_requires from packageship.application.models.package import src_requires from packageship.application.models.package import bin_provides +from packageship.application.models.package import maintenance_info from packageship import system_config LOGGER = Log(__name__) @@ -112,6 +113,20 @@ class InitDataBase(): raise IOError( 'An error occurred while deleting the database configuration file') + # Create a database maintained by benchmark information + if self.db_type == 'mysql': + MysqlDatabaseOperations( + db_name='maintenance.information', + tables=['maintenance_info'], + is_datum=True).create_database() + else: + SqliteDatabaseOperations( + db_name='maintenance.information', + tables=['maintenance_info'], + is_datum=True).create_database() + # Obtain the maintenance information of the previous data of the system + self._get_maintenance_info() + for database in self.config_file_datas: if not database.get('dbname'): LOGGER.logger.error( @@ -303,6 +318,12 @@ class InitDataBase(): raise ContentNoneException( '{db_name}:There is no relevant data in the source \ package provided '.format(db_name=db_name)) + for index, src_package_item in enumerate(packages_datas): + maintaniner, maintainlevel = self._get_mainter_info( + src_package_item.get('name'), src_package_item.get('version')) + packages_datas[index]['maintaniner'] = maintaniner + packages_datas[index]['maintainlevel'] = maintainlevel + with DBHelper(db_name=db_name) as database: database.batch_add(packages_datas, src_pack) @@ -398,6 +419,52 @@ class InitDataBase(): with DBHelper(db_name=db_name) as database: database.batch_add(provides_datas, bin_provides) + def _get_maintenance_info(self): + """ + Description: Obtain the information of the maintainer + + Returns: + Maintainer related information + Raises: + SQLAlchemyError: An error occurred while executing the sql statement + """ + try: + with 
DBHelper(db_name='maintenance.information') as database: + for info in database.session.query(maintenance_info).all(): + if info.name not in self.mainter_infos.keys(): + self.mainter_infos[info.name] = [] + self.mainter_infos[info.name].append({ + 'version': info.version, + 'maintaniner': info.maintaniner, + 'maintainlevel': info.maintainlevel + }) + except SQLAlchemyError as sql_error: + LOGGER.logger.error(sql_error) + + def _get_mainter_info(self, src_package_name, version): + ''' + Get the maintainer information of the source package + + Args: + src_package_name: Source package name + version: Source package version number + Returns: + Maintainer's name + Raises: + + ''' + maintenance_infos = self.mainter_infos.get(src_package_name) + maintaniner = None + if maintenance_infos: + for maintenance_item in maintenance_infos: + if maintenance_item.get('version') == version: + maintaniner = (maintenance_item.get( + 'maintaniner'), maintenance_item.get('maintainlevel')) + break + if maintaniner is None: + maintaniner = (None, None) + return maintaniner + def __exists_repeat_database(self): """ Determine if the same database name exists @@ -506,25 +573,6 @@ class InitDataBase(): return del_result - def create_life_cycle_db(self, db_name, tables=None): - """ - Create databases and tables related to the package life cycle - - Args: - db_name: The name of the database - tables: Table to be created - """ - database_engine = SqliteDatabaseOperations( - db_name=db_name, - tables=tables, - is_datum=True) - if self.db_type == 'mysql': - database_engine = MysqlDatabaseOperations( - db_name=db_name, - tables=tables, - is_datum=True) - return database_engine.create_database() - class MysqlDatabaseOperations(): """ @@ -568,10 +616,7 @@ class MysqlDatabaseOperations(): if not self.is_datum: data_base.session.execute(self.drop_database_sql) data_base.session.execute(self.create_database_sql) - except InternalError as internal_error: - LOGGER.logger.info(internal_error) - return True - except SQLAlchemyError as exception_msg: + except (SQLAlchemyError, InternalError) as exception_msg: LOGGER.logger.error(exception_msg) return False else: @@ -616,9 +661,7 @@ class MysqlDatabaseOperations(): try: with DBHelper(db_name=self.db_name) as database: if self.tables: - _tables = list(set(self.tables).difference( - set(database.engine.table_names()))) - database.create_table(_tables) + database.create_table(self.tables) except SQLAlchemyError as exception_msg: LOGGER.logger.error(exception_msg) @@ -691,17 +734,14 @@ class SqliteDatabaseOperations(): _db_file = os.path.join( self.database_file_folder, self.db_name) - if not self.is_datum and os.path.exists(_db_file + '.db'): + if os.path.exists(_db_file + '.db'): os.remove(_db_file + '.db') # create a sqlite database if (self.is_datum and not os.path.exists(_db_file + '.db')) or not self.is_datum: with DBHelper(db_name=_db_file) as database: try: - if self.tables: - _tables = list(set(self.tables).difference( - set(database.engine.table_names()))) - database.create_table(_tables) + database.create_table(self.tables) except (SQLAlchemyError, InternalError) as create_table_err: LOGGER.logger.error(create_table_err) return None diff --git a/packageship/packageship/application/models/package.py b/packageship/packageship/application/models/package.py index 1413b516..725f4af7 100644 --- a/packageship/packageship/application/models/package.py +++ b/packageship/packageship/application/models/package.py @@ -2,7 +2,7 @@ """ Description: Database entity model mapping """ -from 
sqlalchemy import Column, Integer, String, Text +from sqlalchemy import Column, Integer, String from packageship.libs.dbutils.sqlalchemy_helper import DBHelper @@ -141,47 +141,3 @@ class maintenance_info(DBHelper.BASE): # pylint: disable=C0103,R0903 maintaniner = Column(String(100), nullable=True) maintainlevel = Column(String(100), nullable=True) - - -class packages(): # pylint: disable=C0103,R0903 - """ - Source code package version, issuer and other information - """ - __table_args__ = {'extend_existing': True} - id = Column(Integer, primary_key=True) - name = Column(String(500), nullable=True) - url = Column(String(500), nullable=True) - rpm_license = Column(String(500), nullable=True) - version = Column(String(200), nullable=True) - release = Column(String(200), nullable=True) - release_time = Column(String(50), nullable=True) - end_time = Column(String(50), nullable=True) - maintainer_status = Column(String(20), nullable=True, default="Available") - latest_version = Column(String(200), nullable=True) - latest_version_time = Column(String(50), nullable=True) - demand = Column(Integer, default=0) - cve = Column(Integer, default=0) - defect = Column(Integer, default=0) - maintainer = Column(String(200), nullable=True) - maintainlevel = Column(Integer, nullable=True) - feature = Column(String(500), nullable=True) - version_control = Column(String(50), nullable=True) - src_repo = Column(String(500), nullable=True) - tag_prefix = Column(String(20), nullable=True) - - -class packages_issue(DBHelper.BASE): # pylint: disable=C0103,R0903 - """ - Source package issue - """ - __tablename__ = "packages_issue" - id = Column(Integer, primary_key=True) - issue_id = Column(String(50), nullable=True) - issue_url = Column(String(500), nullable=True) - issue_content = Column(Text, nullable=True) - issue_title = Column(String(1000), nullable=True) - issue_status = Column(String(20), nullable=True) - name = Column(String(500), nullable=False) - issue_download = Column(String(500), nullable=False) - issue_type = Column(String(50), nullable=True) - related_release = Column(String(500), nullable=True) diff --git a/packageship/packageship/pkgship.py b/packageship/packageship/pkgship.py index a80d0d45..fd2b3a24 100644 --- a/packageship/packageship/pkgship.py +++ b/packageship/packageship/pkgship.py @@ -143,7 +143,7 @@ class PkgshipCommand(BaseCommand): ['package name', 'src name', 'version', 'database']) # Calculate the total width of the current terminal - # self.columns = int(os.popen('stty size', 'r').read().split()[1]) + self.columns = int(os.popen('stty size', 'r').read().split()[1]) self.params = [] @staticmethod @@ -173,7 +173,6 @@ class PkgshipCommand(BaseCommand): for command_params in self.params: self.parse.add_argument( # pylint: disable=E1101 command_params[0], - # type=eval(command_params[1]), # pylint: disable=W0123 help=command_params[2], default=command_params[3], action=command_params[4]) @@ -199,8 +198,6 @@ class PkgshipCommand(BaseCommand): cls.register_command(SelfBuildCommand()) cls.register_command(BeDependCommand()) cls.register_command(SingleCommand()) - cls.register_command(IssueCommand()) - cls.register_command(ImportCommand()) try: args = cls.parser.parse_args() args.func(args) @@ -1205,8 +1202,6 @@ class SingleCommand(PkgshipCommand): ('-db', 'str', 'name of the database operated', '', 'store'), ('-remote', 'str', 'The address of the remote service', False, 'store_true') ] - self.provides_table = self.create_table(['Symbol', 'Required by']) - self.requires_table = 
self.create_table(['Symbol', 'Provides by']) def register(self): """ @@ -1221,42 +1216,7 @@ class SingleCommand(PkgshipCommand): super(SingleCommand, self).register() self.parse.set_defaults(func=self.do_command) - def __parse_package_detail(self, response_data): - """ - - """ - _show_field_name = ('pkg_name', 'version', 'release', 'url', 'license', 'feature', - 'maintainer', 'maintainlevel', 'gitee_url', 'issue', 'summary', - 'description', 'buildrequired') - _package_detail_info = response_data.get('data') - _line_content = [] - if _package_detail_info: - for key, value in _package_detail_info.items(): - if value is None: - value = '' - # buildrequired value - if isinstance(value, list): - value = '、'.join(value) - if key in _show_field_name: - _line_content.append('%-15s:%s' % (key, value)) - for content in _line_content: - self.print_(content=content) - - def __parse_provides(self, subpacks): - """ - - """ - if subpacks and isinstance(subpacks, list): - for _subpack in subpacks: - pass - - def __parse_requires(self, subpacks): - """ - - """ - pass - - def __parse_package(self, response_data): + def parse_package(self, response_data): """ Description: Parse the corresponding data of the package Args: @@ -1266,81 +1226,6 @@ class SingleCommand(PkgshipCommand): Raises: """ - - if response_data.get('code') == ResponseCode.SUCCESS: - self.__parse_package_detail(response_data) - try: - _subpacks = response_data['data']['subpack'] - self.__parse_provides(_subpacks) - self.__parse_requires(_subpacks) - except KeyError as key_error: - LOGGER.logger.error(key_error) - else: - print(response_data.get('msg')) - - def do_command(self, params): - """ - Description: Action to execute command - Args: - params: command lines params - Returns: - - Raises: - ConnectionError: requests connection error - """ - self._set_read_host(params.remote) - _url = self.read_host + \ - '/packages/packageInfo?table_name={db_name}&pkg_name={packagename}' \ - .format(db_name=params.db, packagename=params.packagename) - try: - response = requests.get(_url) - except ConnErr as conn_error: - LOGGER.logger.error(conn_error) - print(str(conn_error)) - else: - if response.status_code == 200: - self.__parse_package(json.loads(response.text)) - else: - self.http_error(response) - - -class IssueCommand(PkgshipCommand): - """ - Description: Get the issue list - Attributes: - parse: Command line parsing example - params: Command line parameters - """ - - def __init__(self): - """ - Description: Class instance initialization - """ - super(IssueCommand, self).__init__() - - self.parse = PkgshipCommand.subparsers.add_parser( - 'issue', help='Query the issue list of the specified package') - self.params = [ - ('packagename', 'str', 'source package name', '', 'store'), - ('-db', 'str', 'name of the database operated', '', 'store'), - ('-remote', 'str', 'The address of the remote service', False, 'store_true') - ] - - def register(self): - """ - Description: Command line parameter injection - - """ - super(IssueCommand, self).register() - self.parse.set_defaults(func=self.do_command) - - def parse_package(self, response_data): - """ - Description: Parse the corresponding data of the package - - Args: - response_data: http response data - """ show_field_name = ('sourceName', 'dbname', 'version', 'license', 'maintainer', 'maintainlevel') print_contents = [] @@ -1373,7 +1258,7 @@ class IssueCommand(PkgshipCommand): """ self._set_read_host(params.remote) _url = self.read_host + \ - 
'/packages/issueTrace?dbName={db_name}&sourceName={packagename}' \ + '/packages/packageInfo?dbName={db_name}&sourceName={packagename}' \ .format(db_name=params.db, packagename=params.packagename) try: response = requests.get(_url) @@ -1387,65 +1272,5 @@ class IssueCommand(PkgshipCommand): self.http_error(response) -class ImportCommand(PkgshipCommand): - """ - Description: Import package information in the life cycle - Attributes: - parse: Command line parsing example - params: Command line parameters - """ - - def __init__(self): - """ - Description: Class instance initialization - """ - super(ImportCommand, self).__init__() - - self.parse = PkgshipCommand.subparsers.add_parser( - 'import', help='Import package information in the life cycle') - self.params = [ - ('tablename', 'str', 'The name of the table to be created', '', 'store'), - ('filepath', 'str', 'Imported sqlite file path', '', 'store'), - ] - - def register(self): - """ - Description: Command line parameter injection - - """ - super(ImportCommand, self).register() - self.parse.set_defaults(func=self.do_command) - - def do_command(self, params): - """ - Description: Action to execute command - Args: - params: command lines params - Returns: - - Raises: - ConnectionError: requests connection error - """ - _url = self.write_host + '/lifeCycle/importdata' - try: - response = requests.post( - _url, - data=json.dumps( - {'filepath': params.filepath, 'tablename': params.tablename}), - headers=self.headers) - except ConnErr as conn_error: - LOGGER.logger.error(conn_error) - print(str(conn_error)) - else: - if response.status_code == 200: - _response_content = json.loads(response.text) - if _response_content.get('code') == ResponseCode.SUCCESS: - print('import success') - else: - print('import failure') - else: - self.http_error(response) - - if __name__ == '__main__': main() diff --git a/packageship/packageship/selfpkg.py b/packageship/packageship/selfpkg.py index 4b9a1028..f748eb3f 100644 --- a/packageship/packageship/selfpkg.py +++ b/packageship/packageship/selfpkg.py @@ -24,8 +24,6 @@ except Error as error: raise Exception('Service failed to start') else: from packageship.application.app_global import identity_verification - from packageship.application.apps.lifecycle.function import start_tasks - start_tasks() @app.before_request -- Gitee From f7003f3fba08b173e12bcbcd7d6c11ab6df2d35c Mon Sep 17 00:00:00 2001 From: gongzt Date: Mon, 10 Aug 2020 11:12:40 +0800 Subject: [PATCH 15/19] Call remote api service from command line --- .../packageship/application/__init__.py | 6 --- .../packageship/application/settings.py | 2 - packageship/packageship/pkgship.py | 46 +++---------------- packageship/packageship/system_config.py | 2 +- 4 files changed, 8 insertions(+), 48 deletions(-) diff --git a/packageship/packageship/application/__init__.py b/packageship/packageship/application/__init__.py index 8b65f98d..bc3a6316 100644 --- a/packageship/packageship/application/__init__.py +++ b/packageship/packageship/application/__init__.py @@ -4,7 +4,6 @@ """ from flask import Flask from flask_session import Session -from flask_apscheduler import APScheduler from packageship.application.settings import Config from packageship.libs.log import setup_log @@ -24,11 +23,6 @@ def init_app(operation): app.config.from_object(Config) - # Register a scheduled task - scheduler = APScheduler() - scheduler.init_app(app) - scheduler.start() - # Open session function Session(app) diff --git a/packageship/packageship/application/settings.py 
b/packageship/packageship/application/settings.py index f6516125..bc090439 100644 --- a/packageship/packageship/application/settings.py +++ b/packageship/packageship/application/settings.py @@ -19,8 +19,6 @@ class Config(): LOG_LEVEL = 'INFO' - SCHEDULER_API_ENABLED = True - def __init__(self): self._read_config = ReadConfig() diff --git a/packageship/packageship/pkgship.py b/packageship/packageship/pkgship.py index fd2b3a24..ae8e0077 100644 --- a/packageship/packageship/pkgship.py +++ b/packageship/packageship/pkgship.py @@ -550,11 +550,6 @@ class AllPackageCommand(PkgshipCommand): self.params = [('-db', 'str', 'name of the database operated', '', 'store'), ('-remote', 'str', 'The address of the remote service', False, 'store_true'), - ('-pkgname', 'str', - 'Package name that needs fuzzy matching', '', 'store'), - ('-maintainner', 'str', 'Maintainer\'s name', '', 'store'), - ('-maintainlevel', 'str', - 'Maintain the level of data', '', 'store'), ] def register(self): @@ -570,26 +565,6 @@ class AllPackageCommand(PkgshipCommand): super(AllPackageCommand, self).register() self.parse.set_defaults(func=self.do_command) - def __parse_package(self, response_data, table_name): - """ - Description: Parse the corresponding data of the package - Args: - response_data: http request response content - Returns: - - Raises: - - """ - if response_data.get('code') == ResponseCode.SUCCESS: - package_all = response_data.get('data') - if isinstance(package_all, list): - for package_item in package_all: - row_data = [package_item.get('name'), table_name, package_item.get( - 'version'), package_item.get('license')] - self.table.add_row(row_data) - else: - print(response_data.get('msg')) - def do_command(self, params): """ Description: Action to execute command @@ -602,12 +577,7 @@ class AllPackageCommand(PkgshipCommand): """ self._set_read_host(params.remote) _url = self.read_host + \ - '/packages?table_name={table_name}&query_pkg_name={pkg_name}& \ - maintainner={maintainner}&maintainlevel={maintainlevel}'.format( - table_name=params.db, - pkg_name=params.pkgname, - maintainner=params.maintainner, - maintainlevel=params.maintainlevel) + '/packages?dbName={dbName}'.format(dbName=params.db) try: response = requests.get(_url) except ConnErr as conn_error: @@ -616,7 +586,7 @@ class AllPackageCommand(PkgshipCommand): else: if response.status_code == 200: - self.__parse_package(json.loads(response.text), params.db) + self.parse_package(json.loads(response.text)) if self.table: print(self.table) else: @@ -642,9 +612,8 @@ class UpdatePackageCommand(PkgshipCommand): self.params = [ ('packagename', 'str', 'Source package name', '', 'store'), ('db', 'str', 'name of the database operated', '', 'store'), - ('-maintainer', 'str', 'Maintainers name', '', 'store'), - ('-maintainlevel', 'int', 'database priority', 1, 'store'), - ('-endoflife', 'str', 'package expiry date', '', 'store') + ('-m', 'str', 'Maintainers name', '', 'store'), + ('-l', 'int', 'database priority', 1, 'store'), ] def register(self): @@ -670,14 +639,13 @@ class UpdatePackageCommand(PkgshipCommand): Raises: ConnectionError: Request connection error """ - _url = self.write_host + '/lifeCycle/updatePkgInfo' + _url = self.write_host + '/packages/packageInfo' try: response = requests.put( _url, data=json.dumps({'pkg_name': params.packagename, 'table_name': params.db, - 'maintainer': params.maintainer, - 'maintainlevel': params.maintainlevel, - 'end_of_life': params.endoflife}), + 'maintainer': params.m, + 'maintainlevel': params.l}), headers=self.headers) 
except ConnErr as conn_error: LOGGER.logger.error(conn_error) diff --git a/packageship/packageship/system_config.py b/packageship/packageship/system_config.py index c5952ec1..14de44a9 100644 --- a/packageship/packageship/system_config.py +++ b/packageship/packageship/system_config.py @@ -30,4 +30,4 @@ DATABASE_FOLDER_PATH = os.path.join('/', 'var', 'run', 'pkgship_dbs') # If the directory of log storage is not configured, # it will be stored in the following directory specified by the system by default -LOG_FOLDER_PATH = os.path.join('/', 'var', 'log', 'logs') +LOG_FOLDER_PATH = os.path.join('/', 'var', 'log', 'pkgship') -- Gitee From c7ba5688b261fd953062f4c623869ceca2ee9936 Mon Sep 17 00:00:00 2001 From: gongzt Date: Mon, 10 Aug 2020 11:19:12 +0800 Subject: [PATCH 16/19] Receiving parameter modification of update package information --- packageship/packageship/pkgship.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packageship/packageship/pkgship.py b/packageship/packageship/pkgship.py index ae8e0077..d5308e5f 100644 --- a/packageship/packageship/pkgship.py +++ b/packageship/packageship/pkgship.py @@ -642,8 +642,8 @@ class UpdatePackageCommand(PkgshipCommand): _url = self.write_host + '/packages/packageInfo' try: response = requests.put( - _url, data=json.dumps({'pkg_name': params.packagename, - 'table_name': params.db, + _url, data=json.dumps({'sourceName': params.packagename, + 'dbName': params.db, 'maintainer': params.m, 'maintainlevel': params.l}), headers=self.headers) -- Gitee From c73437c8a5c3a2f80ce6a459f88c69376ac30e48 Mon Sep 17 00:00:00 2001 From: gongzhengtang Date: Mon, 10 Aug 2020 13:42:10 +0800 Subject: [PATCH 17/19] Comment out the test run in the spec file MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- packageship/pkgship.spec | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packageship/pkgship.spec b/packageship/pkgship.spec index 860f3624..7462bf44 100644 --- a/packageship/pkgship.spec +++ b/packageship/pkgship.spec @@ -26,7 +26,7 @@ Pkgship implements rpm package dependence ,maintainer, patch query and so no. %check -%{__python3} -m unittest test/run_tests.py +#%{__python3} -m unittest test/run_tests.py %post #build cli bin -- Gitee From d487dc39dff3819ce1135e956ae2dc10ee82e570 Mon Sep 17 00:00:00 2001 From: gongzhengtang Date: Mon, 10 Aug 2020 15:28:54 +0800 Subject: [PATCH 18/19] Add change log content to the spec file MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- packageship/pkgship.spec | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/packageship/pkgship.spec b/packageship/pkgship.spec index 7462bf44..8849aa96 100644 --- a/packageship/pkgship.spec +++ b/packageship/pkgship.spec @@ -1,6 +1,6 @@ Name: pkgship Version: 1.0 -Release: 4 +Release: 5 Summary: Pkgship implements rpm package dependence ,maintainer, patch query and so no. License: Mulan 2.0 URL: https://gitee.com/openeuler/openEuler-Advisor @@ -26,7 +26,7 @@ Pkgship implements rpm package dependence ,maintainer, patch query and so no.
%check -#%{__python3} -m unittest test/run_tests.py +%{__python3} -m unittest test/run_tests.py %post #build cli bin @@ -61,6 +61,9 @@ rm -rf %{python3_sitelib}/packageship/build %{python3_sitelib}/packageship/dist %changelog +* Mon Aug 10 2020 Yiru Wang - 1.0-5 +- Command line supports calling remote services + * Wed Aug 5 2020 Yiru Wang - 1.0-4 - change Requires rpm pakcages' name to latest one -- Gitee From 009369c5e106d823614580cca710c9c623347e17 Mon Sep 17 00:00:00 2001 From: gongzhengtang Date: Mon, 10 Aug 2020 15:34:21 +0800 Subject: [PATCH 19/19] Change the maintainer name in the spec file change log MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- packageship/pkgship.spec | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packageship/pkgship.spec b/packageship/pkgship.spec index 8849aa96..e837b546 100644 --- a/packageship/pkgship.spec +++ b/packageship/pkgship.spec @@ -61,7 +61,7 @@ rm -rf %{python3_sitelib}/packageship/build %{python3_sitelib}/packageship/dist %changelog -* Mon Aug 10 2020 Yiru Wang - 1.0-5 +* Mon Aug 10 2020 Zhengtang Gong - 1.0-5 - Command line supports calling remote services * Wed Aug 5 2020 Yiru Wang - 1.0-4 -- Gitee
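
For reference, the changelog entry above ("Command line supports calling remote services") corresponds to the pkgship.py hunks in patches 15 and 16. The sketch below only illustrates the two request shapes the reworked command line sends; the endpoint paths and JSON keys are taken from the diffs, while the host addresses, headers and sample values are invented placeholders, not part of the patch series.

# Rough, self-contained sketch of the remote calls made by the reworked
# command line (AllPackageCommand and UpdatePackageCommand). Only the
# endpoint paths and JSON keys follow the diffs; hosts, headers and
# sample values are placeholders.
import json

import requests

READ_HOST = 'http://127.0.0.1:8090'    # placeholder for the configured read_host
WRITE_HOST = 'http://127.0.0.1:8080'   # placeholder for the configured write_host
HEADERS = {'Content-Type': 'application/json'}


def query_all_packages(db_name):
    """GET /packages?dbName=<db>: list every package in one database."""
    _url = READ_HOST + '/packages?dbName={dbName}'.format(dbName=db_name)
    try:
        response = requests.get(_url)
    except requests.exceptions.ConnectionError as conn_error:
        print(str(conn_error))
        return None
    return response.json() if response.status_code == 200 else None


def update_package_info(pkg_name, db_name, maintainer, maintainlevel):
    """PUT /packages/packageInfo: update maintainer and maintenance level."""
    _url = WRITE_HOST + '/packages/packageInfo'
    try:
        response = requests.put(
            _url, data=json.dumps({'sourceName': pkg_name,
                                   'dbName': db_name,
                                   'maintainer': maintainer,
                                   'maintainlevel': maintainlevel}),
            headers=HEADERS)
    except requests.exceptions.ConnectionError as conn_error:
        print(str(conn_error))
        return False
    return response.status_code == 200


if __name__ == '__main__':
    print(query_all_packages('openEuler-20.03-LTS'))                    # sample database name
    print(update_package_info('dnf', 'openEuler-20.03-LTS', 'tom', 2))  # sample values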
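
Likewise, a standalone sketch of the maintainer lookup that the data_import.py hunks add to InitDataBase: rows read from the maintenance_info table are grouped by source package name, and each source package is then matched by exact version, falling back to (None, None). The dictionary keys (including the 'maintaniner' spelling) mirror the diff; the sample rows are invented.

# Sketch of the maintainer lookup used during data import: group
# maintenance_info rows by package name, then resolve a package's
# maintainer by exact version match. Sample data is made up; key
# spelling 'maintaniner' mirrors the column name in the diff.
def group_maintenance_rows(rows):
    """rows: iterable of (name, version, maintaniner, maintainlevel) tuples."""
    grouped = {}
    for name, version, maintaniner, maintainlevel in rows:
        grouped.setdefault(name, []).append({
            'version': version,
            'maintaniner': maintaniner,
            'maintainlevel': maintainlevel})
    return grouped


def get_mainter_info(grouped, src_package_name, version):
    """Return (maintainer, maintainlevel) for an exact version match, else (None, None)."""
    for item in grouped.get(src_package_name, []):
        if item.get('version') == version:
            return item.get('maintaniner'), item.get('maintainlevel')
    return None, None


_rows = [('dnf', '4.2.15', 'tom', '2'), ('dnf', '4.2.23', 'jerry', '1')]
_grouped = group_maintenance_rows(_rows)
print(get_mainter_info(_grouped, 'dnf', '4.2.23'))   # ('jerry', '1')
print(get_mainter_info(_grouped, 'rpm', '4.15.1'))   # (None, None)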