diff --git a/packageship/packageship/application/apps/package/function/be_depend.py b/packageship/packageship/application/apps/package/function/be_depend.py index 84654af72ec420727fa35fd132f25fd6460305d0..da50f0d4c6db7af261aec6302d0c4e2d6e7c8140 100644 --- a/packageship/packageship/application/apps/package/function/be_depend.py +++ b/packageship/packageship/application/apps/package/function/be_depend.py @@ -70,68 +70,54 @@ class BeDepend(): ] self.source_name_set.add(self.source_name) self.package_bedepend( - [src_obj.id], data_base, package_type='src') + [self.source_name], data_base, package_type='src') return self.result_dict - def package_bedepend(self, pkg_id_list, data_base, package_type): + def package_bedepend(self, pkg_name_list, data_base, package_type): """ Description: Query the dependent function Args: - pkg_id_list:source or binary packages id + pkg_name_list:source or binary packages name data_base: database package_type: package type Returns: Raises: SQLAlchemyError: Database connection exception """ - search_set = set(pkg_id_list) - id_in = literal_column('id').in_(search_set) + search_set = set(pkg_name_list) # package_type if package_type == 'src': - sql_str = text(""" - SELECT b1.name AS search_bin_name, - b1.version AS search_bin_version, - src.NAME AS source_name, - b2.name AS bin_name, - b2.id AS bin_id, - s1.name AS bebuild_src_name, - s1.id AS bebuild_src_id, - s2.name AS install_depend_src_name, - s2.id AS install_depend_src_id - FROM - ( SELECT id,NAME FROM src_pack WHERE {} ) src - LEFT JOIN bin_pack b1 ON b1.srcIDkey = src.id - LEFT JOIN pack_provides ON pack_provides.binIDkey = b1.id - LEFT JOIN pack_requires ON pack_requires.depProIDkey = pack_provides.id - LEFT JOIN src_pack s1 ON s1.id = pack_requires.srcIDkey - LEFT JOIN bin_pack b2 ON b2.id = pack_requires.binIDkey - LEFT JOIN src_pack s2 ON s2.id = b2.srcIDkey;""".format(id_in)) + name_in = literal_column('src_name').in_(search_set) if package_type == 'bin': - sql_str = text(""" - SELECT b1.name AS search_bin_name, - b1.version AS search_bin_version, - s3.NAME AS source_name, - b2.name AS bin_name, - b2.id AS bin_id, - s1.name AS bebuild_src_name, - s1.id AS bebuild_src_id, - s2.name AS install_depend_src_name, - s2.id AS install_depend_src_id - FROM - (SELECT id,NAME,version,srcIDkey FROM bin_pack WHERE {} ) b1 - LEFT JOIN src_pack s3 ON s3.id = b1.srcIDkey - LEFT JOIN pack_provides ON pack_provides.binIDkey = b1.id - LEFT JOIN pack_requires ON pack_requires.depProIDkey = pack_provides.id - LEFT JOIN src_pack s1 ON s1.id = pack_requires.srcIDkey - LEFT JOIN bin_pack b2 ON b2.id = pack_requires.binIDkey - LEFT JOIN src_pack s2 ON s2.id = b2.srcIDkey; - """.format(id_in)) + name_in = literal_column('name').in_(search_set) + + sql_str = text(""" + SELECT b1.name AS search_bin_name, + b1.version AS search_bin_version, + b1.src_name AS source_name, + b2.name AS bin_name, + s1.name AS bebuild_src_name, + b2.src_name AS install_depend_src_name + FROM ( SELECT pkgKey,src_name,name,version FROM bin_pack WHERE {} ) b1 + LEFT JOIN bin_provides ON bin_provides.pkgKey = b1.pkgKey + LEFT JOIN bin_requires br ON br.name = bin_provides.name + LEFT JOIN src_requires sr ON sr.name = bin_provides.name + LEFT JOIN src_pack s1 ON s1.pkgKey = sr.pkgKey + LEFT JOIN bin_pack b2 ON b2.pkgKey = br.pkgKey;""".format(name_in)) + try: - result = data_base.session.execute( - sql_str, { - 'id_{}'.format(i): v for i, v in enumerate( - search_set, 1)}).fetchall() + if package_type == 'src': + result = data_base.session.execute( + 
sql_str, { + 'src_name_{}'.format(i): v for i, v in enumerate( + search_set, 1)}).fetchall() + if package_type == 'bin': + result = data_base.session.execute( + sql_str, { + 'name_{}'.format(i): v for i, v in enumerate( + search_set, 1)}).fetchall() + except SQLAlchemyError as sql_err: current_app.logger.error(sql_err) return ResponseCode.response_json(ResponseCode.CONNECT_DB_ERROR) @@ -139,8 +125,8 @@ class BeDepend(): if result is None: return # Source and binary packages that were found to be dependent - source_id_list = [] - bin_id_list = [] + source_name_list = [] + bin_name_list = [] for obj in result: if obj.source_name is None: source_name = 'NOT FOUND' @@ -160,7 +146,7 @@ class BeDepend(): if obj.bebuild_src_name not in self.source_name_set: self.source_name_set.add(obj.bebuild_src_name) - source_id_list.append(obj.bebuild_src_id) + source_name_list.append(obj.bebuild_src_name) if obj.bin_name: # Determine if the bin package has been checked @@ -176,20 +162,34 @@ class BeDepend(): if obj.bin_name not in self.bin_name_set: self.bin_name_set.add(obj.bin_name) - bin_id_list.append(obj.bin_id) + bin_name_list.append(obj.bin_name) - # withsubpack=1 + # With_sub_pack=1 if self.with_sub_pack == "1": if obj.install_depend_src_name not in self.source_name_set: self.source_name_set.add( obj.install_depend_src_name) - source_id_list.append(obj.install_depend_src_id) + source_name_list.append( + obj.install_depend_src_name) - if len(source_id_list) != 0: + # Sqlite older versions default to a single query with a maximum of 999 + # parameters + if 0 < len(source_name_list) < 999: self.package_bedepend( - source_id_list, data_base, package_type="src") - if len(bin_id_list) != 0: - self.package_bedepend(bin_id_list, data_base, package_type="bin") + source_name_list, data_base, package_type="src") + elif len(source_name_list) >= 999: + count = len(source_name_list) // 999 + for i in range(count + 1): + self.package_bedepend( + source_name_list[999 * i:999 * (i + 1)], data_base, package_type="src") + + if 0 < len(bin_name_list) < 999: + self.package_bedepend(bin_name_list, data_base, package_type="bin") + elif len(bin_name_list) >= 999: + count = len(bin_name_list) // 999 + for i in range(count + 1): + self.package_bedepend( + bin_name_list[999 * i:999 * (i + 1)], data_base, package_type="bin") def make_dicts(self, key, source_name, version, parent_node, be_type): """ diff --git a/packageship/packageship/application/apps/package/function/build_depend.py b/packageship/packageship/application/apps/package/function/build_depend.py index 672cbe60ea27a9e98a8936cac6d58f04d78dd66b..b65464b954fbf7c4a75f81cc199ea930e6115f2c 100644 --- a/packageship/packageship/application/apps/package/function/build_depend.py +++ b/packageship/packageship/application/apps/package/function/build_depend.py @@ -80,9 +80,9 @@ class BuildDepend(): res_status, build_list = self.search_db.get_build_depend(pkg_list) if not build_list: - return res_status if res_status == \ - ResponseCode.DIS_CONNECTION_DB else \ + return res_status if res_status == ResponseCode.DIS_CONNECTION_DB else \ ResponseCode.PACK_NAME_NOT_FOUND + # create root node and get next search list search_list = self._create_node_and_get_search_list(build_list, pkg_list) @@ -153,7 +153,7 @@ class BuildDepend(): self.result_dict[obj.bin_name] = [ obj.source_name, obj.version, - obj.db_name, + self.search_db.binary_search_database_for_first_time(obj.bin_name), [ [obj.search_name, 'build'] ] @@ -191,19 +191,21 @@ class BuildDepend(): return # generate data content + 
search_name_set = set() for obj in bin_info_lis: + search_name_set.add(obj.search_name) + if obj.search_name not in self.source_dict: + self.source_dict[obj.search_name] = [obj.db_name, obj.search_version] + if not obj.bin_name: continue - # for first loop, init the source_dict - if not self.source_dict: - for src_name in pkg_name_li: - self.source_dict[src_name] = [obj.db_name, obj.search_version] + if obj.bin_name not in self.result_dict: self.result_dict[obj.bin_name] = [ obj.source_name if obj.source_name else None, obj.version if obj.version else None, - obj.db_name if obj.db_name else "NOT_FOUND", + self.search_db.binary_search_database_for_first_time(obj.bin_name), [ [obj.search_name, "build"] ] @@ -216,11 +218,14 @@ class BuildDepend(): if obj.source_name and \ obj.source_name not in self.source_dict and \ - obj.source_name not in self.history_dicts: - self.source_dict[obj.source_name] = [obj.db_name, - obj.version] + obj.source_name not in self.history_dicts: next_src_set.add(obj.source_name) + not_found_pkg = set(pkg_name_li) - search_name_set + for pkg_name in not_found_pkg: + if pkg_name not in self.source_dict: + self.source_dict[pkg_name] = ['NOT FOUND', 'NOT FOUND'] + not_found_pkg.clear() self.self_build(next_src_set) return diff --git a/packageship/packageship/application/apps/package/function/packages.py b/packageship/packageship/application/apps/package/function/packages.py index 7d3ca4557237716ff4559dbec9de9dce2c647596..5bf347d6ed186d9db1f9dc37d8e0c802d9e8679a 100644 --- a/packageship/packageship/application/apps/package/function/packages.py +++ b/packageship/packageship/application/apps/package/function/packages.py @@ -5,19 +5,26 @@ functions: get_packages, buildep_packages, sub_packages, get_single_package, update_single_package, update_maintaniner_info """ from flask import current_app +from flask import jsonify +from sqlalchemy.exc import SQLAlchemyError + +from packageship.application.apps.package.function.constants import ResponseCode +from packageship.application.apps.package.function.searchdb import db_priority from packageship.libs.dbutils import DBHelper from packageship.application.models.package import src_pack -from packageship.application.models.package import pack_provides -from packageship.application.models.package import maintenance_info -from packageship.application.models.package import pack_requires +from packageship.application.models.package import src_requires from packageship.application.models.package import bin_pack +from packageship.application.models.package import maintenance_info +from packageship.application.models.package import bin_requires +from packageship.application.models.package import bin_provides from packageship.libs.exception import Error def get_packages(dbname): """ - Description: Get all packages info + Get all packages info in search databases + Args: dbname: Database name Returns: @@ -28,89 +35,146 @@ def get_packages(dbname): """ with DBHelper(db_name=dbname) as db_name: src_pack_queryset = db_name.session.query(src_pack).all() - resp_list = [] + if src_pack_queryset is None: + return None + resp_list = list() for src_pack_obj in src_pack_queryset: - package = {} + package = dict() package["sourceName"] = src_pack_obj.name package["version"] = src_pack_obj.version - package["license"] = src_pack_obj.license - package["maintainer"] = src_pack_obj.Maintaniner - package["maintainlevel"] = src_pack_obj.MaintainLevel - package["sourceURL"] = src_pack_obj.sourceURL - package["maintainlevel"] = src_pack_obj.MaintainLevel - 
package["downloadURL"] = src_pack_obj.downloadURL + package["license"] = src_pack_obj.rpm_license + package["sourceURL"] = src_pack_obj.url + package["rpm_packager"] = src_pack_obj.rpm_packager + package["maintainer"] = src_pack_obj.maintaniner + package["maintainlevel"] = src_pack_obj.maintainlevel package["dbname"] = dbname resp_list.append(package) return resp_list -def buildep_packages(dbname, src_pack_id): +def get_all_packages(db_name): + """ + all packages info + + Args: + db_name: database name + Returns: + response code: response status code + """ + dbpreority = db_priority() + if dbpreority is None: + return jsonify( + ResponseCode.response_json(ResponseCode.FILE_NOT_FOUND) + ) + if not db_name: + response = [] + for dbname in dbpreority: + query_result = get_packages(dbname) + if query_result is None: + return None + for item in query_result: + if item is None: + query_result.remove(item) + response.append(item) + return jsonify( + ResponseCode.response_json(ResponseCode.SUCCESS, response) + ) + if db_name not in dbpreority: + return jsonify( + ResponseCode.response_json(ResponseCode.DB_NAME_ERROR) + ) + response = get_packages(db_name) + if not response: + return jsonify( + ResponseCode.response_json(ResponseCode.PACK_NAME_NOT_FOUND) + ) + return jsonify( + ResponseCode.response_json(ResponseCode.SUCCESS, response) + ) + + +def buildep_packages(dbname, src_pack_pkgkey): """ - Description: Query package layer 1 compilation dependency + Query package layer 1 compilation dependency + Args: dbname: databases name - src_pack_id: The ID of the source package + src_pack_pkgkey: The ID of the source package Returns: buildDep Compile dependencies of source packages Raises: AttributeError: Object does not have this property """ with DBHelper(db_name=dbname) as db_name: - b_pack_requires_set = db_name.session.query( - pack_requires).filter_by(srcIDkey=src_pack_id).all() - b_dep_proid_keys = [ - dep_proid_obj.depProIDkey for dep_proid_obj in b_pack_requires_set] - b_pack_pro_set = db_name.session.query(pack_provides).filter( - pack_provides.id.in_(b_dep_proid_keys)).all() - b_bin_pack_ids = [ - bin_pack_obj.binIDkey for bin_pack_obj in b_pack_pro_set] + # srcpack's pkgkey to src_requires find pkgkey + s_pack_requires_set = db_name.session.query( + src_requires).filter_by(pkgKey=src_pack_pkgkey).all() + # src_requires pkykey to find the name of the dependent component + s_pack_requires_names = [ + s_pack_requires_obj.name for s_pack_requires_obj in s_pack_requires_set] + + # Find pkgkey in bin_provides by the name of the dependent component + b_pack_provides_set = db_name.session.query(bin_provides).filter( + bin_provides.name.in_(s_pack_requires_names)).all() + b_pack_provides_pkg_list = [ + b_pack_provides_obj.pkgKey for b_pack_provides_obj in b_pack_provides_set] + + # Go to bin_pack to find the name by pkgkey of bin_provides b_bin_pack_set = db_name.session.query(bin_pack).filter( - bin_pack.id.in_(b_bin_pack_ids)).all() - builddep = [bin_pack_obj.name for bin_pack_obj in b_bin_pack_set] + bin_pack.pkgKey.in_(b_pack_provides_pkg_list)).all() + builddep = [b_bin_pack_obj.name for b_bin_pack_obj in b_bin_pack_set] return builddep -def sub_packages(dbname, src_pack_id): +def sub_packages(dbname, sourcename): """ - Description: Query package layer 1 installation dependency + Query package layer 1 installation dependency + Args: dbname: databases name - src_pack_id: srcpackage id + src_pack_pkgkey: srcpackage id Returns: subpack Source package to binary package, then find the installation 
dependencies - of the binary package + of the binary package Raises: AttributeError: Object does not have this property """ with DBHelper(db_name=dbname) as db_name: - subpack = {} + subpack = dict() + # The name of src_pack finds the sub-package bin_pack query set i_bin_pack_set = db_name.session.query( - bin_pack).filter_by(srcIDkey=src_pack_id).all() - i_bin_pack_ids = [ - bin_pack_obj.id for bin_pack_obj in i_bin_pack_set] - for i_bin_pack_id in i_bin_pack_ids: - i_bin_pack_name = db_name.session.query( - bin_pack).filter_by(id=i_bin_pack_id).first().name - i_pack_req_set = db_name.session.query( - pack_requires).filter_by(binIDkey=i_bin_pack_id).all() - i_dep_proid_keys = [ - dep_proid_obj.depProIDkey for dep_proid_obj in i_pack_req_set] - i_dep_proid_keys = list(set(i_dep_proid_keys)) - i_pack_provides_set = db_name.session.query(pack_provides).filter( - pack_provides.id.in_(i_dep_proid_keys)).all() - i_bin_pack_ids = [ - bin_pack_obj.binIDkey for bin_pack_obj in i_pack_provides_set] + bin_pack).filter_by(src_name=sourcename).all() + if i_bin_pack_set is None: + return subpack + # Find the objects of each sub-package + for b_bin_pack_obj in i_bin_pack_set: + i_bin_pack_name = b_bin_pack_obj.name + i_bin_pack_pkgkey = b_bin_pack_obj.pkgKey + # Find the names of the components required to install bin_requires + # dependencies + i_bin_requires_set = db_name.session.query( + bin_requires).filter_by(pkgKey=i_bin_pack_pkgkey).all() + i_bin_requires_names = [ + b_bin_requires_obj.name for b_bin_requires_obj in i_bin_requires_set] + # Find pkykey in bin_provides by the name of the dependent + # component + i_bin_provides_set = db_name.session.query(bin_provides).filter( + bin_provides.name.in_(i_bin_requires_names)) + i_bin_provides_pkg_list = [ + i_bin_provides_obj.pkgKey for i_bin_provides_obj in i_bin_provides_set] + # Find the name in bin_pack by pkgkey i_bin_pack_set = db_name.session.query(bin_pack).filter( - bin_pack.id.in_(i_bin_pack_ids)).all() + bin_pack.pkgKey.in_(i_bin_provides_pkg_list)) i_bin_pack_names = [ - bin_pack_obj.name for bin_pack_obj in i_bin_pack_set] + in_bin_pack_obj.name for in_bin_pack_obj in i_bin_pack_set] subpack[i_bin_pack_name] = i_bin_pack_names return subpack def get_single_package(dbname, sourcename): """ - Description: Get all packages info + Get single packages info + Args: dbname: Database name sourcename: Source package name @@ -120,89 +184,119 @@ def get_single_package(dbname, sourcename): AttributeError: Object does not have this property """ with DBHelper(db_name=dbname) as db_name: - package = {} + package = dict() src_pack_obj = db_name.session.query(src_pack).filter_by( name=sourcename).first() + if src_pack_obj is None: + return None package["sourceName"] = src_pack_obj.name package["version"] = src_pack_obj.version - package["license"] = src_pack_obj.license - package["maintainer"] = src_pack_obj.Maintaniner - package["maintainlevel"] = src_pack_obj.MaintainLevel - package["sourceURL"] = src_pack_obj.sourceURL - package["downloadURL"] = src_pack_obj.downloadURL + package["license"] = src_pack_obj.rpm_license + package["sourceURL"] = src_pack_obj.url + package["rpm_packager"] = src_pack_obj.rpm_packager + package["maintainer"] = src_pack_obj.maintaniner + package["maintainlevel"] = src_pack_obj.maintainlevel package["dbname"] = dbname - src_pack_id = src_pack_obj.id - builddep = buildep_packages(dbname, src_pack_id) - subpack = sub_packages(dbname, src_pack_id) + src_pack_pkgkey = src_pack_obj.pkgKey + builddep = buildep_packages(dbname, 
src_pack_pkgkey) + subpack = sub_packages(dbname, sourcename) package['buildDep'] = builddep package['subpack'] = subpack return package -def update_single_package( +def get_single(dbnames, sourcename): + """ + get single package + + Args: + dbname: database name + sourcename: source name + """ + response_data = None + dbpreority = db_priority() + if db_priority is None: + response_data = ResponseCode.FILE_NOT_FOUND + + if not dbnames: + response = [] + for db_names in dbpreority: + query_result = get_single_package(db_names, sourcename) + response.append(query_result) + for key in response: + if key is None: + response.remove(key) + if not response: + return jsonify( + ResponseCode.response_json(ResponseCode.PACK_NAME_NOT_FOUND) + ) + return jsonify( + ResponseCode.response_json(ResponseCode.SUCCESS, response) + ) + + # Database queries data and catches exceptions + if dbnames not in dbpreority: + return jsonify( + ResponseCode.response_json(ResponseCode.DB_NAME_ERROR) + ) + response = get_single_package(dbnames, sourcename) + if response is None: + response_data = ResponseCode.PACK_NAME_NOT_FOUND + if response_data is not None: + return jsonify(ResponseCode.response_json(response_data)) + return jsonify( + ResponseCode.response_json(ResponseCode.SUCCESS, [response]) + ) + + +def _update_package_info( package_name, dbname, maintainer, maintain_level): """ - Description: change single package management - Args: + change single package management + + Args: package_name: package name dbname: Database name maintainer: maintainer info maintain_level: maintain_level info - Returns: + Returns: message success or failed Raises: AttributeError: Object does not have this property - TypeError: Abnormal error - """ - with DBHelper(db_name=dbname) as db_name: - update_obj = db_name.session.query( - src_pack).filter_by(name=package_name).first() - update_obj.Maintaniner = maintainer - update_obj.MaintainLevel = maintain_level - db_name.session.commit() - - -def update_maintaniner_info(package_name, - dbname, - maintaniner, - maintainlevel): - """ - Description: update separately maintaniner info - Args: - package_name: package name - dbname: Database name - maintainer: maintainer info - maintain_level: maintain_level info - Returns: - message success or failed - Raises: - AttributeError: Object does not have this property - Error: Abnormal error + SQLAlchemyError: Exception of type + Error: Abnormal error """ - with DBHelper(db_name=dbname) as db_name: - src_pack_obj = db_name.session.query(src_pack).filter_by( - name=package_name).first() - name = src_pack_obj.name - version = src_pack_obj.version - with DBHelper(db_name='maintenance.information') as dbs_name: - try: + try: + result_data = True + with DBHelper(db_name=dbname) as data_name: + update_obj = data_name.session.query( + src_pack).filter_by(name=package_name).first() + if update_obj is None: + return False + update_obj.maintaniner = maintainer + update_obj.maintainlevel = maintain_level + data_name.session.commit() + name = update_obj.name + version = update_obj.version + with DBHelper(db_name='maintenance.information') as dbs_name: information_obj = dbs_name.session.query(maintenance_info).filter_by( name=package_name, version=version).first() if information_obj is None: information = maintenance_info( name=name, version=version, - maintaniner=maintaniner, - maintainlevel=maintainlevel) + maintaniner=maintainer, + maintainlevel=maintain_level) dbs_name.session.add(information) dbs_name.session.commit() else: - information_obj.maintaniner = 
maintaniner - information_obj.maintainlevel = maintainlevel + information_obj.maintaniner = maintainer + information_obj.maintainlevel = maintain_level dbs_name.session.commit() - except (AttributeError, Error) as attri_error: - current_app.logger.error(attri_error) - return + return result_data + except (AttributeError, SQLAlchemyError, Error) as attri_error: + current_app.logger.error(attri_error) + raise attri_error diff --git a/packageship/packageship/application/apps/package/function/searchdb.py b/packageship/packageship/application/apps/package/function/searchdb.py index bba2994b5dea437de38a69d2bf9d51c24c649527..b8a91112aac12c4c2161f79454e5c6cbcf37bf60 100644 --- a/packageship/packageship/application/apps/package/function/searchdb.py +++ b/packageship/packageship/application/apps/package/function/searchdb.py @@ -11,6 +11,7 @@ from flask import current_app from sqlalchemy import text from sqlalchemy.exc import SQLAlchemyError, DisconnectionError from sqlalchemy.sql import literal_column +from sqlalchemy import exists from packageship.libs.dbutils import DBHelper from packageship.libs.log import Log @@ -30,6 +31,7 @@ class SearchDB(): db_object_dict:A dictionary for storing database connection objects changeLog: """ + def __new__(cls, *args, **kwargs): # pylint: disable=w0613 if not hasattr(cls, "_instance"): @@ -61,6 +63,7 @@ class SearchDB(): """ result_list = [] get_list = [] + provides_not_found = dict() if not self.db_object_dict: LOGGER.logger.warning("Unable to connect to the database, \ check the database configuration") @@ -72,24 +75,27 @@ class SearchDB(): LOGGER.logger.warning( "The input is None, please check the input value.") return result_list + return_tuple = namedtuple('return_tuple', + 'depend_name depend_version depend_src_name \ + search_name search_src_name search_version') for db_name, data_base in self.db_object_dict.items(): try: name_in = literal_column('name').in_(search_set) sql_com = text(""" SELECT DISTINCT - bin_pack.NAME AS depend_name, - bin_pack.version AS depend_version, - s2.NAME AS depend_src_name, - bin.NAME AS search_name, - s1.`name` AS search_src_name, - s1.version AS search_version + bin_pack.NAME AS depend_name, + bin_pack.version AS depend_version, + bin_pack.src_name AS depend_src_name, + bin_requires.NAME AS req_name, + bin.NAME AS search_name, + bin.src_name AS search_src_name, + bin.version AS search_version FROM - ( SELECT id, NAME,srcIDkey FROM bin_pack WHERE {} ) bin - LEFT JOIN pack_requires ON bin.id = pack_requires.binIDkey - LEFT JOIN pack_provides ON pack_provides.id = pack_requires.depProIDkey - LEFT JOIN bin_pack ON bin_pack.id = pack_provides.binIDkey - LEFT JOIN src_pack s1 ON s1.id = bin.srcIDkey - LEFT JOIN src_pack s2 ON s2.id = bin_pack.srcIDkey;""".format(name_in)) + ( SELECT pkgKey,NAME,version,src_name FROM bin_pack WHERE {} ) bin + LEFT JOIN bin_requires ON bin.pkgKey = bin_requires.pkgKey + LEFT JOIN bin_provides ON bin_provides.name = bin_requires.name + LEFT JOIN bin_pack ON bin_pack.pkgKey = bin_provides.pkgKey; + """.format(name_in)) install_set = data_base.session. 
\ execute(sql_com, {'name_{}'.format(i): v for i, v in enumerate(search_set, 1)}).fetchall() @@ -97,12 +103,28 @@ class SearchDB(): # find search_name in db_name # depend_name's db_name will be found in next loop for result in install_set: - result_list.append((result, db_name)) get_list.append(result.search_name) + if not result.depend_name and result.req_name: + if result.req_name in provides_not_found: + provides_not_found[result.req_name].append([result.search_name, result.search_src_name, result.search_version, db_name]) + else: + provides_not_found[result.req_name] = [[result.search_name, result.search_src_name, result.search_version, db_name]] + else: + obj = return_tuple( + result.depend_name, + result.depend_src_name, + result.depend_version, + result.search_name, + result.search_src_name, + result.search_version, + ) + result_list.append((obj, db_name)) get_set = set(get_list) get_list.clear() search_set.symmetric_difference_update(get_set) if not search_set: + install_result = self._get_install_pro_in_other_database(provides_not_found) + result_list.extend(install_result) return result_list else: continue @@ -110,9 +132,8 @@ class SearchDB(): LOGGER.logger.error(error_msg) except SQLAlchemyError as error_msg: LOGGER.logger.error(error_msg) - return_tuple = namedtuple('return_tuple', - 'depend_name depend_version depend_src_name \ - search_name search_src_name search_version') + install_result = self._get_install_pro_in_other_database(provides_not_found) + result_list.extend(install_result) for binary_name in search_set: result_list.append((return_tuple(None, None, None, binary_name, None, None), 'NOT FOUND')) @@ -137,8 +158,8 @@ class SearchDB(): bin_obj = data_base.session.query(bin_pack).filter_by( name=binary_name ).first() - source_name = bin_obj.src_pack.name - source_version = bin_obj.src_pack.version + source_name = bin_obj.src_name + source_version = bin_obj.version if source_name is not None: return ResponseCode.SUCCESS, db_name, \ source_name, source_version @@ -163,28 +184,26 @@ class SearchDB(): """ if not self.db_object_dict: return ResponseCode.DIS_CONNECTION_DB, None - - if None in source_name_list: - source_name_list.remove(None) - search_set = set(source_name_list) + search_set = set([ + source_name for source_name in source_name_list if source_name]) result_list = [] get_list = [] if not search_set: return ResponseCode.INPUT_NONE, None for db_name, data_base in self.db_object_dict.items(): try: - name_in = literal_column('name').in_(search_set) + name_in = literal_column('src_name').in_(search_set) sql_com = text('''SELECT - t1.NAME as subpack_name, - t2.version as search_version, - t2.NAME as search_name - FROM bin_pack t1, src_pack t2 - WHERE - t2.id = t1.srcIDkey - AND t2.{} + NAME AS subpack_name, + src_name AS search_name, + version AS search_version + FROM + bin_pack + WHERE + {} '''.format(name_in)) subpack_tuple = data_base.session. 
\ - execute(sql_com, {'name_{}'.format(i): v + execute(sql_com, {'src_name_{}'.format(i): v for i, v in enumerate(search_set, 1)}).fetchall() if subpack_tuple: for result in subpack_tuple: @@ -203,13 +222,13 @@ class SearchDB(): return_tuple = namedtuple( 'return_tuple', 'subpack_name search_version search_name') for search_name in search_set: - LOGGER.logger.warning("Can't not find " + - search_name + " subpack in all database") + # LOGGER.logger.warning("Can't not find " + + # search_name + " subpack in all database") result_list.append( (return_tuple(None, None, search_name), 'NOT_FOUND')) return ResponseCode.SUCCESS, result_list - def get_binary_in_other_database(self, not_found_binary, db_): + def _get_binary_in_other_database(self, not_found_binary): """ Description: Binary package name data not found in the current database, go to other databases to try @@ -235,57 +254,111 @@ class SearchDB(): "version", "db_name", "search_version", - "req_name" ]) - src_req_map = {req_: src for src, req_ in not_found_binary} - - local_search_set = {req_ for _, req_ in not_found_binary} - - local_dict = {k: v for k, v in self.db_object_dict.items() if k != db_} - res = [] + search_list = [] + result_list = [] + for db_name, data_base in self.db_object_dict.items(): + for key, _ in not_found_binary.items(): + search_list.append(key) - for db_name, data_base in local_dict.items(): + search_set = set(search_list) + search_list.clear() try: sql_string = text(""" - SELECT - t3.NAME AS source_name, - t1.NAME AS bin_name, - t1.version, - t3.version as search_version, - t2.NAME AS req_name - FROM - bin_pack t1, - pack_provides t2, - src_pack t3 - WHERE - t2.{} - AND t1.id = t2.binIDkey - AND t1.srcIDkey = t3.id; - """.format(literal_column('name').in_(local_search_set))) - build_set_2 = data_base.session. \ + SELECT DISTINCT + t1.src_name AS source_name, + t1.NAME AS bin_name, + t1.version, + t2.NAME AS req_name + FROM + bin_pack t1, + bin_provides t2 + WHERE + t2.{} + AND t1.pkgKey = t2.pkgKey; + """.format(literal_column('name').in_(search_set))) + bin_set = data_base.session. 
\ execute(sql_string, {'name_{}'.format(i): v - for i, v in enumerate(local_search_set, 1)}).fetchall() - if not build_set_2: - continue - - res.extend([return_tuple( - src_req_map.get(bin_pack.req_name), - bin_pack.source_name, - bin_pack.bin_name, - bin_pack.version, - db_name, - bin_pack.search_version, - bin_pack.req_name - ) for bin_pack in build_set_2 if bin_pack.bin_name]) - - for obj in res: - local_search_set.remove(obj.req_name) + for i, v in enumerate(search_set, 1)}).fetchall() + if bin_set: + for result in bin_set: + if result.req_name not in not_found_binary: + LOGGER.logger.warning(result.req_name + " contains in two rpm packages!!!") + else: + for source_info in not_found_binary[result.req_name]: + obj = return_tuple( + source_info[0], + result.source_name, + result.bin_name, + result.version, + db_name, + source_info[1] + ) + result_list.append(obj) + del not_found_binary[result.req_name] + if not not_found_binary: + return result_list + except AttributeError as attr_err: + current_app.logger.error(attr_err) + except SQLAlchemyError as sql_err: + current_app.logger.error(sql_err) + + if not_found_binary: + for key, values in not_found_binary.items(): + LOGGER.logger.warning("CANNOT FOUND THE component" + key + " in all database") + return result_list - except AttributeError as attr_error: - current_app.logger.error(attr_error) - except SQLAlchemyError as sql_error: - current_app.logger.error(sql_error) - return res + def _get_install_pro_in_other_database(self, not_found_binary): + if not not_found_binary: + return [] + return_tuple = namedtuple('return_tuple', + 'depend_name depend_version depend_src_name \ + search_name search_src_name search_version') + search_list = [] + result_list = [] + for db_name, data_base in self.db_object_dict.items(): + for key,values in not_found_binary.items(): + search_list.append(key) + search_set = set(search_list) + search_list.clear() + sql_string = text(""" + SELECT DISTINCT + t1.src_name AS source_name, + t1.NAME AS bin_name, + t1.version, + t2.NAME AS req_name + FROM + bin_pack t1, + bin_provides t2 + WHERE + t2.{} + AND t1.pkgKey = t2.pkgKey; + """.format(literal_column('name').in_(search_set))) + bin_set = data_base.session. 
\ + execute(sql_string, {'name_{}'.format(i): v + for i, v in enumerate(search_set, 1)}).fetchall() + if bin_set: + for result in bin_set: + if result.req_name not in not_found_binary: + LOGGER.logger.warning(result.req_name + " contains in two rpm packages!!!") + else: + for binary_info in not_found_binary[result.req_name]: + obj = return_tuple( + result.bin_name, + result.version, + result.source_name, + binary_info[0], + binary_info[1], + binary_info[2] + ) + result_list.append((obj, binary_info[3])) + del not_found_binary[result.req_name] + if not not_found_binary: + return result_list + # if not_found_binary: + # for key, values in not_found_binary.items(): + # LOGGER.logger.warning("CANNOT FOUND THE component" + key + " in all database") + return result_list def get_build_depend(self, source_name_li): """ @@ -315,89 +388,108 @@ class SearchDB(): if not s_name_set: return ResponseCode.PARAM_ERROR, None - not_found_binary = set() + provides_not_found = dict() build_list = [] for db_name, data_base in self.db_object_dict.items(): - try: - sql_com = text("""SELECT DISTINCT - src.NAME AS search_name, - src.version AS search_version, - s2.NAME AS source_name, - pack_provides.binIDkey AS bin_id, - pack_requires.NAME AS req_name, - bin_pack.version AS version, - bin_pack.NAME AS bin_name - FROM - ( SELECT id, NAME,version FROM src_pack WHERE {} ) src - LEFT JOIN pack_requires ON src.id = pack_requires.srcIDkey - LEFT JOIN pack_provides ON pack_provides.id = pack_requires.depProIDkey - LEFT JOIN bin_pack ON bin_pack.id = pack_provides.binIDkey - LEFT JOIN src_pack s1 ON s1.id = pack_requires.srcIDkey - LEFT JOIN src_pack s2 ON bin_pack.srcIDkey = s2.id; - """.format(literal_column("name").in_(s_name_set))) - - build_set = data_base.session. \ - execute(sql_com, {'name_{}'.format(i): v - for i, v in enumerate(s_name_set, 1)}).fetchall() - if not build_set: - continue - - # When processing source package without compilation dependency - to_remove_obj_index = [] - for index, b_pack in enumerate(build_set): - if not b_pack.source_name and not b_pack.req_name: - obj = return_tuple( - b_pack.search_name, - b_pack.source_name, - b_pack.bin_name, - b_pack.version, - db_name, - b_pack.search_version + build_set = [] + try: + temp_list = list(s_name_set) + for input_name_li in [temp_list[i:i + 900] for i in range(0, len(temp_list), 900)]: + sql_com = text(""" + SELECT DISTINCT + src.NAME AS search_name, + src.version AS search_version, + bin_pack.src_name AS source_name, + bin_provides.pkgKey AS bin_id, + src_requires.NAME AS req_name, + bin_pack.version AS version, + bin_pack.NAME AS bin_name + FROM + ( SELECT pkgKey, NAME, version FROM src_pack WHERE {} ) src + LEFT JOIN src_requires ON src.pkgKey = src_requires.pkgKey + LEFT JOIN bin_provides ON bin_provides.NAME = src_requires.NAME + LEFT JOIN bin_pack ON bin_pack.pkgKey = bin_provides.pkgKey; + """.format(literal_column("name").in_(input_name_li))) + res = data_base.session.execute( + sql_com, + {'name_{}'.format(i): v + for i, v in enumerate(input_name_li, 1)} + ).fetchall() + + build_set.extend(res) + except AttributeError as attr_err: + current_app.logger.error(attr_err) + except SQLAlchemyError as sql_err: + current_app.logger.error(sql_err) + + if not build_set: + continue + + # When processing source package without compilation dependency + get_list = [] + for result in build_set: + get_list.append(result.search_name) + if not result.bin_name and result.req_name: + if result.req_name in provides_not_found: + 
provides_not_found[result.req_name].append( + [result.search_name, result.search_version, db_name] ) - - build_list.append(obj) - to_remove_obj_index.append(index) - - for i in reversed(to_remove_obj_index): - build_set.pop(i) - - if not build_set: - continue - - build_list.extend([ - return_tuple( - bin_pack.search_name, - bin_pack.source_name, - bin_pack.bin_name, - bin_pack.version, + else: + provides_not_found[result.req_name] = [ + [result.search_name, result.search_version, db_name] + ] + else: + obj = return_tuple( + result.search_name, + result.source_name, + result.bin_name, + result.version, db_name, - bin_pack.search_version - ) for bin_pack in build_set if bin_pack.bin_id and bin_pack.bin_name - ]) - # Component name can't find its binary package name - not_found_binary.update([(bin_pack.search_name, bin_pack.req_name) - for bin_pack in build_set if not bin_pack.bin_id]) - - s_name_set -= {bin_pack.search_name for bin_pack in build_set - if bin_pack.bin_id} - - if not not_found_binary and not s_name_set: - return ResponseCode.SUCCESS, build_list - - for obj in self.get_binary_in_other_database(not_found_binary, db_name): + result.search_version + ) build_list.append(obj) - not_found_binary.clear() - - except AttributeError as attr_error: - current_app.logger.error(attr_error) - except SQLAlchemyError as sql_error: - current_app.logger.error(sql_error) - return ResponseCode.DIS_CONNECTION_DB, None + get_set = set(get_list) + get_list.clear() + s_name_set.symmetric_difference_update(get_set) + if not s_name_set: + build_result = self._get_binary_in_other_database(provides_not_found) + build_list.extend(build_result) + return ResponseCode.SUCCESS, build_list + + if s_name_set: + build_result = self._get_binary_in_other_database(provides_not_found) + build_list.extend(build_result) + for source in s_name_set: + LOGGER.logger.warning("CANNOT FOUND THE source " + source + " in all database") return ResponseCode.SUCCESS, build_list + def binary_search_database_for_first_time(self, binary_name): + """ + Args: + binary_name: a binary package name + + Returns: + The name of the first database + in which the binary package appears according to priority + If it does not exist or exception occurred , return 'NOT FOUND' + + """ + try: + for db_name, data_base in self.db_object_dict.items(): + if data_base.session.query( + exists().where(bin_pack.name == binary_name) + ).scalar(): + return db_name + except AttributeError as attr_err: + current_app.logger.error(attr_err) + except SQLAlchemyError as sql_err: + current_app.logger.error(sql_err) + + return 'NOT FOUND' + def db_priority(): """ diff --git a/packageship/packageship/application/apps/package/function/self_depend.py b/packageship/packageship/application/apps/package/function/self_depend.py index e63d97c77d09fe5b43e3d17a1628b55d091039ec..54def4496d5b078b6a274c300ed37abfe0a59ccd 100644 --- a/packageship/packageship/application/apps/package/function/self_depend.py +++ b/packageship/packageship/application/apps/package/function/self_depend.py @@ -213,14 +213,14 @@ class SelfDepend(): continue if key not in self.binary_dict.dictionary and values[0] != 'source': self.binary_dict.dictionary[key] = copy.deepcopy(values) - if self.withsubpack == 1: - source_name = values[ListNode.SOURCE_NAME] - if not source_name: - LOGGER.logger.warning("source name is None") - if source_name and source_name not in self.source_dicts.dictionary: - self.source_dicts.append_src(key=source_name, - dbname=values[ListNode.DBNAME], - version=values[ListNode.VERSION]) 
+ source_name = values[ListNode.SOURCE_NAME] + if not source_name: + LOGGER.logger.warning("source name is None") + if source_name and source_name not in self.source_dicts.dictionary: + self.source_dicts.append_src(key=source_name, + dbname=values[ListNode.DBNAME], + version=values[ListNode.VERSION]) + if self.withsubpack == 1: self.search_subpack_list.append(source_name) elif key in self.binary_dict.dictionary: self.binary_dict.update_value(key=key, parent_list=values[ListNode.PARENT_LIST]) diff --git a/packageship/packageship/application/apps/package/url.py b/packageship/packageship/application/apps/package/url.py index 5a08213485915ab4762a6aa7adca453bfa47e89f..873cd7931ac4d53c64267c9f24b1db7763ae3cea 100644 --- a/packageship/packageship/application/apps/package/url.py +++ b/packageship/packageship/application/apps/package/url.py @@ -10,7 +10,7 @@ urls = [ # Query and update a package info - (view.SinglePack, '/packages/findByPackName', + (view.SinglePack, '/packages/packageInfo', {'query': ('GET'), 'write': ('PUT')}), # Query a package's install depend(support querying in one or more databases) diff --git a/packageship/packageship/application/apps/package/view.py b/packageship/packageship/application/apps/package/view.py index 2058738f7738dfce5c17d17b4a8ef1e36889b879..cdf32e4d37a21d090d58fba1a169f7d65154d6f7 100644 --- a/packageship/packageship/application/apps/package/view.py +++ b/packageship/packageship/application/apps/package/view.py @@ -19,10 +19,9 @@ from packageship.libs.exception import DataMergeException from packageship.libs.log import Log from packageship.system_config import DATABASE_FILE_INFO from .function.constants import ResponseCode -from .function.packages import get_packages -from .function.packages import update_single_package -from .function.packages import update_maintaniner_info -from .function.packages import get_single_package +from .function.packages import get_all_packages +from .function.packages import _update_package_info +from .function.packages import get_single from .function.searchdb import db_priority from .serialize import PackagesSchema from .serialize import GetpackSchema @@ -52,25 +51,27 @@ class Packages(Resource): def get(self): """ - Description: Get all package info from a database + Get all package info from a database + Args: dbName: Data path name, not required parameter Returns: - { - "code": "", - "data": [ - { + for + example:: + { + "code": "", + "data": [{ "dbname": "", - "downloadURL": "", "license": "", - "maintainer": , - "maintainlevel": , + "maintainlevel":, + "maintaniner": , + "rpm_packager": "", "sourceName": "", "sourceURL": "", "version": "" - }, - "msg": "" - } + }], + "msg": "" + } Raises: DisconnectionError: Unable to connect to database exception AttributeError: Object does not have this property @@ -86,34 +87,19 @@ class Packages(Resource): dbname = data.get("dbName", None) # Call method to query try: - dbpreority = db_priority() - if dbpreority is None: - return jsonify( - ResponseCode.response_json(ResponseCode.FILE_NOT_FOUND) - ) - if not dbname: - response = [] - for dbname in dbpreority: - query_result = get_packages(dbname) - for item in query_result: - response.append(item) - return jsonify( - ResponseCode.response_json(ResponseCode.SUCCESS, response) - ) - if dbname not in dbpreority: - return jsonify( - ResponseCode.response_json(ResponseCode.DB_NAME_ERROR) - ) - response = get_packages(dbname) - return jsonify( - ResponseCode.response_json(ResponseCode.SUCCESS, response) - ) + response = 
get_all_packages(dbname) + return response # Database queries data and catches exceptions except DisconnectionError as dis_connection_error: current_app.logger.error(dis_connection_error) return jsonify( ResponseCode.response_json( ResponseCode.DIS_CONNECTION_DB)) + except (AttributeError, TypeError, Error) as attribute_error: + current_app.logger.error(attribute_error) + return jsonify( + ResponseCode.response_json( + ResponseCode.PACK_NAME_NOT_FOUND)) class SinglePack(Resource): @@ -125,29 +111,29 @@ class SinglePack(Resource): def get(self): """ - description: Searching a package info + Searching a package info + Args: dbName: Database name, not required parameter sourceName: Source code package name, must pass Returns: - { - "code": "", - "data": [ + for + examples:: { - "buildDep": [], - "dbname": "", - "downloadURL": "", - "license": "", - "maintainer": "", - "maintainlevel": "", - "sourceName": "", - "sourceURL": "", - "subpack": {}, - "version": "" - } - ], - "msg": "" - } + "code": "", + "data": [{ + "buildDep": [], + "dbname": "", + "license": "", + "maintainlevel": "", + "maintaniner": "", + "rpm_packager": "", + "sourceName": "", + "sourceURL": "", + "subpack": { }, + "version": ""}], + "msg": "" + } Raises: DisconnectionError: Unable to connect to database exception AttributeError: Object does not have this property @@ -166,29 +152,8 @@ class SinglePack(Resource): # Call method to query try: - dbpreority = db_priority() - if db_priority is None: - return jsonify( - ResponseCode.response_json(ResponseCode.FILE_NOT_FOUND) - ) - if not dbname: - response = [] - for dbname in dbpreority: - query_result = get_single_package(dbname, sourcename) - response.append(query_result) - return jsonify( - ResponseCode.response_json(ResponseCode.SUCCESS, response) - ) - - # Database queries data and catches exceptions - if dbname not in dbpreority: - return jsonify( - ResponseCode.response_json(ResponseCode.DB_NAME_ERROR) - ) - response = get_single_package(dbname, sourcename) - return jsonify( - ResponseCode.response_json(ResponseCode.SUCCESS, [response]) - ) + response = get_single(dbname, sourcename) + return response except DisconnectionError as dis_connection_error: current_app.logger.error(dis_connection_error) abnormal = ResponseCode.DIS_CONNECTION_DB @@ -201,18 +166,21 @@ class SinglePack(Resource): def put(self): """ - Description: update a package info, + update a package info, + Args: dbName: Database name,Parameters are required sourceName: The name of the source code package. 
Parameters are required maintainer: Maintainer, parameter not required maintainlevel: Maintenance level, parameter not required Returns: - { - "code": "", - "data": "", - "msg": "" - } + for + example:: + { + "code": "", + "data": "", + "msg": "" + } Raises: DisconnectionError: Unable to connect to database exception AttributeError: Object does not have this property @@ -242,24 +210,24 @@ class SinglePack(Resource): ResponseCode.response_json(ResponseCode.DB_NAME_ERROR) ) # Database queries data and catches exceptions + try: - update_single_package( - sourcename, dbname, maintainer, maintain_level) - update_maintaniner_info( + result_data = _update_package_info( sourcename, dbname, maintainer, maintain_level) + if result_data is False: + return jsonify( + ResponseCode.response_json( + ResponseCode.PACK_NAME_NOT_FOUND)) return jsonify( - ResponseCode.response_json(ResponseCode.SUCCESS) - ) + ResponseCode.response_json(ResponseCode.SUCCESS)) except DisconnectionError as dis_connection_error: current_app.logger.error(dis_connection_error) - return jsonify( - ResponseCode.response_json( - ResponseCode.DIS_CONNECTION_DB)) + abnormal = ResponseCode.DIS_CONNECTION_DB except (AttributeError, TypeError, Error) as attri_error: current_app.logger.error(attri_error) - return jsonify( - ResponseCode.response_json(ResponseCode.PACK_NAME_NOT_FOUND) - ) + abnormal = ResponseCode.CONNECT_DB_ERROR + if abnormal is not None: + return jsonify(ResponseCode.response_json(abnormal)) class InstallDepend(Resource): @@ -271,12 +239,13 @@ class InstallDepend(Resource): def post(self): """ - Description: Query a package's install depend(support - querying in one or more databases) - input: + Query a package's install depend(support + querying in one or more databases) + + Args: binaryName dbPreority: the array for database preority - return: + Returns: resultDict{ binary_name: //binary package name [ @@ -347,18 +316,21 @@ class BuildDepend(Resource): def post(self): """ - Description: Query a package's build depend and - build depend package's install depend - (support querying in one or more databases) + Query a package's build depend and + build depend package's install depend + (support querying in one or more databases) + Args: sourceName :name of the source package dbPreority:the array for database preority Returns: - { - "code": "", - "data": "", - "msg": "" - } + for + example:: + { + "code": "", + "data": "", + "msg": "" + } Raises: """ schema = BuildDependSchema() @@ -409,8 +381,9 @@ class SelfDepend(Resource): def post(self): """ - Description: Query a package's all dependencies including install and build depend - (support quering a binary or source package in one or more databases) + Query a package's all dependencies including install and build depend + (support quering a binary or source package in one or more databases) + Args: packageName:package name packageType: source/binary @@ -418,12 +391,13 @@ class SelfDepend(Resource): withSubpack: 0/1 dbPreority:the array for database preority Returns: - { - "code": "", - "data": "", - "msg": "" - } - Raises: + for + example:: + { + "code": "", + "data": "", + "msg": "" + } """ schema = SelfDependSchema() @@ -482,25 +456,26 @@ class BeDepend(Resource): def post(self): """ - description: Query a package's all dependencies including - be installed and built depend + Query a package's all dependencies including + be installed and built depend + Args: packageName:package name withSubpack: 0/1 dbname:database name Returns: - resultList[ - restult[ - binaryName: - 
srcName: - dbName: - type: beinstall or bebuild, which depend on the function - childNode: the binary package name which is the be built/installed - depend for binaryName + for + example:: + resultList[ + restult[ + binaryName: + srcName: + dbName: + type: beinstall or bebuild, which depend on the function + childNode: the binary package name which is the be built/installed + depend for binaryName + ] ] - ] - exception: - changeLog: """ schema = BeDependSchema() data = request.get_json() @@ -542,20 +517,22 @@ class Repodatas(Resource): def get(self): """ - description: get all database - Args: + get all database + Returns: - { - "code": "", - "data": [ - { - "database_name": "", - "priority": "", - "status": "" - } - ], - "msg": "" - } + for + example:: + { + "code": "", + "data": [ + { + "database_name": "", + "priority": "", + "status": "" + } + ], + "msg": "" + } Raises: FileNotFoundError: File not found exception TypeError: Exception of wrong type @@ -583,14 +560,16 @@ class Repodatas(Resource): def delete(self): """ - description: get all database - Args: + get all database + Returns: - { - "code": "", - "data": "", - "msg": "" - } + for + example:: + { + "code": "", + "data": "", + "msg": "" + } Raises: FileNotFoundError: File not found exception, TypeError: Exception of wrong type @@ -614,7 +593,10 @@ class Repodatas(Resource): ) try: drop_db = InitDataBase() - drop_db.delete_db(db_name) + del_result = drop_db.delete_db(db_name) + if del_result is False: + return jsonify( + ResponseCode.response_json(ResponseCode.DELETE_DB_ERROR)) return jsonify( ResponseCode.response_json(ResponseCode.SUCCESS) ) @@ -634,14 +616,16 @@ class InitSystem(Resource): def post(self): """ - description: InitSystem - Args: + InitSystem + Returns: - { - "code": "", - "data": "", - "msg": "" - } + for + example:: + { + "code": "", + "data": "", + "msg": "" + } Raises: ContentNoneException: Unable to connect to the exception of the database DisconnectionError:Exception connecting to database @@ -686,4 +670,9 @@ class InitSystem(Resource): abnormal = ResponseCode.FAILED_CREATE_DATABASE_TABLE if abnormal is not None: return jsonify(ResponseCode.response_json(abnormal)) + db_list = db_priority() + if db_list is None: + return jsonify( + ResponseCode.response_json( + ResponseCode.FAILED_CREATE_DATABASE_TABLE)) return jsonify(ResponseCode.response_json(ResponseCode.SUCCESS)) diff --git a/packageship/packageship/application/initsystem/data_import.py b/packageship/packageship/application/initsystem/data_import.py index 5d0dd0e6766ef2169dfd927679310abcba54a508..03f1e7eab62ac88fe85f2f4d80f602d6383a7a2a 100644 --- a/packageship/packageship/application/initsystem/data_import.py +++ b/packageship/packageship/application/initsystem/data_import.py @@ -200,7 +200,7 @@ class InitDataBase(): raise ContentNoneException( 'The path to the sqlite file in the database initialization configuration \ is incorrect ') - if os.path.exists(src_db_file) or os.path.exists(bin_db_file): + if not os.path.exists(src_db_file) or not os.path.exists(bin_db_file): raise FileNotFoundError("sqlite file {src} or {bin} does not exist, please \ check and try again".format(src=src_db_file, bin=bin_db_file)) # 3. 
Obtain temporary source package files and binary package files diff --git a/packageship/packageship/pkgship.py b/packageship/packageship/pkgship.py index 95b16486a5032ecc0fdb90eef5080afea31da584..290ac3cb598e26481d585cae19497ed67109ae19 100644 --- a/packageship/packageship/pkgship.py +++ b/packageship/packageship/pkgship.py @@ -621,7 +621,7 @@ class UpdatePackageCommand(PkgshipCommand): Raises: ConnectionError: Request connection error """ - _url = self.write_host + '/packages/findByPackName' + _url = self.write_host + '/packages/packageInfo' try: response = requests.put( _url, data=json.dumps({'sourceName': params.packagename, @@ -1196,7 +1196,7 @@ class SingleCommand(PkgshipCommand): ConnectionError: requests connection error """ _url = self.read_host + \ - '/packages/findByPackName?dbName={db_name}&sourceName={packagename}' \ + '/packages/packageInfo?dbName={db_name}&sourceName={packagename}' \ .format(db_name=params.db, packagename=params.packagename) try: response = requests.get(_url) diff --git a/packageship/packageship/pkgshipd b/packageship/packageship/pkgshipd index a6d2847e7ac5be7d0d0b9970df12f08fc06b9533..a2e39a4f1fbbd883d272313905bbc94b47db0916 100755 --- a/packageship/packageship/pkgshipd +++ b/packageship/packageship/pkgshipd @@ -20,7 +20,8 @@ function create_config_file(){ buffer_size=$(get_config "$service" "buffer-size") http_timeout=$(get_config "$service" "http-timeout") harakiri=$(get_config "$service" "harakiri") - wsgi_file_path=$(find /usr/lib/ -name "packageship") + wsgi_file_path=$(find /usr/lib/ -name "packageship" | head -n 1) + echo "[INFO] run packageship under path: $wsgi_file_path" if [ $service = "manage" -o $service = "all" ];then write_port=$(get_config "$service" "write_port") write_ip_addr=$(get_config "$service" "write_ip_addr") diff --git a/packageship/pkgship.spec b/packageship/pkgship.spec index c85191d2d299f69958b689bbd9e4d00893d0a79c..7ad4f21ab9a039933c562edf33f7df1435e0fd9f 100644 --- a/packageship/pkgship.spec +++ b/packageship/pkgship.spec @@ -10,7 +10,7 @@ BuildArch: noarch Requires: python3-pip python3-flask-restful python3-flask python3 python3-pyyaml Requires: python3-sqlalchemy python3-prettytable python3-requests -#Requires: pyinstaller python3-flask-session python3-flask-script marshmallow uwsig +Requires: python-PyInstaller python3-flask-session python3-flask-script python3-marshmallow uwsgi %description Pkgship implements rpm package dependence ,maintainer, patch query and so no.
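
The hunks above replace the old pack_requires/pack_provides ID-key joins with repodata-style lookups (bin_pack, bin_requires, bin_provides, src_requires matched by component name) and split long name lists into batches to stay below SQLite's default ceiling of 999 bound parameters per statement (see the 999 handling in be_depend.py and the 900-name slices in searchdb.get_build_depend). The standalone Python sketch below illustrates that pattern only; the simplified schema, the sample rows, the helper name install_depends and the 900 batch size are illustrative assumptions, not code taken from this patch.

# Illustrative sketch only -- not part of the patch. It mimics, under an
# assumed minimal schema, the repodata-style dependency lookup used above:
# bin_requires.name is matched against bin_provides.name instead of the old
# ID-key columns, and IN (...) lists are queried in batches so a single
# statement never exceeds SQLite's default 999-bound-parameter limit.
import sqlite3

BATCH_SIZE = 900  # stay safely below SQLITE_MAX_VARIABLE_NUMBER (999 by default)


def install_depends(conn, binary_names):
    """Return {binary name: [binary packages that provide its requirements]}."""
    result = {}
    names = sorted(set(binary_names))
    for start in range(0, len(names), BATCH_SIZE):
        batch = names[start:start + BATCH_SIZE]
        placeholders = ",".join("?" * len(batch))
        rows = conn.execute(
            """
            SELECT bin.name AS search_name,
                   dep.name AS depend_name
            FROM (SELECT pkgKey, name FROM bin_pack
                  WHERE name IN ({})) bin
            LEFT JOIN bin_requires req ON req.pkgKey = bin.pkgKey
            LEFT JOIN bin_provides pro ON pro.name = req.name
            LEFT JOIN bin_pack dep ON dep.pkgKey = pro.pkgKey
            """.format(placeholders), batch).fetchall()
        for search_name, depend_name in rows:
            result.setdefault(search_name, [])
            if depend_name:
                result[search_name].append(depend_name)
    return result


if __name__ == "__main__":
    # Hypothetical sample data: vim requires libc.so.6, which glibc provides.
    conn = sqlite3.connect(":memory:")
    conn.executescript("""
        CREATE TABLE bin_pack     (pkgKey INTEGER, name TEXT, src_name TEXT);
        CREATE TABLE bin_requires (pkgKey INTEGER, name TEXT);
        CREATE TABLE bin_provides (pkgKey INTEGER, name TEXT);
        INSERT INTO bin_pack     VALUES (1, 'vim', 'vim'), (2, 'glibc', 'glibc');
        INSERT INTO bin_requires VALUES (1, 'libc.so.6');
        INSERT INTO bin_provides VALUES (2, 'libc.so.6');
    """)
    print(install_depends(conn, ['vim']))   # {'vim': ['glibc']}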