From 255af54054f0e8a1f9b033be0b4e098c2a784ea6 Mon Sep 17 00:00:00 2001 From: Shinwell Hu Date: Sat, 22 Aug 2020 03:11:01 +0000 Subject: [PATCH 01/11] fix netpbm.yaml error --- upstream-info/automake.yaml | 2 +- upstream-info/netpbm.yaml | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/upstream-info/automake.yaml b/upstream-info/automake.yaml index d2b11114..9893a86f 100644 --- a/upstream-info/automake.yaml +++ b/upstream-info/automake.yaml @@ -1,4 +1,4 @@ version_control: git -src_repo: https://git.savannah.gnu.org/git/automake.git +src_repo: https://git.savannah.gnu.org/git/automake.git tag_prefix: "^v" separator: "." diff --git a/upstream-info/netpbm.yaml b/upstream-info/netpbm.yaml index f5c5e4fc..fc6523cf 100644 --- a/upstream-info/netpbm.yaml +++ b/upstream-info/netpbm.yaml @@ -1,6 +1,5 @@ version_control: svn -src_repo: http://svn.code.sf.net/p/netpbm/code/release_number +src_repo: https://svn.code.sf.net/p/netpbm/code tag_dir: release_number tag_prefix: ^v separator: . - \ No newline at end of file -- Gitee From dbd244daa487650e0a81b0a52ed25e34417758d3 Mon Sep 17 00:00:00 2001 From: Shinwell Hu Date: Sat, 22 Aug 2020 03:14:21 +0000 Subject: [PATCH 02/11] import psrtool --- advisors/psrtool.py | 81 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 81 insertions(+) create mode 100755 advisors/psrtool.py diff --git a/advisors/psrtool.py b/advisors/psrtool.py new file mode 100755 index 00000000..18e91084 --- /dev/null +++ b/advisors/psrtool.py @@ -0,0 +1,81 @@ +#!/usr/bin/python3 +#****************************************************************************** +# Copyright (c) Huawei Technologies Co., Ltd. 2020-2020. All rights reserved. +# licensed under the Mulan PSL v2. +# You can use this software according to the terms and conditions of the Mulan PSL v2. +# You may obtain a copy of Mulan PSL v2 at: +# http://license.coscl.org.cn/MulanPSL2 +# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR +# PURPOSE. +# See the Mulan PSL v2 for more details. +# +# This is a helper script provides a way to query or list packages in specific SIG. 
+# This script was inspired by previous work from @love_hangzhou +# ******************************************************************************/ + +import yaml, argparse, re, os + +def list_packages(sigs, sig_name): + for sig in sigs: + if sig['name'].lower() == sig_name.lower(): + return sig['repositories'] + + +def list_sigs(sigs): + result = [] + for sig in sigs: + result.append(sig['name']) + return result + + +def package_to_sigs(sigs, pkg_names): + result = {} + for pkg in pkg_names: + for sig in sigs: + repos = sig['repositories'] + for repo in repos: + searchObj = re.search(pkg.lower(), repo.lower(), 0) + if searchObj: + result[repo] = sig['name'] + return result + + +def print_list(l): + for i in l: + print(i) + +def print_dict(d): + for k in d.keys(): + print(k + ": " + d[k]) + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + group = parser.add_mutually_exclusive_group(required=True) + group.add_argument("-l", "--list", default=False, nargs='?', + help="List packages managed by the specific SIG, or list all SIGs instead") + group.add_argument("-q", "--query_packages", default=False, nargs='+', + help="Query which SIG manage the specific package") + parser.add_argument("-f", "--yaml", default="sig/sigs.yaml", + help="Local path of sigs.yaml file") + + args = parser.parse_args() + + try: + with open(args.yaml, 'r') as f: + sigs = yaml.load(f, Loader=yaml.Loader)['sigs'] + except: + print("Failed to load information from %s" % args.yaml) + parser.print_help() + exit(1) + + if args.list: + print_list(list_packages(sigs, args.list)) + elif args.list is None: + print_list(list_sigs(sigs)) + elif args.query_packages: + print_dict(package_to_sigs(sigs, args.query_packages)) + else: + pass + exit(0) + -- Gitee From 9a5d3a2f344525f35b3aa82cf292571005d3e62d Mon Sep 17 00:00:00 2001 From: Shinwell Hu Date: Sat, 22 Aug 2020 03:17:17 +0000 Subject: [PATCH 03/11] initial thoughts on openeuler review --- advisors/oe_review | 37 +++++++++++++++++++++++++++++++++++++ 1 file changed, 37 insertions(+) create mode 100755 advisors/oe_review diff --git a/advisors/oe_review b/advisors/oe_review new file mode 100755 index 00000000..e5ca6ad4 --- /dev/null +++ b/advisors/oe_review @@ -0,0 +1,37 @@ +#!/usr/bin/python3 +#****************************************************************************** +# Copyright (c) Huawei Technologies Co., Ltd. 2020-2020. All rights reserved. +# licensed under the Mulan PSL v2. +# You can use this software according to the terms and conditions of the Mulan PSL v2. +# You may obtain a copy of Mulan PSL v2 at: +# http://license.coscl.org.cn/MulanPSL2 +# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR +# PURPOSE. +# See the Mulan PSL v2 for more details. 
+# ******************************************************************************/ + +import argparse +import subprocess +import os + +if __name__ == "__main__": + pars = argparse.ArgumentParser() + pars.add_argument("-p", "--pull", type=str, help="Number ID of Pull Request", required=True) + pars.add_argument("repo", help="Repository to be reviewed") + pars.add_argument("-r", "--reuse", help="Reuse current local git dirctory", action="store_true") + + args = pars.parse_args() + + gitee_url_prefix = "git@gitee.com:src-openeuler/" + + if not args.reuse: + subprocess.call(["git", "clone", gitee_url_prefix + args.repo]) + rs = args.repo.split('/') + os.chdir(rs[1]) + + subprocess.call(["git", "fetch", gitee_url_prefix + args.repo, "pull/{n}/head:pr_{n}".format(n=args.pull)]) + + print("You are reviewing {repo} pull {n}".format(repo=args.repo, n=args.pull)) + print("Don't forget to try to merge master branch") + -- Gitee From 80df161c0b1f40e623bcc787d56b3d360201a0a2 Mon Sep 17 00:00:00 2001 From: Shinwell Hu Date: Sat, 22 Aug 2020 03:48:34 +0000 Subject: [PATCH 04/11] fix corner case where version info is empty --- advisors/check_upstream.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/advisors/check_upstream.py b/advisors/check_upstream.py index 22e3ccc5..873c614b 100755 --- a/advisors/check_upstream.py +++ b/advisors/check_upstream.py @@ -60,7 +60,7 @@ def clean_tags(tags, info): separator_regex = re.compile(info["seperator"]) result_list = [separator_regex.sub(".", x) for x in result_list] - result_list = [x for x in result_list if x[0].isdigit()] + result_list = [x for x in result_list if len(x) > 0 and x[0].isdigit()] return result_list -- Gitee From 1c6acdd4d50c57e7dd2920fadf80e919e76840d1 Mon Sep 17 00:00:00 2001 From: Shinwell Hu Date: Sat, 22 Aug 2020 03:50:55 +0000 Subject: [PATCH 05/11] fix unixODBC.yaml --- upstream-info/unixODBC.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/upstream-info/unixODBC.yaml b/upstream-info/unixODBC.yaml index a724d4d5..b6ec5934 100644 --- a/upstream-info/unixODBC.yaml +++ b/upstream-info/unixODBC.yaml @@ -1,4 +1,4 @@ version_control: svn -src_repo: https://svn.code.sf.net/p/unixodbc/code/ -tag_prefix: 2.3.0 +src_repo: https://svn.code.sf.net/p/unixodbc/code +tag_prefix: "" seperator: . 
-- Gitee From 5caa9cc8e7d37579fc1533a98625c200523db107 Mon Sep 17 00:00:00 2001 From: Shinwell Hu Date: Sat, 22 Aug 2020 04:03:30 +0000 Subject: [PATCH 06/11] cleanup code style --- advisors/psrtool.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/advisors/psrtool.py b/advisors/psrtool.py index 18e91084..e7f463ba 100755 --- a/advisors/psrtool.py +++ b/advisors/psrtool.py @@ -16,23 +16,23 @@ import yaml, argparse, re, os -def list_packages(sigs, sig_name): - for sig in sigs: +def list_packages(s, sig_name): + for sig in s: if sig['name'].lower() == sig_name.lower(): return sig['repositories'] -def list_sigs(sigs): +def list_sigs(s): result = [] - for sig in sigs: + for sig in s: result.append(sig['name']) return result -def package_to_sigs(sigs, pkg_names): +def package_to_sigs(s, pkg_names): result = {} for pkg in pkg_names: - for sig in sigs: + for sig in s: repos = sig['repositories'] for repo in repos: searchObj = re.search(pkg.lower(), repo.lower(), 0) @@ -64,7 +64,7 @@ if __name__ == "__main__": try: with open(args.yaml, 'r') as f: sigs = yaml.load(f, Loader=yaml.Loader)['sigs'] - except: + except IOError: print("Failed to load information from %s" % args.yaml) parser.print_help() exit(1) -- Gitee From ef1fc17ee6a536a22bf67fd4b60849101a50ba5e Mon Sep 17 00:00:00 2001 From: Shinwell Hu Date: Sat, 22 Aug 2020 05:03:35 +0000 Subject: [PATCH 07/11] clean up pylint hints --- advisors/psrtool.py | 77 ++++++++++++++++++++++++++++++--------------- 1 file changed, 52 insertions(+), 25 deletions(-) diff --git a/advisors/psrtool.py b/advisors/psrtool.py index e7f463ba..84dcbd17 100755 --- a/advisors/psrtool.py +++ b/advisors/psrtool.py @@ -10,60 +10,85 @@ # PURPOSE. # See the Mulan PSL v2 for more details. # -# This is a helper script provides a way to query or list packages in specific SIG. -# This script was inspired by previous work from @love_hangzhou # ******************************************************************************/ +""" +This is a helper script provides a way to query or list packages in specific SIG. +This script was inspired by previous work from @love_hangzhou +""" -import yaml, argparse, re, os +import re +import argparse +import yaml -def list_packages(s, sig_name): - for sig in s: +def list_packages(sigs, sig_name): + """ + List all packages managed by specific SIG + """ + for sig in sigs: if sig['name'].lower() == sig_name.lower(): return sig['repositories'] + return [] -def list_sigs(s): +def list_sigs(sigs): + """ + List all current SIGs + """ result = [] - for sig in s: + for sig in sigs: result.append(sig['name']) return result -def package_to_sigs(s, pkg_names): +def package_to_sigs(sigs, pkg_names): + """ + Query which SIG manages the packages. 
+ """ result = {} for pkg in pkg_names: - for sig in s: + for sig in sigs: repos = sig['repositories'] for repo in repos: - searchObj = re.search(pkg.lower(), repo.lower(), 0) - if searchObj: + search_obj = re.search(pkg.lower(), repo.lower(), 0) + if search_obj: result[repo] = sig['name'] return result -def print_list(l): - for i in l: +def print_list(lista): + """ + Helper for print list + """ + for i in lista: print(i) -def print_dict(d): - for k in d.keys(): - print(k + ": " + d[k]) -if __name__ == "__main__": +def print_dict(dicta): + """ + Helper for print dictionary + """ + for k in dicta.keys(): + print(k + ": " + dicta[k]) + + +def main(): + """ + Main entrance of functionality + """ parser = argparse.ArgumentParser() group = parser.add_mutually_exclusive_group(required=True) - group.add_argument("-l", "--list", default=False, nargs='?', - help="List packages managed by the specific SIG, or list all SIGs instead") - group.add_argument("-q", "--query_packages", default=False, nargs='+', - help="Query which SIG manage the specific package") - parser.add_argument("-f", "--yaml", default="sig/sigs.yaml", - help="Local path of sigs.yaml file") + group.add_argument("-l", "--list", default=False, nargs='?', + help="List packages managed by the specific SIG, or list all SIGs instead") + group.add_argument("-q", "--query_packages", default=False, nargs='+', + help="Query which SIG manage the specific package") + parser.add_argument("-f", "--yaml", default="sig/sigs.yaml", + help="Local path of sigs.yaml file") args = parser.parse_args() try: - with open(args.yaml, 'r') as f: - sigs = yaml.load(f, Loader=yaml.Loader)['sigs'] + with open(args.yaml, 'r') as yaml_file: + sigs = yaml.load(yaml_file, Loader=yaml.Loader)['sigs'] except IOError: print("Failed to load information from %s" % args.yaml) parser.print_help() @@ -79,3 +104,5 @@ if __name__ == "__main__": pass exit(0) +if __name__ == "__main__": + main() -- Gitee From 2b24e03afbc666d51036c5b43a1f02776a29cc69 Mon Sep 17 00:00:00 2001 From: Shinwell Hu Date: Mon, 24 Aug 2020 07:20:22 +0000 Subject: [PATCH 08/11] fix pylint issue of oe_review --- advisors/oe_review | 22 +++++++++++++++------- 1 file changed, 15 insertions(+), 7 deletions(-) diff --git a/advisors/oe_review b/advisors/oe_review index e5ca6ad4..5ceb938e 100755 --- a/advisors/oe_review +++ b/advisors/oe_review @@ -10,12 +10,17 @@ # PURPOSE. # See the Mulan PSL v2 for more details. 
# ******************************************************************************/ - +""" +Review tool for openEuler submission +""" +import os import argparse import subprocess -import os -if __name__ == "__main__": +def main(): + """ + Main entrance of the functionality + """ pars = argparse.ArgumentParser() pars.add_argument("-p", "--pull", type=str, help="Number ID of Pull Request", required=True) pars.add_argument("repo", help="Repository to be reviewed") @@ -27,11 +32,14 @@ if __name__ == "__main__": if not args.reuse: subprocess.call(["git", "clone", gitee_url_prefix + args.repo]) - rs = args.repo.split('/') - os.chdir(rs[1]) - - subprocess.call(["git", "fetch", gitee_url_prefix + args.repo, "pull/{n}/head:pr_{n}".format(n=args.pull)]) + os.chdir(args.repo.split('/')[1]) + + subprocess.call(["git", "fetch", + gitee_url_prefix + args.repo, + "pull/{n}/head:pr_{n}".format(n=args.pull)]) print("You are reviewing {repo} pull {n}".format(repo=args.repo, n=args.pull)) print("Don't forget to try to merge master branch") +if __name__ == "__main__": + main() -- Gitee From 2aacfdfec13e0306fc623d612e8368e328d070bb Mon Sep 17 00:00:00 2001 From: Shinwell Hu Date: Mon, 24 Aug 2020 07:51:20 +0000 Subject: [PATCH 09/11] cleanup pylint warning (mostly) --- advisors/check_abi.py | 140 ++++++++++++++++++++++-------------------- 1 file changed, 73 insertions(+), 67 deletions(-) diff --git a/advisors/check_abi.py b/advisors/check_abi.py index 45d9e6be..afe4b4c8 100755 --- a/advisors/check_abi.py +++ b/advisors/check_abi.py @@ -14,9 +14,10 @@ # ******************************************************************************/ """ -(1) This script is used to check the ABI changes between the old +(1) This script is used to check the ABI changes between the old and new versions of dynamic libraries. - The merged result on difference is saved in the xxx_all_abidiff.out file in the working directory + The merged result on difference is saved in the xxx_all_abidiff.out file in the working + directory. default path: /var/tmp/xxx_all_abidiff.out (2) This script depends on abidiff from libabigail package. @@ -31,7 +32,6 @@ import subprocess import sys import os import logging -import io import shutil import tempfile @@ -42,41 +42,44 @@ def parse_command_line(): parser.add_argument("-d", "--work_path", default="/var/tmp", nargs="?", help="The work path to put rpm2cpio files and results" " (e.g. 
/home/tmp_abidiff default: /var/tmp/)") - parser.add_argument("-a", "--show_all_info", action="store_true", default=False, + parser.add_argument("-a", "--show_all_info", action="store_true", default=False, help="show all infos includ changes in member name") parser.add_argument("-v", "--verbose", action="store_true", default=False, - help="Show additional information") + help="Show additional information") subparser = parser.add_subparsers(dest='command_name', - help="Compare between two RPMs or two .so files") + help="Compare between two RPMs or two .so files") rpm_parser = subparser.add_parser('compare_rpm', help="Compare between two RPMs") rpm_parser.add_argument("-r", "--rpms", required=True, nargs=2, - metavar=('old_rpm', 'new_rpm'), - help="Path or URL of both the old and new RPMs") - rpm_parser.add_argument("-d", "--debuginfo_rpm", nargs=2, - metavar=('old_debuginfo_rpm', 'new_debuginfo_rpm'), required=False, - help = "Path or URL of both the old and new debuginfo RPMs, corresponding to compared RPMs.") + metavar=('old_rpm', 'new_rpm'), + help="Path or URL of both the old and new RPMs") + rpm_parser.add_argument("-d", "--debuginfo_rpm", nargs=2, + metavar=('old_debuginfo_rpm', 'new_debuginfo_rpm'), + required=False, + help="Path or URL of both the old and new debuginfo RPMs," + "corresponding to compared RPMs.") rpm_parser.set_defaults(func=process_with_rpm) so_parser = subparser.add_parser('compare_so', help="Compare between two .so files") so_parser.add_argument("-s", "--sos", required=True, nargs=2, - metavar=('old_so', 'new_so'), - help="Path or URL of both the old and new .so files") + metavar=('old_so', 'new_so'), + help="Path or URL of both the old and new .so files") so_parser.add_argument("-f", "--debuginfo_path", nargs=2, required=False, - metavar=('old_debuginfo_path', 'new_debuginfo_path'), - help = "Path or URL of both the old and new debuginfo files, corresponding to compared .so files.") + metavar=('old_debuginfo_path', 'new_debuginfo_path'), + help="Path or URL of both the old and new debuginfo files," + "corresponding to compared .so files.") so_parser.set_defaults(func=process_with_so) - + config = parser.parse_args() - if config.command_name == None: + if config.command_name is None: parser.print_help() sys.exit(0) else: return config - + def list_so_files(path): """ Generate a list of all .so files in the directory. @@ -85,8 +88,8 @@ def list_so_files(path): # we cannot rely on number suffix for some .so files use complex version scheme. 
exception_list = ["hmac"] so_files = set() - for dirpath, dirnames, files in os.walk(path): - for filename in files: + for dirpath, _, files in os.walk(path): + for filename in files: fp = os.path.join(dirpath, filename) if os.path.islink(fp): continue @@ -124,18 +127,19 @@ def find_all_so_file(path1, path2): prev_left = previous_sos - prev_matched curr_left = current_sos - curr_matched - if len(prev_left) != 0: + if prev_left: logging.info("Unmatched .so file in previous version") logging.info("Usually means deleted .so in current version") logging.info("%s\n", prev_left) - if len(curr_left) != 0: + if curr_left: logging.info("Unmatched .so file in current version") logging.info("Usually means newly added .so in current version") logging.info("%s\n", curr_left) logging.debug("mapping of .so files:%s\n", all_so_pair) return all_so_pair - + + def make_abi_path(work_path, abipath): """ Get the path to put so file from rpm @@ -149,20 +153,20 @@ def make_abi_path(work_path, abipath): def get_rpm_path(rpm_url, dest): - """Get the path of rpm package""" + """Get the path of rpm package""" + rpm_path = "" if os.path.isfile(rpm_url): - abs_rpmpath = os.path.abspath(rpm_url) - logging.debug("rpm exists:%s", abs_rpmpath) - return abs_rpmpath + rpm_path = os.path.abspath(rpm_url) + logging.debug("rpm exists:%s", rpm_path) else: rpm_name = os.path.basename(rpm_url) rpm_path = os.path.join(dest, rpm_name) logging.debug("downloading %s......", rpm_name) - subprocess.call(["curl", rpm_url, "-L", - "--connect-timeout", "10", - "--max-time", "600", - "-sS", "-o", rpm_path]) - return rpm_path + subprocess.call(["curl", rpm_url, "-L", + "--connect-timeout", "10", + "--max-time", "600", + "-sS", "-o", rpm_path]) + return rpm_path def do_rpm2cpio(rpm2cpio_path, rpm_file): """ @@ -184,7 +188,7 @@ def merge_all_abidiff_files(all_abidiff_files, work_path, rpm_base_name): if os.path.exists(merged_file): subprocess.run("rm -rf {}".format(merged_file), shell=True) - ofile = open(merged_file, "a+") + ofile = open(merged_file, "a+") for diff_file in all_abidiff_files: diff_file_name = os.path.basename(diff_file) ofile.write("---------------diffs in {}:----------------\n".format(diff_file_name)) @@ -197,14 +201,14 @@ def do_abidiff(config, all_so_pair, work_path, base_name, debuginfo_path): """ Exec the abidiff and write result to files. return the abidiff returncode. 
- """ - if len(all_so_pair) == 0: + """ + if not all_so_pair: logging.info("There are no .so files to compare") sys.exit(0) if debuginfo_path: - logging.debug("old_debuginfo_path:%s\nnew_debuginfo_path:%s", - debuginfo_path[0], debuginfo_path[1]) + logging.debug("old_debuginfo_path:%s\nnew_debuginfo_path:%s", + debuginfo_path[0], debuginfo_path[1]) with_debuginfo = True else: with_debuginfo = False @@ -215,8 +219,9 @@ def do_abidiff(config, all_so_pair, work_path, base_name, debuginfo_path): new_so_file = all_so_pair[old_so_file] logging.debug("begin abidiff between %s and %s", old_so_file, new_so_file) - abidiff_file = os.path.join(work_path, - "{}_{}_abidiff.out".format(base_name, os.path.basename(new_so_file))) + abidiff_file = os.path.join(work_path, + "{}_{}_abidiff.out".format(base_name, + os.path.basename(new_so_file))) so_options = "{} {}".format(old_so_file, new_so_file) @@ -230,43 +235,43 @@ def do_abidiff(config, all_so_pair, work_path, base_name, debuginfo_path): else: debug_options = "" - abidiff_cmd = "abidiff {so_options} {debug_options} {additional_options} > {difffile}".format( - so_options=so_options, - debug_options=debug_options, - additional_options=additional_options, - difffile=abidiff_file) + abidiff_template = "abidiff {so_options} {debug_options} {additional_options} > {difffile}" + abidiff_cmd = abidiff_template.format(so_options=so_options, + debug_options=debug_options, + additional_options=additional_options, + difffile=abidiff_file) ret = subprocess.run(abidiff_cmd, shell=True) - + all_abidiff_files.append(abidiff_file) logging.info("result write in: %s", abidiff_file) return_code |= ret.returncode merged_file = merge_all_abidiff_files(all_abidiff_files, work_path, base_name) logging.info("all results writed in: %s", merged_file) - return return_code + return return_code + - def validate_sos(config): """ Validate the command arguments - """ + """ for so in config.sos: if not os.path.isfile(so) or ".so" not in so: logging.error(f"{so} not exists or not a .so file") - sys.exit(0) + sys.exit(0) if config.debuginfo_path: for d in config.debuginfo_path: if not os.path.exists(d): logging.error(f"{d} not exists") - sys.exit(0) - + sys.exit(0) + def check_result(returncode): """ Check the result of abidiff - """ + """ ABIDIFF_ERROR_BIT = 1 if returncode == 0: logging.info("No ABI differences found.") @@ -275,11 +280,11 @@ def check_result(returncode): else: logging.info("ABI differences found.") - + def process_with_rpm(config): """ Process the file with type of rpm. 
- """ + """ work_path = config.work_path temp_path = os.path.abspath(tempfile.mkdtemp(dir=work_path)) @@ -289,23 +294,24 @@ def process_with_rpm(config): rpm_path = [get_rpm_path(x[0], x[1]) for x in zip(config.rpms, abi_paths)] logging.debug("rpm_path:%s\n", rpm_path) - [do_rpm2cpio(x[0], x[1]) for x in zip(abi_paths, rpm_path)] + _ = [do_rpm2cpio(x[0], x[1]) for x in zip(abi_paths, rpm_path)] if config.debuginfo_rpm: - debuginfo_rpm_path = [get_rpm_path(x[0], x[1]) for x in zip(config.debuginfo_rpm, abi_paths)] + debuginfo_rpm_path = [get_rpm_path(x[0], x[1]) + for x in zip(config.debuginfo_rpm, abi_paths)] logging.debug("debuginfo_rpm_path:%s\n", debuginfo_rpm_path) - - [do_rpm2cpio(x[0], x[1]) for x in zip(abi_paths, debuginfo_rpm_path)] - + + _ = [do_rpm2cpio(x[0], x[1]) for x in zip(abi_paths, debuginfo_rpm_path)] + os.chdir(temp_path) logging.debug("\n----begin abidiff working in path:%s----", os.getcwd()) - - so_paths = [ os.path.join(x, "usr/lib64") for x in abi_paths ] + + so_paths = [os.path.join(x, "usr/lib64") for x in abi_paths] all_so_pairs = find_all_so_file(so_paths[0], so_paths[1]) - debuginfo_paths = [ os.path.join(x, "usr/lib/debug") for x in abi_paths ] + debuginfo_paths = [os.path.join(x, "usr/lib/debug") for x in abi_paths] rpm_base_name = os.path.basename(rpm_path[0]).split('.')[0] @@ -316,10 +322,10 @@ def process_with_rpm(config): return returncode -def process_with_so(config): +def process_with_so(config): """ Process the file with type of .so. - """ + """ validate_sos(config) work_path = config.work_path all_so_pair = {} @@ -327,7 +333,7 @@ def process_with_so(config): all_so_pair[so_path[0]] = so_path[1] os.chdir(work_path) logging.debug("\n----begin abidiff with .so working in path:%s----", os.getcwd()) - + so_base_name = os.path.basename(so_path[0]).split('.')[0] if config.debuginfo_path: debuginfo_path = list(map(os.path.abspath, config.debuginfo_path)) @@ -338,7 +344,7 @@ def process_with_so(config): check_result(returncode) return returncode - + def main(): """Entry point for check_abi""" config = parse_command_line() @@ -349,6 +355,6 @@ def main(): ret = config.func(config) sys.exit(ret) - + if __name__ == "__main__": main() -- Gitee From 2515236f085b87e8cdfbebe5355b6a124512bf40 Mon Sep 17 00:00:00 2001 From: Shinwell Hu Date: Mon, 24 Aug 2020 08:10:36 +0000 Subject: [PATCH 10/11] clean up pylint warning of oa_upgradable.py --- advisors/oa_upgradable.py | 44 +++++++++++++++++++++++---------------- 1 file changed, 26 insertions(+), 18 deletions(-) mode change 100644 => 100755 advisors/oa_upgradable.py diff --git a/advisors/oa_upgradable.py b/advisors/oa_upgradable.py old mode 100644 new mode 100755 index 78104437..5447e094 --- a/advisors/oa_upgradable.py +++ b/advisors/oa_upgradable.py @@ -2,16 +2,13 @@ """ This is a script to check upgradable information against upstream """ -from pyrpm.spec import Spec, replace_macros - -import yaml -import json -import datetime -import sys import os +import sys import argparse -import urllib.error +from pyrpm.spec import Spec, replace_macros + +import yaml import gitee import check_upstream @@ -22,7 +19,8 @@ def _get_rec_excpt(): """ Get except case of version recommend """ - y_file = open(os.path.join(os.path.dirname(os.path.abspath(__file__)), "helper/ver_rec_excpt.yaml")) + y_file = open(os.path.join(os.path.dirname(os.path.abspath(__file__)), + "helper/ver_rec_excpt.yaml")) excpt = yaml.load(y_file, Loader=yaml.Loader) return excpt @@ -31,10 +29,10 @@ def _filter_except(excpts, sources): """ Filter except case in 
sources """ - for e in excpts: - sources = [s for s in sources if e not in s] + for exp in excpts: + sources = [s for s in sources if exp not in s] return sources - + def get_ver_tags(gt, repo, cwd_path=None): """ @@ -44,7 +42,8 @@ def get_ver_tags(gt, repo, cwd_path=None): try: repo_yaml = open(os.path.join(cwd_path, repo + ".yaml")).read() except FileNotFoundError: - print("WARNING: {pkg}.yaml can't be found in local path: {path}.".format(pkg=repo, path=cwd_path)) + print("WARNING: {pkg}.yaml can't be found in local path: {path}.".format(pkg=repo, + path=cwd_path)) repo_yaml = gt.get_yaml(repo) else: repo_yaml = gt.get_yaml(repo) @@ -84,18 +83,21 @@ def get_ver_tags(gt, repo, cwd_path=None): excpt_list = _get_rec_excpt() if repo in excpt_list: - tags = _filter_except(excpt_list[repo], tags) + tags = _filter_except(excpt_list[repo], tags) return tags -if __name__ == "__main__": +def main(): + """ + Main entrance of the functionality + """ parameters = argparse.ArgumentParser() parameters.add_argument("-p", "--push", action="store_true", - help="Push the version bump as an issue to src-openeuler repository") + help="Push the version bump as an issue to src-openeuler repository") parameters.add_argument("-d", "--default", type=str, default=os.getcwd(), - help="The fallback place to look for YAML information") + help="The fallback place to look for YAML information") parameters.add_argument("repo", type=str, - help="Repository to be checked for upstream version info") + help="Repository to be checked for upstream version info") args = parameters.parse_args() @@ -140,4 +142,10 @@ Please consider upgrading. Yours openEuler Advisor. If you think this is not proper issue, Please visit https://gitee.com/openeuler/openEuler-Advisor. -Issues and feedbacks are welcome.""".format(repo=args.repo, ver=ver_rec.latest_version, cur_ver=cur_version)) +Issues and feedbacks are welcome.""".format(repo=args.repo, + ver=ver_rec.latest_version, + cur_ver=cur_version)) + + +if __name__ == "__main__": + main() -- Gitee From e30d5fe4f275b1f862582ec57e02d44782b273e5 Mon Sep 17 00:00:00 2001 From: Shinwell Hu Date: Mon, 24 Aug 2020 08:13:39 +0000 Subject: [PATCH 11/11] fix tc_statistic issue --- advisors/tc_statistic.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/advisors/tc_statistic.py b/advisors/tc_statistic.py index 89ec1854..c0b9d5a9 100755 --- a/advisors/tc_statistic.py +++ b/advisors/tc_statistic.py @@ -76,7 +76,7 @@ class Advisor(object): if num <= 100: list_all_url = list_all_url + "per_page={num}&page=1".format(num=num) - return self.get_json(url) + return self.get_json(list_all_url) list_all_url = list_all_url + "per_page=100&page=" @@ -128,7 +128,8 @@ if __name__ == "__main__": for t in tc_members: tc_statistic[t] = 0 PRs = adv.get_recent_prs(int(args.number)) - print("Statistic of recent {num} PRs".format(len(PRs)) + print("Statistic of recent {num} PRs".format(num=len(PRs))) + for pr in PRs: commenter = pr["user"]["login"] if commenter in tc_members: @@ -140,5 +141,5 @@ if __name__ == "__main__": tc_statistic[commenter] += 1 for tc in tc_statistic.keys(): - print("{tc} mades {num} comments".format(tc=tc, num=tc_statistic[tc])) + print("{tc} made {num} comments".format(tc=tc, num=tc_statistic[tc])) -- Gitee
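
For reviewers who want to exercise the new psrtool helpers outside the command line, a minimal sketch of driving them from Python: the sig/sigs.yaml path, the top-level "sigs" key, and the function signatures are taken from the psrtool.py patches above, while the SIG and package names passed in are placeholders.

#!/usr/bin/python3
# Illustrative sketch only: reuse the helpers from advisors/psrtool.py programmatically.
# Assumes this file sits next to psrtool.py and that a checkout of the community
# repository provides sig/sigs.yaml with the layout used in the patches above.
import yaml
import psrtool  # module added in PATCH 02 and refactored in PATCH 07

with open("sig/sigs.yaml", "r") as yaml_file:
    sigs = yaml.load(yaml_file, Loader=yaml.Loader)["sigs"]

# Every SIG currently defined in sigs.yaml.
print(psrtool.list_sigs(sigs))

# Repositories owned by one SIG; the lookup is case-insensitive.
# "Some-SIG-Name" is a placeholder, not a real SIG.
print(psrtool.list_packages(sigs, "Some-SIG-Name"))

# Map package names back to their owning SIGs. Matching is a substring
# regex search on lowercased names, so short names may match several repos.
print(psrtool.package_to_sigs(sigs, ["netpbm", "unixODBC"]))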