diff --git a/tools/rom_ram_analyzer/L0L1/README.md b/tools/rom_ram_analyzer/L0L1/README.md index b82fb565e6169406873f83eca93599a74147c677..ce3da518c03fff74dbf2ebb477895a662fa3cf6d 100644 --- a/tools/rom_ram_analyzer/L0L1/README.md +++ b/tools/rom_ram_analyzer/L0L1/README.md @@ -21,7 +21,7 @@ 前置条件: -1. 获取整个rom_ram_analyzer目录 +1. 获取整个本文件所在的整个目录 1. 对系统进行编译 1. linux平台 1. python3.8及以后 @@ -44,7 +44,4 @@ ## 后续工作 -1. 配置解耦,目前对config.yaml的解析分散在代码各处,不合理 -2. 配置文件按产品优化 -3. 部分log的输出有待优化 -4. hap_pack需要对hap_name进行处理 \ No newline at end of file +1. 部分log的输出有待优化 \ No newline at end of file diff --git a/tools/rom_ram_analyzer/L0L1/pkgs/basic_tool.py b/tools/rom_ram_analyzer/L0L1/pkgs/basic_tool.py index 5711a81320c240b688f4e6c0e00a4eb0a5a3dbcd..febe48256c9ef3290b2313426958ea4e1b549093 100644 --- a/tools/rom_ram_analyzer/L0L1/pkgs/basic_tool.py +++ b/tools/rom_ram_analyzer/L0L1/pkgs/basic_tool.py @@ -18,50 +18,14 @@ import itertools import os import re import glob -import logging from typing import * -# warnings.filterwarnings("always") - - def do_nothing(x: Any) -> Any: return x class BasicTool: - VERSION = 1.0 - - @classmethod - def contains_keywords(cls, wrapper: Any, key_words: tuple) -> bool: - """ - 判断target中是否包含key_words中的元素 - :param wrapper: 可以使用 x in y 语法的y - :param key_words: 待进行判断的关键词 - :return: 标识wrapper中是否包含key_words中元素的一个bool值 - """ - for k in key_words: - if k in wrapper: - return True - return False - - @classmethod - def is_empty_iter(cls, itera: Iterator) -> Tuple[Iterator, bool]: - """ - 判断iterator是否为空,因为会改变原来的iterator,所以需要返回一个和原来iterator相同的iter - :param itera: 迭代器 - :return: 包含原迭代器内容的迭代器,原迭代器是否为空 - """ - itera, peek = itertools.tee(itera) - empty_flag = False - try: - next(peek) - except StopIteration: - empty_flag = True - finally: - ... - return itera, empty_flag - @classmethod def find_files_with_pattern(cls, folder: str, pattern: str = "/**", recursive: bool = True, apply_abs: bool = True, real_path: bool = True, de_duplicate: bool = True, is_sort: bool = True, diff --git a/tools/rom_ram_analyzer/L0L1/pkgs/gn_common_tool.py b/tools/rom_ram_analyzer/L0L1/pkgs/gn_common_tool.py index 8dbc5476ae014fe5a033c53e141098ad1f95c2cd..b112957dbba7cd35b01320025c755062294b12fe 100644 --- a/tools/rom_ram_analyzer/L0L1/pkgs/gn_common_tool.py +++ b/tools/rom_ram_analyzer/L0L1/pkgs/gn_common_tool.py @@ -56,8 +56,8 @@ class GnCommonTool: def is_gn_variable(cls, target: str, quote_processed: bool = False): """ 判断target是否是gn中的变量: - 规则:如果是有引号的模式,则没有引号均认为是变量,有引号的情况下,如有是"$xxx"或${xx}的模式,则认为xxx是变量; - 如果是无引号模式,则只要$开头就认为是变量 + 规则:如果是quote_processed is False,则没有引号均认为是变量,有引号的情况下,如果是"$xxx"或${xx}的模式,则认为xxx是变量; + 如果quote_processed is True,则只要$开头就认为是变量 b = "xxx" c = b c = "${b}" @@ -114,7 +114,6 @@ class GnCommonTool: if val: not_found_count -= 1 var_val_dict[var] = val - # while (not path.endswith(stop_tail)) and not_found_count: while (stop_tail in path) and not_found_count: for v in var_name_tuple: pv = v.strip('"').lstrip("${").rstrip('}') @@ -122,7 +121,6 @@ class GnCommonTool: # 然后排除含有$符的 # 再取第一个 # 最后只取引号内的 - # backup:begin cmd = fr"grep -Ern '{pv} *= *\".*?\"' --include=*.gn* {path} | grep -Ev '\$' " \ r"| head -n 1 | grep -E '\".*\"' -wo" output = BasicTool.execute(cmd, lambda x: x.strip().strip('"')) @@ -135,69 +133,6 @@ class GnCommonTool: path, _ = os.path.split(path) return list(var_val_dict.values()) - @classmethod - def find_variables_in_gn_test(cls, var_name_tuple: tuple, path: str, stop_tail: str = "home", use_cache: bool = False) -> \ - List[str]: - """ - 同时查找多个gn变量的值 - 
var_name_tuple:变量名的tuple,变量名应是未经过处理后的,如: - xxx - "${xxx}" - "$xxx" - :param var_name_tuple: 待查找的变量名的列表 - :param path: 变量名所在文件的路径 - :param stop_tail: 当path以stop_tail结尾时,停止查找 - :param use_cache: 是否使用缓存 - :return: 变量值的列表 - """ - if os.path.isfile(path): - path, _ = os.path.split(path) - var_val_dict = collections.defaultdict(str) - not_found_count = len(var_name_tuple) - if use_cache: - for var in var_name_tuple: - val = GnCommonTool.__var_val_mem_dict[var] - if val: - not_found_count -= 1 - var_val_dict[var] = val - flag = "${updater_faultloggerd_cfg}" in var_name_tuple[0] - while not path.endswith(stop_tail) and not_found_count: - for v in var_name_tuple: - pv = v.strip('"').lstrip("${").rstrip('}') - # 先直接grep出pv *= *\".*?\"的 - # 然后排除含有$符的 - # 再取第一个 - # 最后只取引号内的 - cmd = fr"grep -Ern '{pv} *=' --include=*.gn* {path}" - cr = BasicTool.execute(cmd) - if not cr: - break - vfile = cr.split('\n')[0].split(':')[0] - with open(vfile, 'r', encoding='utf-8') as f: - output =GnVariableParser.string_parser(pv, f.read()) - if not output: - continue - not_found_count -= 1 - var_val_dict[v] = output - GnCommonTool.__var_val_mem_dict[v] = output - path, _ = os.path.split(path) - return list(var_val_dict.values()) - - @classmethod - def find_variable_in_gn(cls, var_name: str, path: str, stop_tail: str = "home", use_cache: bool = False): - """ - 查找变量的单个值 - :param use_cache: 是否使用cache - :param stop_tail: 结束查找的目录 - :param path: 开始查找的路径 - :param var_name: 变量名 - :return: 变量值(任意候选值之一) - """ - res = cls.find_variables_in_gn((var_name,), path, stop_tail, use_cache) - if res: - return res[0] - return "" - @classmethod def replace_gn_variables(cls, s: str, gn_path: str, stop_tail: str) -> str: """ @@ -244,98 +179,15 @@ class GnCommonTool: return result -class SubsystemComponentNameFinder: - @classmethod - def __find_subsystem_component_from_bundle(cls, gn_path: str, stop_tail: str = "home") -> Tuple[str, str]: - """ - 根据BUILD.gn的全路径,一层层往上面查找bundle.json文件, - 并从bundle.json中查找component_name和subsystem - :param gn_path: gn文件的路径 - :param stop_tail: 当查找到stop_tail的时候停止 - :return: 子系统名称,部件名 - """ - filename = "bundle.json" - component_name = str() - subsystem_name = str() - if stop_tail not in gn_path: - return subsystem_name, component_name - if os.path.isfile(gn_path): - gn_path, _ = os.path.split(gn_path) - while not gn_path.endswith(stop_tail): - bundle_path = os.path.join(gn_path, filename) - if not os.path.isfile(bundle_path): # 如果该文件不在该目录下 - gn_path = os.path.split(gn_path)[0] - continue - with open(bundle_path, 'r', encoding='utf-8') as f: - content = json.load(f) - try: - component_name = content["component"]["name"] - subsystem_name = content["component"]["subsystem"] - except KeyError: - logging.warning( - "not found component/name or component/subsystem in bundle.json") - finally: - break - return component_name, subsystem_name - - @classmethod - def find_subsystem_component(cls, gn_file: str, project_path: str) -> Tuple[str, str]: - """ - 查找gn_file对应的component_name和subsystem - 如果在gn中找不到,就到bundle.json中去找 - :param gn_file: gn文件路径 - :param project_path: 项目路径 - :return: 子系统名,部件名 - """ - part_var_flag = False # 标识这个变量从gn中取出的原始值是不是变量 - subsystem_var_flag = False - component_pattern = r"component_name *= *(.*)" - subsystem_pattern = r"subsystem_name *= *(.*)" - with open(gn_file, 'r', encoding='utf-8') as f: - content = f.read() - subsystem_name = BasicTool.re_group_1( - content, subsystem_pattern).strip() - component_name = BasicTool.re_group_1( - content, component_pattern).strip() - if len(component_name) != 0: - if 
GnCommonTool.is_gn_variable(component_name): - part_var_flag = True - else: - component_name = component_name.strip('"') - if len(subsystem_name) != 0: # 这里是只是看有没有grep到关键字 - if GnCommonTool.is_gn_variable(subsystem_name): - subsystem_var_flag = True - else: - subsystem_name = subsystem_name.strip('"') - if part_var_flag or subsystem_var_flag: - s, c = GnCommonTool.find_variables_in_gn( - (subsystem_name, component_name), gn_file, project_path) - if part_var_flag: - component_name = c - if subsystem_var_flag: - subsystem_name = s - if len(subsystem_name) != 0 and len(component_name) != 0: - return subsystem_name, component_name - # 如果有一个没有找到,就要一层层去找bundle.json文件 - t_component_name, t_subsystem_name = cls.__find_subsystem_component_from_bundle( - gn_file, stop_tail=project_path) - if len(t_component_name) != 0: - component_name = t_component_name - if len(t_subsystem_name) != 0: - subsystem_name = t_subsystem_name - return component_name, subsystem_name - - class GnVariableParser: @classmethod def string_parser(cls, var: str, content: str) -> str: """ - 解析值为字符串的变量,没有对引号进行去除 + 解析值为字符串的变量,没有对引号进行去除,如果是a = b这种b为变量的,则无法匹配 :param content: 要进行解析的内容 :param var: 变量名 :return: 变量值[str] """ - # result = BasicTool.re_group_1(content, r"{} *= *(.*)".format(var)) result = BasicTool.re_group_1( content, r"{} *= *[\n]?(\".*?\")".format(var), flags=re.S | re.M) return result @@ -343,7 +195,7 @@ class GnVariableParser: @classmethod def list_parser(cls, var: str, content: str) -> List[str]: """ - 解析值为列表的变量,list的元素必须全为数字或字符串,且没有对引号进行去除 + 解析值为列表的变量,list的元素必须全为数字或字符串,且没有对引号进行去除,如果是a = b这种b为变量的,则无法匹配 :param var: 变量名 :param content: 要进行 :return: 变量值[List] diff --git a/tools/rom_ram_analyzer/L0L1/src/config.py b/tools/rom_ram_analyzer/L0L1/src/config.py index c15326b00969701a431a2f5c52d1ddb7da7fd037..05f1a2623be8fa43fdaa95a8158e739717e8e1f8 100644 --- a/tools/rom_ram_analyzer/L0L1/src/config.py +++ b/tools/rom_ram_analyzer/L0L1/src/config.py @@ -8,23 +8,22 @@ import preprocess from pkgs.simple_yaml_tool import SimpleYamlTool from pkgs.basic_tool import do_nothing, BasicTool from get_subsystem_component import SC -from post_handlers import SOPostHandler, APostHandler, DefaultPostHandler, HAPPostHandler, LiteLibPostHandler, LiteLibS2MPostHandler -from template_processor import BaseProcessor, DefaultProcessor, StrResourceProcessor, ListResourceProcessor, LiteComponentPostHandler -from target_name_parser import * -from info_handlers import extension_handler, hap_name_handler, target_type_handler +from misc import * +from template_processor import * """ 只给rom_analysis.py使用 """ + def parse_args(): parser = argparse.ArgumentParser( description="analysis rom size of L0 and L1 product") - parser.add_argument("-p", "--product_name", type=str, default="ipcamera_hispark_taurus_linux", + parser.add_argument("-p", "--product_name", type=str, help="product name. 
eg: -p ipcamera_hispark_taurus") parser.add_argument("-o", "--oh_path", type=str, default=".", help="root path of openharmony") - parser.add_argument("-r", "--recollect_gn", type=bool, - default=True, help="if recollect gn info or not") + parser.add_argument("-g", "--recollect_gn", action="store_false", help="recollect gn info or not") + parser.add_argument("-s", "--recollect_sc", action="store_false", help="recollect subsystem_component info or not") args = parser.parse_args() return args @@ -38,11 +37,16 @@ result_dict: Dict[str, Any] = dict() project_path = BasicTool.abspath(_args.oh_path) product_name = _args.product_name recollect_gn = _args.recollect_gn -_sc_json: Dict[Text, Text] = configs.get("subsystem_component_json") +_recollect_sc = _args.recollect_sc +_sc_json: Dict[Text, Text] = configs.get("subsystem_component") _sc_save = _sc_json.get("save") _target_type = configs["target_type"] _sc_output_path = _sc_json.get("filename") -sub_com_dict: Dict = SC.run(project_path, _sc_output_path, _sc_save) +if _recollect_sc: + sub_com_dict: Dict = SC.run(project_path, _sc_output_path, _sc_save) +else: + with open(_sc_output_path, 'r', encoding='utf-8') as f: + sub_com_dict = json.load(f) collector_config: Tuple[BaseProcessor] = ( DefaultProcessor(project_path=project_path, # 项目根路径 @@ -124,7 +128,7 @@ collector_config: Tuple[BaseProcessor] = ( "extension": extension_handler, }, unit_post_handler=LiteLibPostHandler(), - S2MPostHandler=LiteLibS2MPostHandler, + ud_post_handler=LiteLibS2MPostHandler, ), DefaultProcessor(project_path=project_path, # hap有个hap_name result_dict=result_dict, diff --git a/tools/rom_ram_analyzer/L0L1/src/config.yaml b/tools/rom_ram_analyzer/L0L1/src/config.yaml index 3d307165a1998b07f3b4292fb8a5c9ac2d55769b..f6e0f3e653551836392bf289210574c7e2e1ce9c 100644 --- a/tools/rom_ram_analyzer/L0L1/src/config.yaml +++ b/tools/rom_ram_analyzer/L0L1/src/config.yaml @@ -11,18 +11,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-# root: -# project_path: ~/oh -# 从bundle.json中取出的component和subsystem_name的信息 -# used by get_subsystem_component.py config.py -subsystem_component_json: - save: true - filename: sub_com_info.json - -output_file: result -# gn info -gn_info_file: gn_info.json # 注意:如果target_type有了更改,要相应改变config.py中collector_config target_type: - shared_library @@ -40,49 +29,46 @@ target_type: - lite_component -# 要分析的编译产物的根目录及各类型对应的子目录 -product_dir: - ipcamera_hispark_taurus: +subsystem_component: + save: true + filename: sub_com_info.json +gn_info_file: gn_info.json + +ipcamera_hispark_taurus: + product_infofile: ipcamera_hispark_taurus_product.json # 编译产物的信息 + output_name: ipcamera_hispark_taurus_result.json # 输出文件的名字 + product_dir: # [required] root: out/hispark_taurus/ipcamera_hispark_taurus/rootfs - relative: + relative: bin: bin so: usr/lib etc: etc - rest: True # 是否分析其他目录下的并归到etc - ipcamera_hispark_taurus_linux: - root: out/hispark_taurus/ipcamera_hispark_taurus_linux/rootfs - relative: - bin: bin - so: usr/lib - etc: etc - rest: True - wifiiot_hispark_pegasus: - root: out/hispark_pegasus/wifiiot_hispark_pegasus - relative: - a: libs - etc: etc - rest: False -# 各类型文件的匹配顺序 -query_order: - ipcamera_hispark_taurus: + rest: True # 是否分析其他目录下的并归到etc(copy的文件) + query_order: so: - shared_library - ohos_shared_library - ohos_prebuilt_shared_library - lite_library - - lite_component - - a: - - static_library - - ohos_static_library - - lite_library - + - lite_component + bin: - executable - ohos_executable - lite_component - ipcamera_hispark_taurus_linux: + +ipcamera_hispark_taurus_linux: + product_infofile: ipcamera_hispark_taurus_linux_product.json + output_name: ipcamera_hispark_taurus_linux_result.json + product_dir: + root: out/hispark_taurus/ipcamera_hispark_taurus_linux/rootfs + relative: + bin: bin + so: usr/lib + etc: etc + rest: True + query_order: so: - shared_library - ohos_shared_library @@ -90,23 +76,55 @@ query_order: - lite_library - lite_component - a: - - static_library - - ohos_static_library - - lite_library - bin: - executable - ohos_executable - lite_component - wifiiot_hispark_pegasus: + +wifiiot_hispark_pegasus: + product_infofile: wifiiot_hispark_pegasus_product.json + output_name: wifiiot_hispark_pegasus_result.json + product_dir: + root: out/hispark_pegasus/wifiiot_hispark_pegasus + relative: + a: libs + etc: etc + rest: False + query_order: a: - static_library - ohos_static_library - lite_library +rk3568: # rk的目前从packages/phone/system_module_info.json中分析准确度更高,因为rk基本都使用的是ohos_xxx,而L0和L1的更多的是使用的gn原生target template + product_infofile: rk3568_product.json + output_name: rk3568_result.json + product_dir: + root: out/rk3568/packages/phone/system + relative: + so: lib + bin: bin + hap: app + etc: etc + rest: True + query_order: + so: + - ohos_shared_library + - shared_library + - ohos_prebuilt_shared_library + - lite_library + - lite_component + + bin: + - ohos_executable + - executable + - lite_component + + hap: + - ohos_hap + # extension and prefix of products default_extension: shared_library: .so diff --git a/tools/rom_ram_analyzer/L0L1/src/gn_lineno_collector.py b/tools/rom_ram_analyzer/L0L1/src/gn_lineno_collector.py deleted file mode 100644 index 05f54c011b4e2b02d513a6c6bca5e6c144506520..0000000000000000000000000000000000000000 --- a/tools/rom_ram_analyzer/L0L1/src/gn_lineno_collector.py +++ /dev/null @@ -1,38 +0,0 @@ -from typing import * -import os -from collections import defaultdict -import preprocess -from pkgs.basic_tool import BasicTool -from pkgs.simple_yaml_tool import 
SimpleYamlTool - - -def gn_lineno_collect(match_pattern: str, project_path: str) -> DefaultDict[str, List[int]]: - """ - 在整个项目路径下搜索有特定target类型的BUILD.gn - :param match_pattern: 进行grep的pattern,支持扩展的正则 - :param project_path: 项目路径(搜索路径) - :return: {gn_file: [line_no_1, line_no_2, ..]} - """ - config = SimpleYamlTool.read_yaml("config.yaml") - # project_path = config.get("project_path") - black_list = map(lambda x: os.path.join( - project_path, x), config.get("black_list")) - - def handler(content: Text) -> List[str]: - return list(filter(lambda y: len(y) > 0, list(map(lambda x: x.strip(), content.split("\n"))))) - - grep_list = BasicTool.grep_ern(match_pattern, path=project_path, include="BUILD.gn", exclude=tuple(black_list), - post_handler=handler) - gn_line_dict: DefaultDict[str, List[int]] = defaultdict(list) - for gl in grep_list: - gn_file, line_no, _ = gl.split(":") - gn_line_dict[gn_file].append(line_no) - return gn_line_dict - - -if __name__ == '__main__': - res = gn_lineno_collect( - "^( *)ohos_shared_library\(.*?\)", BasicTool.abspath(project_path)) - for k, v in res.items(): - if "oh/out" in k: - print("file={}, line_no={}".format(k, v)) diff --git a/tools/rom_ram_analyzer/L0L1/src/info_handlers.py b/tools/rom_ram_analyzer/L0L1/src/info_handlers.py deleted file mode 100644 index bfdd12fceac82b026f595288a46e16948ef64681..0000000000000000000000000000000000000000 --- a/tools/rom_ram_analyzer/L0L1/src/info_handlers.py +++ /dev/null @@ -1,19 +0,0 @@ -import logging -from typing import * -import preprocess -from pkgs.gn_common_tool import GnVariableParser - - -def extension_handler(paragraph: Text): - return GnVariableParser.string_parser("output_extension", paragraph).strip('"') - - -def hap_name_handler(paragraph: Text): - return GnVariableParser.string_parser("hap_name", paragraph).strip('"') - - -def target_type_handler(paragraph: Text): - tt = GnVariableParser.string_parser("target_type", paragraph).strip('"') - if not tt: - logging.info("parse 'target_type' failed, maybe it's a variable") - return tt diff --git a/tools/rom_ram_analyzer/L0L1/src/post_handlers.py b/tools/rom_ram_analyzer/L0L1/src/misc.py similarity index 55% rename from tools/rom_ram_analyzer/L0L1/src/post_handlers.py rename to tools/rom_ram_analyzer/L0L1/src/misc.py index c4cf9ac0d9314c339895dd4062987709b2f9af8e..d000507a5944a1712fe46caef25891ab68bfadd8 100644 --- a/tools/rom_ram_analyzer/L0L1/src/post_handlers.py +++ b/tools/rom_ram_analyzer/L0L1/src/misc.py @@ -1,11 +1,92 @@ -from typing import * -from abc import ABC, abstractmethod +import logging import copy +import os import logging +from abc import ABC, abstractmethod +from collections import defaultdict +from typing import * import preprocess +from pkgs.gn_common_tool import GnVariableParser from pkgs.simple_yaml_tool import SimpleYamlTool +from pkgs.basic_tool import BasicTool + + +_config = SimpleYamlTool.read_yaml("config.yaml") +""" +===============info handlers=============== +""" + + +def extension_handler(paragraph: Text): + return GnVariableParser.string_parser("output_extension", paragraph).strip('"') + + +def hap_name_handler(paragraph: Text): + return GnVariableParser.string_parser("hap_name", paragraph).strip('"') + + +def target_type_handler(paragraph: Text): + tt = GnVariableParser.string_parser("target_type", paragraph).strip('"') + if not tt: + logging.info("parse 'target_type' failed, maybe it's a variable") + return tt + + +""" +===============gn lineno collector=============== +""" + + +def gn_lineno_collect(match_pattern: str, project_path: 
str) -> DefaultDict[str, List[int]]: + """ + 在整个项目路径下搜索有特定target类型的BUILD.gn + :param match_pattern: 进行grep的pattern,支持扩展的正则 + :param project_path: 项目路径(搜索路径) + :return: {gn_file: [line_no_1, line_no_2, ..]} + """ + black_list = map(lambda x: os.path.join( + project_path, x), _config.get("black_list")) -_config = SimpleYamlTool.read_yaml("./config.yaml") + def handler(content: Text) -> List[str]: + return list(filter(lambda y: len(y) > 0, list(map(lambda x: x.strip(), content.split("\n"))))) + + grep_list = BasicTool.grep_ern(match_pattern, path=project_path, include="BUILD.gn", exclude=tuple(black_list), + post_handler=handler) + gn_line_dict: DefaultDict[str, List[int]] = defaultdict(list) + for gl in grep_list: + gn_file, line_no, _ = gl.split(":") + gn_line_dict[gn_file].append(line_no) + return gn_line_dict + + +""" +===============target name parser=============== +""" + + +class TargetNameParser: + @classmethod + def single_parser(cls, paragraph: Text) -> str: + """ + 查找类似shared_library("xxx")这种括号内只有一个参数的target的名称 + :param paragraph: 要解析的段落 + :return: target名称,如果是变量,不会对其进行解析 + """ + return BasicTool.re_group_1(paragraph, r"\w+\((.*)\)") + + @classmethod + def second_parser(cls, paragraph: Text) -> str: + """ + 查找类似target("shared_library","xxx")这种的target名称(括号内第二个参数) + :param paragraph: 要解析的段落 + :return: target名称,如果是变量,不会的其进行解析 + """ + return BasicTool.re_group_1(paragraph, r"\w+\(.*?, *(.*?)\)") + + +""" +===============post handlers=============== +""" class BasePostHandler(ABC): @@ -45,6 +126,8 @@ class SOPostHandler(BasePostHandler): extension = unit.get("extension") else: extension = _config.get("default_extension").get("shared_library") + if not extension.startswith('.'): + extension = '.'+extension if output_name.startswith(prefix): return output_name+extension return prefix+output_name+extension @@ -57,7 +140,9 @@ class APostHandler(BasePostHandler): def run(self, unit: Dict[str, AnyStr]): output_name = unit["output_name"] prefix = _config.get("default_prefix").get("static_library") - extension = _config.get("default_extension").get("static_library") + extension: str = _config.get("default_extension").get("static_library") + if not extension.startswith('.'): + extension = '.'+extension if output_name.startswith(prefix): return output_name+extension return prefix+output_name+extension @@ -79,6 +164,8 @@ class LiteLibPostHandler(BasePostHandler): else: prefix = str() extension = str() + if not extension.startswith('.'): + extension = '.'+extension if output_name.startswith(prefix): return output_name+extension return prefix+output_name+extension @@ -100,13 +187,20 @@ class LiteComponentPostHandler(BasePostHandler): unit["description"] = "virtual node" prefix = str() extension = str() + if not extension.startswith('.'): + extension = '.'+extension return prefix+output_name+extension -""" -==========================分割线=========================== -""" -def LiteLibS2MPostHandler(unit:Dict, result_dict:Dict)->None: +class TargetPostHandler(BasePostHandler): + """ + for target(a,b){}""" + + def run(self, unit: Dict[str, AnyStr]): + ... 
+ + +def LiteLibS2MPostHandler(unit: Dict, result_dict: Dict) -> None: rt = unit.get("real_target_type") new_unit = copy.deepcopy(unit) if rt == "shared_library": @@ -120,20 +214,15 @@ def LiteLibS2MPostHandler(unit:Dict, result_dict:Dict)->None: new_unit["description"] = "may not exist" result_dict["lite_library"][k] = new_unit else: - logging.warning(f"target type should be 'shared_library' or 'static_library', but got '{rt}'") + logging.warning( + f"target type should be 'shared_library' or 'static_library', but got '{rt}'") new_unit["real_target_type"] = "shared_library" k = LiteLibPostHandler()(new_unit) new_unit["description"] = "may not exist" result_dict["lite_library"][k] = new_unit - + new_new_unit = copy.deepcopy(unit) new_new_unit["real_target_type"] = "static_library" k = LiteLibPostHandler()(new_new_unit) new_new_unit["description"] = "may not exist" result_dict["lite_library"][k] = new_new_unit - - -if __name__ == '__main__': - h = SOPostHandler() - pseudo_d = {"output_name": "libmmp"} - print(h(pseudo_d)) diff --git a/tools/rom_ram_analyzer/L0L1/src/rom_analysis.py b/tools/rom_ram_analyzer/L0L1/src/rom_analysis.py index 11b8ce6da91ed62fed68e62ea14705034643516d..ba1c6e192a94470e0eaf648d0325afbd6994ce7d 100644 --- a/tools/rom_ram_analyzer/L0L1/src/rom_analysis.py +++ b/tools/rom_ram_analyzer/L0L1/src/rom_analysis.py @@ -11,12 +11,13 @@ from concurrent.futures import ThreadPoolExecutor, Future from threading import RLock import collections -from gn_lineno_collector import gn_lineno_collect -from config import result_dict, collector_config, configs, project_path, sub_com_dict, product_name, recollect_gn +from config import result_dict, collector_config, configs, \ + project_path, sub_com_dict, product_name, recollect_gn # from gn_info_collect import GnInfoCollector from pkgs.basic_tool import BasicTool from pkgs.gn_common_tool import GnCommonTool from pkgs.simple_excel_writer import SimpleExcelWriter +from misc import gn_lineno_collect """ @@ -30,17 +31,6 @@ from pkgs.simple_excel_writer import SimpleExcelWriter """ -# def parse_args(): -# parser = argparse.ArgumentParser( -# description="analysis rom size of L0 and L1 product") -# parser.add_argument("-p", "--product_name", type=str, default="ipcamera_hispark_taurus_linux", -# help="product name. 
eg: -p ipcamera_hispark_taurus") -# parser.add_argument("-r", "--recollect_gn", type=bool, -# default=True, help="if recollect gn info or not") -# args = parser.parse_args() -# return args - - class RomAnalysisTool: @classmethod def collect_gn_info(cls): @@ -55,9 +45,37 @@ class RomAnalysisTool: json.dump(result_dict, f, indent=4) @classmethod - def __find_files(cls, product_name: str) -> Dict[str, List[str]]: - product_dir: Dict[str, Dict] = configs["product_dir"].get( - f"{product_name}") + def _add_rest_dir(cls, top_dir: str, rela_path: str, sub_path: str, dir_list: List[str]) -> None: + """ + dir_list:相对于原始top目录的所有子目录的全路径 + """ + if not sub_path: + return + # 将其他目录添加到dir_list + all_subdir = os.listdir(os.path.join(top_dir, rela_path)) + for d in all_subdir: + t = os.path.join(rela_path, d) + if os.path.isdir(os.path.join(top_dir, t)) and t not in dir_list: + dir_list.append(t) + # 移除sub_path的当前层级的目录 + t = sub_path.split(os.sep) + if os.path.join(rela_path, t[0]) in dir_list: + dir_list.remove(os.path.join(rela_path, t[0])) + else: + logging.error( + f"'{os.path.join(rela_path,t[0])}' not in '{top_dir}'") + sp = str() + if len(t) == 1: + return + elif len(t) == 2: + sp = t[1] + else: + sp = os.path.join(*t[1:]) + cls._add_rest_dir(top_dir, os.path.join(rela_path, t[0]), sp, dir_list) + + @classmethod + def _find_files(cls, product_name: str) -> Dict[str, List[str]]: + product_dir: Dict[str, Dict] = configs[product_name]["product_dir"] if not product_name: logging.error( f"product_name '{product_name}' not found in the config.yaml") @@ -84,14 +102,7 @@ class RomAnalysisTool: rest_dir_list: List[str] = os.listdir( root_dir) # 除了配置在relative下之外的所有剩余目录,全部归到etc下 for v in relative_dir.values(): - # FIXME 对于配置文件中relative包含/的,如a/b/c,需要进一步特殊处理 - if '/' in v: - v = os.path.split(v)[0] - if v in rest_dir_list: - rest_dir_list.remove(v) - else: - logging.warning( - f"config error: {v} not found in {product_dir}") + cls._add_rest_dir(root_dir, str(), v, rest_dir_list) if "etc" not in product_dict.keys(): product_dict["etc"] = list() for r in rest_dir_list: @@ -101,8 +112,8 @@ class RomAnalysisTool: @classmethod def collect_product_info(cls, product_name: str): - product_dict: Dict[str, List[str]] = cls.__find_files(product_name) - with open(f"{product_name}_product.json", 'w', encoding='utf-8') as f: + product_dict: Dict[str, List[str]] = cls._find_files(product_name) + with open(configs[product_name]["product_infofile"], 'w', encoding='utf-8') as f: json.dump(product_dict, f, indent=4) return product_dict @@ -130,21 +141,27 @@ class RomAnalysisTool: rom_size_dict["size"] += size @classmethod - def _fuzzy_match(cls, file_name: str) -> Tuple[str, str, str]: + def _fuzzy_match(cls, file_name: str, extra_black_list: Tuple[str] = ("test",)) -> Tuple[str, str, str]: + """ + 直接grep,利用出现次数最多的BUILD.gn去定位subsystem_name和component_name""" _, base_name = os.path.split(file_name) if base_name.startswith("lib"): base_name = base_name[3:] if base_name.endswith(".a"): base_name = base_name[:base_name.index(".a")] - if base_name.endswith(".z.so"): + elif base_name.endswith(".z.so"): base_name = base_name[:base_name.index(".z.so")] elif base_name.endswith(".so"): base_name = base_name[:base_name.index(".so")] exclude_dir = [os.path.join(project_path, x) for x in configs["black_list"]] - exclude_dir.append("test") - grep_result: List[str] = BasicTool.grep_ern(base_name, project_path, include="BUILD.gn", exclude=tuple(exclude_dir - ), post_handler=lambda x: list(filter(lambda x: len(x) > 0, x.split('\n')))) + 
exclude_dir.extend(list(extra_black_list)) + grep_result: List[str] = BasicTool.grep_ern( + base_name, + project_path, + include="BUILD.gn", + exclude=tuple(exclude_dir), + post_handler=lambda x: list(filter(lambda x: len(x) > 0, x.split('\n')))) if not grep_result: return str(), str(), str() gn_dict: Dict[str, int] = collections.defaultdict(int) @@ -158,7 +175,7 @@ class RomAnalysisTool: return str(), str(), str() @classmethod - def save_as_xls(cls, result_dict: Dict, product_name: str) -> None: + def _save_as_xls(cls, result_dict: Dict, product_name: str) -> None: header = ["subsystem_name", "component_name", "output_file", "size(Byte)"] tmp_dict = copy.deepcopy(result_dict) @@ -199,11 +216,8 @@ class RomAnalysisTool: excel_writer.write_merge(subsystem_start_row, subsystem_col, subsystem_end_row, subsystem_col, subsystem_name) subsystem_start_row = subsystem_end_row + 1 - output_name = configs["output_file"] - ot, base_name = os.path.split(output_name) - ol = list(ot) - ol.append(product_name + "_" + base_name+".xls") - output_name = os.path.join(*ol) + output_name: str = configs[product_name]["output_name"] + output_name = output_name.replace(".json", ".xls") excel_writer.save(output_name) @ classmethod @@ -212,7 +226,7 @@ class RomAnalysisTool: with open(gn_info_file, 'r', encoding='utf-8') as f: gn_info = json.load(f) query_order: Dict[str, List[str] - ] = configs["query_order"][product_name] + ] = configs[product_name]["query_order"] query_order["etc"] = configs["target_type"] rom_size_dict: Dict = dict() # prodcut_dict: {"a":["a.txt", ...]} @@ -259,13 +273,9 @@ class RomAnalysisTool: "file_name": f.replace(project_path, ""), "size": size, }, rom_size_dict) - ot, base_output_filename = os.path.split(configs["output_file"]) - ol = list(ot) - ol.append(product_name + "_"+base_output_filename+".json") - output_file = os.path.join(*ol) - with open(output_file, 'w', encoding='utf-8') as f: + with open(configs[product_name]["output_name"], 'w', encoding='utf-8') as f: json.dump(rom_size_dict, f, indent=4) - cls.save_as_xls(rom_size_dict, product_name) + cls._save_as_xls(rom_size_dict, product_name) def main(): @@ -278,3 +288,8 @@ def main(): if __name__ == "__main__": main() + # t = os.listdir( + # "/home/aodongbiao/developtools_integration_verification/tools") + # RomAnalysisTool._add_rest_dir( + # "/home/aodongbiao/developtools_integration_verification/tools", "", "rom_ram_analyzer/L2/pkgs", t) + # print(t) diff --git a/tools/rom_ram_analyzer/L0L1/src/subsystem_component_helper.py b/tools/rom_ram_analyzer/L0L1/src/subsystem_component_helper.py deleted file mode 100644 index d3837bed7c21b7e0d0c4171933354ef2eff29fee..0000000000000000000000000000000000000000 --- a/tools/rom_ram_analyzer/L0L1/src/subsystem_component_helper.py +++ /dev/null @@ -1,94 +0,0 @@ -import os -import logging -import json -import re -from typing import * -from pkgs.gn_common_tool import GnCommonTool - - -class SubsystemComponentNameFinder: - @classmethod - def _find_subsystem_component_from_bundle(cls, gn_path: str, stop_tail: str = "home") -> Tuple[str, str]: - """ - 根据BUILD.gn的全路径,一层层往上面查找bundle.json文件, - 并从bundle.json中查找component_name和subsystem - """ - filename = "bundle.json" - subsystem_name = str() - component_name = str() - if stop_tail not in gn_path: - logging.error("{} not in {}".format(stop_tail, gn_path)) - return subsystem_name, component_name - if os.path.isfile(gn_path): - gn_path, _ = os.path.split(gn_path) - while not gn_path.endswith(stop_tail): - bundle_path = os.path.join(gn_path, filename) - if 
not os.path.isfile(bundle_path): # 如果该文件不在该目录下 - gn_path, _ = os.path.split(gn_path) - continue - with open(bundle_path, 'r', encoding='utf-8') as f: - content = json.load(f) - try: - component_name = content["component"]["name"] - subsystem_name = content["component"]["subsystem"] - except KeyError: - logging.warning( - "not found component/name or component/subsystem in bundle.json") - finally: - break - return subsystem_name, component_name - - @classmethod - def _parse_subsystem_component(cls, content: str) -> Tuple[Text, Text]: - """ - 从字符串中提取subsystem_name和component_name字段 - """ - subsystem_name = str() - component_name = str() - subsystem = re.search(r"subsystem_name *=\s*(\S*)", content) - part = re.search(r"component_name *=\s*(\S*)", content) - if subsystem: - subsystem_name = subsystem.group(1) - if part: - component_name = part.group(1) - return subsystem_name, component_name - - @classmethod - def find_part_subsystem(cls, gn_file: str, project_path: str) -> Tuple[Text, Text]: - """ - 查找gn_file对应的component_name和subsystem - 如果在gn中找不到,就到bundle.json中去找 - FIXME 一个gn文件中的target不一定属于同一个component,比如hap包 - """ - part_var_flag = False # 标识这个变量从gn中取出的原始值是不是变量 - subsystem_var_flag = False - var_list = list() - with open(gn_file, 'r', encoding='utf-8') as f: - subsystem_name, component_name = cls._parse_subsystem_component(f.read()) - if len(component_name) != 0 and GnCommonTool.is_gn_variable(component_name): - part_var_flag = True - var_list.append(component_name) - - if len(subsystem_name) != 0 and GnCommonTool.is_gn_variable(subsystem_name): - subsystem_var_flag = True - var_list.append(subsystem_name) - - if part_var_flag and subsystem_var_flag: - component_name, subsystem_name = GnCommonTool.find_variables_in_gn( - tuple(var_list), gn_file, project_path) - elif part_var_flag: - component_name = GnCommonTool.find_variables_in_gn( - tuple(var_list), gn_file, project_path)[0] - elif subsystem_var_flag: - subsystem_name = GnCommonTool.find_variables_in_gn( - tuple(var_list), gn_file, project_path)[0] - if len(component_name) != 0 and len(subsystem_name) != 0: - return component_name, subsystem_name - # 如果有一个没有找到,就要一层层去找bundle.json文件 - t_component_name, t_subsystem_name = cls._find_subsystem_component_from_bundle( - gn_file, stop_tail=project_path) - if len(component_name) == 0: - component_name = t_component_name - if len(subsystem_name) == 0: - subsystem_name = t_subsystem_name - return component_name, subsystem_name diff --git a/tools/rom_ram_analyzer/L0L1/src/target_name_parser.py b/tools/rom_ram_analyzer/L0L1/src/target_name_parser.py deleted file mode 100644 index 9d1f84bb0e8bac624d4ccbc2c2bf61df2102233a..0000000000000000000000000000000000000000 --- a/tools/rom_ram_analyzer/L0L1/src/target_name_parser.py +++ /dev/null @@ -1,25 +0,0 @@ -import sys -from typing import * - -import preprocess -from pkgs.basic_tool import BasicTool - - -class TargetNameParser: - @classmethod - def single_parser(cls, paragraph: Text) -> str: - """ - 查找类似shared_library("xxx")这种括号内只有一个参数的target的名称 - :param paragraph: 要解析的段落 - :return: target名称,如果是变量,不会对其进行解析 - """ - return BasicTool.re_group_1(paragraph, r"\w+\((.*)\)") - - @classmethod - def second_parser(cls, paragraph: Text) -> str: - """ - 查找类似target("shared_library","xxx")这种的target名称(括号内第二个参数) - :param paragraph: 要解析的段落 - :return: target名称,如果是变量,不会的其进行解析 - """ - return BasicTool.re_group_1(paragraph, r"\w+\(.*?, *(.*?)\)") diff --git a/tools/rom_ram_analyzer/L0L1/src/template_processor.py b/tools/rom_ram_analyzer/L0L1/src/template_processor.py index 
47d56970502504160b56e429990faa332b9f74b3..e4124c83a08293124b60b8ec7751db089af5da80 100644 --- a/tools/rom_ram_analyzer/L0L1/src/template_processor.py +++ b/tools/rom_ram_analyzer/L0L1/src/template_processor.py @@ -13,17 +13,14 @@ # limitations under the License. # -from threading import RLock from typing import * from abc import ABC, abstractmethod -from collections import defaultdict import os import logging -from gn_lineno_collector import gn_lineno_collect from pkgs.basic_tool import do_nothing, BasicTool -from pkgs.gn_common_tool import GnCommonTool, SubsystemComponentNameFinder, GnVariableParser -from post_handlers import * +from pkgs.gn_common_tool import GnCommonTool, GnVariableParser +from misc import * TYPE = Literal["str", "list"] @@ -50,7 +47,7 @@ class BaseProcessor(ABC): Text], Union[str, list]]] = dict(), unit_post_handler: BasePostHandler = do_nothing, resource_field: str = None, - S2MPostHandler: Callable[[Dict, Dict], None] = None + ud_post_handler: Callable[[Dict, Dict], None] = None ): """ :param project_path: 项目根路径 @@ -63,7 +60,7 @@ class BaseProcessor(ABC): SourceParser是对target段落进行分析处理的Callable,接受一个字符串作为参数 :param unit_post_handler: 对最终要存储的结果字典进行后处理,应当返回一个字符串作为存储时的key,且该key应为预期产物去除前后缀后的名字 :resource_field: 针对资源类target,资源字段,如files = ["a.txt","b.txt"],则field为files - :S2MPostHandler: 将一个target保存为多个多个的处理器 + :ud_post_handler: 参数为unit和result_dict的handler """ if target_type not in result_dict.keys(): result_dict[target_type] = dict() @@ -78,7 +75,7 @@ class BaseProcessor(ABC): self.other_info_handlers = other_info_handlers self.unit_post_handler = unit_post_handler self.resource_field = resource_field - self.S2MPostHandler = S2MPostHandler + self.ud_post_handler = ud_post_handler def _append(self, key: str, unit: Dict) -> None: """ @@ -109,43 +106,32 @@ class BaseProcessor(ABC): self.run() +def _gn_var_process(project_path: str, gn_v: str, alt_v: str, gn_path: str, ifrom: str, efrom: str, strip_quote: bool = False) -> Tuple[str, str]: + if strip_quote: + gn_v = gn_v.strip('"') + if gn_v: + if GnCommonTool.contains_gn_variable(gn_v): + gn_v = GnCommonTool.replace_gn_variables( + gn_v, gn_path, project_path).strip('"') + else: + gn_v = gn_v.strip('"') + gn_f = ifrom + else: + gn_v = alt_v + gn_f = efrom + return gn_v, gn_f + + class DefaultProcessor(BaseProcessor): def helper(self, target_name: str, paragraph: str, gn_path: str, line_no: int, _sub: str, _com: str) -> Tuple[str]: output_name = GnVariableParser.string_parser("output_name", paragraph) - if output_name.strip('"'): - if GnCommonTool.contains_gn_variable(output_name): - output_name = GnCommonTool.replace_gn_variables( - output_name, gn_path, self.project_path).strip('"') - out_from = "output_name" - else: - output_name = output_name.strip('"') - out_from = "target_name" - else: - output_name = target_name - out_from = "target_name" + output_name, out_from = _gn_var_process(self.project_path, + output_name, target_name, gn_path, "target_name", "target_name", True) sub = GnVariableParser.string_parser("subsystem_name", paragraph) com = GnVariableParser.string_parser("part_name", paragraph) - if sub.strip('"'): - if GnCommonTool.contains_gn_variable(sub): - sub = GnCommonTool.replace_gn_variables( - sub, gn_path, self.project_path).strip('"') - else: - sub = sub.strip('"') - sub_from = "gn" - else: - sub = _sub - sub_from = "json" - if com.strip('"'): - if GnCommonTool.contains_gn_variable(com): - com = GnCommonTool.replace_gn_variables( - com, gn_path, self.project_path).strip('"') - else: - com = com.strip('"') - 
com_from = "gn" - else: - com = _com - com_from = "json" + sub, sub_from = _gn_var_process(self.project_path, sub, _sub, gn_path, "gn", "json", True) + com, com_from = _gn_var_process(self.project_path, com, _com, gn_path, "gn", "json", True) result = { "gn_path": gn_path, "target_type": self.target_type, @@ -162,8 +148,8 @@ class DefaultProcessor(BaseProcessor): result[k] = h(paragraph) key = self.unit_post_handler(result) self._append(key, result) - if self.S2MPostHandler: - self.S2MPostHandler(result, self.result_dict) + if self.ud_post_handler: + self.ud_post_handler(result, self.result_dict) def run(self): for gn_path, line_no_list in self.gn_file_line_no_dict.items(): @@ -193,38 +179,17 @@ class StrResourceProcessor(DefaultProcessor): def helper(self, target_name: str, paragraph: str, gn_path: str, line_no: int, _sub: str, _com: str) -> Tuple[str]: resources = GnVariableParser.string_parser( self.resource_field, paragraph) - if not resources.strip('"'): + if not resources: return + _, resources = os.path.split(resources.strip('"')) + if GnCommonTool.contains_gn_variable(resources): resources = GnCommonTool.replace_gn_variables( resources, gn_path, self.project_path).strip('"') - # FIXME 如果出现换行导致的在replace_gn_variables里面没有查找到变量的对应值,则直接取target_name作为resources - if GnCommonTool.contains_gn_variable(resources): - resources = target_name - else: - resources = resources.strip('"') sub = GnVariableParser.string_parser("subsystem_name", paragraph) com = GnVariableParser.string_parser("part_name", paragraph) - if sub: - if GnCommonTool.contains_gn_variable(sub): - sub = GnCommonTool.replace_gn_variables( - sub, gn_path, self.project_path).strip('"') - else: - sub = sub.strip('"') - sub_from = "gn" - else: - sub = _sub - sub_from = "json" - if com: - if GnCommonTool.contains_gn_variable(com): - com = GnCommonTool.replace_gn_variables( - com, gn_path, self.project_path).strip('"') - else: - com = com.strip('"') - com_from = "gn" - else: - com = _com - com_from = "json" + sub, sub_from = _gn_var_process(self.project_path, sub, _sub, gn_path, "gn", "json") + com, com_from = _gn_var_process(self.project_path, com, _com, gn_path, "gn", "json") _, file_name = os.path.split(resources) result = { "gn_path": gn_path, @@ -245,6 +210,7 @@ class StrResourceProcessor(DefaultProcessor): class ListResourceProcessor(DefaultProcessor): + def helper(self, target_name: str, paragraph: str, gn_path: str, line_no: int, _sub: str, _com: str) -> Tuple[str]: resources = GnVariableParser.list_parser( self.resource_field, paragraph) @@ -252,26 +218,8 @@ class ListResourceProcessor(DefaultProcessor): return sub = GnVariableParser.string_parser("subsystem_name", paragraph) com = GnVariableParser.string_parser("part_name", paragraph) - if sub: - if GnCommonTool.contains_gn_variable(sub): - sub = GnCommonTool.replace_gn_variables( - sub, gn_path, self.project_path).strip('"') - else: - sub = sub.strip('"') - sub_from = "gn" - else: - sub = _sub - sub_from = "json" - if com: - if GnCommonTool.contains_gn_variable(com): - com = GnCommonTool.replace_gn_variables( - com, gn_path, self.project_path).strip('"') - else: - com = com.strip('"') - com_from = "gn" - else: - com = _com - com_from = "json" + sub, sub_from = _gn_var_process(self.project_path, sub, _sub, gn_path, "gn", "json") + com, com_from = _gn_var_process(self.project_path, com, _com, gn_path, "gn", "json") for ff in resources: _, file_name = os.path.split(ff) result = { diff --git a/tools/rom_ram_analyzer/L2/README.md b/tools/rom_ram_analyzer/L2/README.md index 
74965836418a8d4d10e7585fe26789808385775b..52bc5a909162a30ed782103fee55e03f874794f0 100644 --- a/tools/rom_ram_analyzer/L2/README.md +++ b/tools/rom_ram_analyzer/L2/README.md @@ -117,7 +117,7 @@ -n DEVICE_NUM, --device_num DEVICE_NUM device number to be collect hidumper info. eg: -n 7001005458323933328a01fce16d3800 -o OUTPUT_FILENAME, --output_filename OUTPUT_FILENAME - base name of output file, default: rom_analysis_result. eg: -o ram_analysis_result + base name of output file, default: ram_analysis_result. eg: -o ram_analysis_result -e EXCEL, --excel EXCEL if output result as excel, default: False. eg: -e True ``` diff --git a/tools/rom_ram_analyzer/L2/pkgs/basic_tool.py b/tools/rom_ram_analyzer/L2/pkgs/basic_tool.py index 18941d90270215cda4b4a0a6802bedb4078228c4..b4d548eedd9e0bfcb51a117b2b6845454786f197 100644 --- a/tools/rom_ram_analyzer/L2/pkgs/basic_tool.py +++ b/tools/rom_ram_analyzer/L2/pkgs/basic_tool.py @@ -1,6 +1,7 @@ import sys import typing import os +import glob from pathlib import Path from typing import * diff --git a/tools/rom_ram_analyzer/L2/ram_analyzer.py b/tools/rom_ram_analyzer/L2/ram_analyzer.py index efd8faf3f1960d04720691e28c625884d26ab4d8..6ecd382857d1e9b76ca1e65edf506cfbbeace3ad 100644 --- a/tools/rom_ram_analyzer/L2/ram_analyzer.py +++ b/tools/rom_ram_analyzer/L2/ram_analyzer.py @@ -39,10 +39,9 @@ class HDCTool: stderr = str(cp.stderr) return device_num in stderr or device_num in stdout - __MODE = typing.Literal["stdout", "stderr"] @classmethod - def exec(cls, args: list, output_from: __MODE = "stdout"): + def exec(cls, args: list, output_from: str = "stdout"): cp = subprocess.run(args, capture_output=True) if output_from == "stdout": return cp.stdout.decode() @@ -71,7 +70,6 @@ class RamAnalyzer: blank_pattern = re.compile(r"\s+(?#匹配一个或多个空格)") return re.sub(blank_pattern, ' ', content.strip()).split() - __SS_Mode = typing.Literal["Pss", "Vss", "Rss", "Uss"] # 提示输入 __ss_dict: typing.Dict[str, int] = { "Pss": 2, "Vss": 3, @@ -80,7 +78,7 @@ class RamAnalyzer: } @classmethod - def __parse_hidumper_mem(cls, content: typing.Text, device_num: str, ss: __SS_Mode = "Pss") -> typing.Dict[ + def __parse_hidumper_mem(cls, content: typing.Text, device_num: str, ss: str = "Pss") -> typing.Dict[ typing.Text, int]: """ 解析:hidumper --meme的结果 @@ -408,7 +406,7 @@ def get_args(): parser.add_argument("-n", "--device_num", type=str, required=True, help="device number to be collect hidumper info. eg: -n 7001005458323933328a01fce16d3800") parser.add_argument("-o", "--output_filename", default="ram_analysis_result", type=str, - help="base name of output file, default: rom_analysis_result. eg: -o ram_analysis_result") + help="base name of output file, default: ram_analysis_result. eg: -o ram_analysis_result") parser.add_argument("-e", "--excel", type=bool, default=False, help="if output result as excel, default: False. eg: -e True") args = parser.parse_args()
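
A note on the parser behaviour the updated docstrings describe: GnVariableParser.string_parser only matches quoted string assignments, which is why `a = b` (a value that is itself a variable) is documented as unmatchable. A minimal sketch of that behaviour, reusing the regex from the patch and approximating BasicTool.re_group_1 with a plain re.search (the helper name and snippet below are illustrative only):

```python
import re

def string_parser(var: str, content: str) -> str:
    # Same pattern as GnVariableParser.string_parser; quotes are kept on purpose.
    match = re.search(r"{} *= *[\n]?(\".*?\")".format(var), content, flags=re.S | re.M)
    return match.group(1) if match else ""

gn_snippet = '''
ohos_shared_library("foo") {
  output_name = "libfoo_impl"
}
shared_library("bar") {
  output_name = bar_name
}
'''
# Prints '"libfoo_impl"'; the unquoted bar_name assignment is never matched by design.
print(string_parser("output_name", gn_snippet))
```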
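
The new `-g/--recollect_gn` and `-s/--recollect_sc` options in config.py use `action="store_false"`, so both default to True and passing a flag disables re-collection (the cached gn_info.json / sub_com_info.json are then reused). A short sketch of that inferred semantics; treat it as an assumption rather than documented behaviour:

```python
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("-g", "--recollect_gn", action="store_false")
parser.add_argument("-s", "--recollect_sc", action="store_false")

# store_false flags default to True; supplying the flag turns collection off.
print(parser.parse_args([]))            # Namespace(recollect_gn=True, recollect_sc=True)
print(parser.parse_args(["-g", "-s"]))  # Namespace(recollect_gn=False, recollect_sc=False)
```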
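
config.yaml is now keyed by product name, with product_dir, query_order, product_infofile and output_name nested under each product instead of the old top-level product_dir/query_order maps. A minimal sketch of how the reworked layout is consumed, using PyYAML only for illustration (the tool itself reads it through SimpleYamlTool.read_yaml):

```python
import yaml

with open("config.yaml", "r", encoding="utf-8") as f:
    configs = yaml.safe_load(f)

product = configs["ipcamera_hispark_taurus"]        # product selected via -p
root_dir = product["product_dir"]["root"]           # out/hispark_taurus/ipcamera_hispark_taurus/rootfs
relative = product["product_dir"]["relative"]       # {"bin": "bin", "so": "usr/lib", "etc": "etc"}
query_order = product["query_order"]                # file type -> ordered gn target types to try
info_file = product["product_infofile"]             # ipcamera_hispark_taurus_product.json
result_json = product["output_name"]                # ipcamera_hispark_taurus_result.json
result_xls = result_json.replace(".json", ".xls")   # how _save_as_xls derives the excel file name
print(root_dir, relative, list(query_order), info_file, result_json, result_xls)
```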
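
The post handlers in misc.py now normalise the configured extension so that both "so" and ".so" in default_extension work. The assembled artifact name still follows the prefix/output_name/extension rule; below is a condensed sketch of that rule, where make_artifact_name is a hypothetical helper standing in for SOPostHandler/APostHandler.run (the real handlers pull prefix and extension from default_prefix/default_extension in config.yaml):

```python
def make_artifact_name(output_name: str, prefix: str = "lib", extension: str = "so") -> str:
    if not extension.startswith('.'):
        extension = '.' + extension      # normalisation added by this patch: tolerate "so" as well as ".so"
    if output_name.startswith(prefix):   # avoids producing names like "liblibmmp.so"
        return output_name + extension
    return prefix + output_name + extension

assert make_artifact_name("mmp") == "libmmp.so"
assert make_artifact_name("libmmp") == "libmmp.so"
```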