diff --git a/DeployDevice/src/controller.py b/DeployDevice/src/controller.py
index babf2b3fd8deefcdd1f2a34e8aa6a2a53430f67f..6b8715727969e0aa49adc3f368b0d77fbce64a56 100644
--- a/DeployDevice/src/controller.py
+++ b/DeployDevice/src/controller.py
@@ -16,6 +16,7 @@ from core.run import *
from aw.Common.Constant import CONSTANT
from util.log_info import logger
+
if __name__ == "__main__":
param_file = sys.argv[1]
@@ -26,6 +27,12 @@ if __name__ == "__main__":
errcode = start(cust_steps)
if errcode != 0:
logger.printLog("执行失败:错误信息:" + CONSTANT.ENVERRMESSAGE)
- logger.printLog(errcode)
+ logger.printLog("UPGRADE_STATUS_%s" % errcode)
+ from func.liteOsUpgrade.liteOsUpgrade_RK3568_app import total_time
+ try:
+ with open(param_file, "a+") as f:
+ f.write("\nstatus=UPGRADE_STATUS_%s\nsmoke_duration=%s" % (errcode, total_time))
+ except Exception as p:
+ logger.error(p)
os._exit(errcode)
diff --git a/DeployDevice/src/core/run.py b/DeployDevice/src/core/run.py
index acc0bf07aba705aff155916cd60aa910d5bf70cf..7986abdb1909c64c59aea3dbc932d32dbe4e4140 100644
--- a/DeployDevice/src/core/run.py
+++ b/DeployDevice/src/core/run.py
@@ -58,19 +58,19 @@ def start(param):
scriptpath =os.path.dirname(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
loader_tool_path = os.path.join(scriptpath, "resource", "RK3568_tool", "upgrade_tool.exe")
logger.info(loader_tool_path)
- mycmd = "%s LD" % loader_tool_path
- try:
- num = send_cmd(mycmd)
- if num != 2:
- logger.info("try again!")
- time.sleep(40)
- num = send_cmd(mycmd)
- if num != 2:
- logger.error("有设备断连,全部处置为真隔离状态!")
- errorcode = 200
- except Exception as f:
- logger.error(f)
- logger.info(errorcode)
+# DISABLED(review): loader-mode ("LD") pre-check intentionally skipped? confirm and delete this dead block before merge. mycmd = "%s LD" % loader_tool_path
+# try:
+# num = send_cmd(mycmd)
+# if num != 2:
+# logger.info("try again!")
+# time.sleep(40)
+# num = send_cmd(mycmd)
+# if num != 2:
+# logger.error("有设备断连,全部处置为真隔离状态!")
+# errorcode = 200
+# except Exception as f:
+# logger.error(f)
+# logger.info(errorcode)
if errorcode == 99:
logger.error("upgrade success ,but Smoke failed, the fatal problem occurs.")
return 99
diff --git a/DeployDevice/src/func/liteOsUpgrade/liteOsUpgrade_RK3568_app-926.py b/DeployDevice/src/func/liteOsUpgrade/liteOsUpgrade_RK3568_app-926.py
deleted file mode 100644
index 849dbc0bd4f2b9ba998fce8cc1334df8646ebcc1..0000000000000000000000000000000000000000
--- a/DeployDevice/src/func/liteOsUpgrade/liteOsUpgrade_RK3568_app-926.py
+++ /dev/null
@@ -1,511 +0,0 @@
-# -*- coding:utf-8 -*-
-import uuid
-import sys
-import subprocess
-import os
-import time
-import re
-import shutil
-import random
-
-
-from core.base import BaseApp, dec_stepmsg
-from util.file_locker import FileLock
-from util.log_info import logger
-from util.time_info import get_now_time_str_info, get_now_time_info, Timeout, timeout
-from aw.Download.Download import *
-from aw.Common.Constant import CONSTANT
-from aw.Common.Common import getFileName
-from aw.ExtractFile.ExtractFile import *
-from aw.Common.Common import getHostIp, copyFile, copyDirectory
-
-lock_suffix = CONSTANT.File.LOCK_SUFFIX
-suc_file = CONSTANT.File.SUC_FILE
-failed_file = CONSTANT.File.FAILED_FILE
-REBOOT_TIMEOUT = 20000000
-
-
-class liteOsUpgrade_RK3568(BaseApp):
- '''
- @author: cwx1076044
- '''
-
- def __init__(self, param_file):
- super().__init__(param_file)
- self.param_List = ["upgrade_upgradeLocation", "sn"]
-
- @dec_stepmsg("hongmeng RK3568 flash")
- def excute(self):
- '''
- #===================================================================================
- # @Method: excute(self)
- # @Precondition: none
- # @Func: 升级执行入口
- # @PostStatus: none
- # @eg: excute()
- # @return: True or Flase
- #===================================================================================
- '''
- step_index = self.params_dict.get("step_list").index("liteOsUpgrade_RK3568_app")
-
- # 执行下载
- try:
- if not self.download():
- CONSTANT.ENVERRMESSAGE = "image download fail"
- logger.printLog(CONSTANT.ENVERRMESSAGE)
- return False
- except Exception as e:
- logger.error(e)
- raise e
-
- # 执行升级
- try:
- return_code = self.upgrade()
- if not return_code:
- CONSTANT.ENVERRMESSAGE = "board upgrade fail"
- logger.printLog(CONSTANT.ENVERRMESSAGE)
- return False
- if return_code == 98:
- return 98
- if return_code == 99:
- return 99
- return True
- except Exception as e:
- logger.error(e)
- raise e
-
- @dec_stepmsg("upgrade")
- @timeout(3600)
- def upgrade(self):
- '''
- #===================================================================================
- # @Method: upgrade(self)
- # @Precondition: none
- # @Func: 升级相关业务逻辑
- # @PostStatus: none
- # @eg: upgrade()
- # @return: True or Flase
- #===================================================================================
- '''
- global local_image_path, loader_tool_path, sn, LocationID ,test_num
- upgrade_test_type = self.params_dict.get("UpgradeTestType")
- sn = self.params_dict.get("sn")
- LocationID = self.params_dict.get("LocationID")
- test_num = self.params_dict.get("test_num")
- pr_url = self.params_dict.get("pr_url")
- logFilePath = self.logFilePath
- logger.info(logFilePath)
- r = logFilePath.rfind("\\")
- report_path = logFilePath[:r]
- logger.info(report_path)
- scriptpath = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(os.path.dirname(__file__)))))
- logger.info(scriptpath)
- local_image_path = os.path.join(version_savepath)
- logger.info(local_image_path)
- loader_tool_path = os.path.join(scriptpath, "resource", "RK3568_tool", "upgrade_tool.exe")
- logger.info(loader_tool_path)
- mini_path = os.path.join(local_image_path, "mini_system_test", "L2_mini_system_test.py")
- archive_path = os.path.join(version_savepath)
- if not self.check_devices_mode():
- check_devices_cmd = "hdc_std list targets"
- f = send_times(check_devices_cmd)
- logger.info(f)
- if not f or "Empty" in f:
- logger.error("No devices found,please check the device.")
- return False
- else:
- logger.info("3568 board is connected.")
- return self.check_devices_mode()
- else:
- # 下載鏡像
- upgrde_loader_cmd = "%s -s %s UL %s/MiniLoaderAll.bin -noreset" % (loader_tool_path, LocationID, local_image_path)
- h = sendCmd(upgrde_loader_cmd)
- logger.info(h)
- if "Upgrade loader ok" not in h:
- logger.error("Download MiniLoaderAll.bin Fail!")
- return False
- else:
- logger.printLog("Download MiniLoaderAll.bin Success!")
- # time.sleep(3)
- write_gpt_cmd = "%s -s %s DI -p %s/parameter.txt" % (loader_tool_path, LocationID, local_image_path)
- j = sendCmd(write_gpt_cmd)
- logger.info(j)
- if "Write gpt ok" not in j:
- logger.error("Failed to execute the parameter.txt")
- return False
- else:
- logger.printLog("Successfully executed parameter.txt.")
- # time.sleep(5)
- download_uboot_cmd = "%s -s %s DI -uboot %s/uboot.img %s/parameter.txt" % (
- loader_tool_path, LocationID, local_image_path, local_image_path)
- k = sendCmd(download_uboot_cmd)
- logger.info(k)
- if "Download image ok" not in k:
- logger.error("Failed to download the uboot.image!")
- if self.check_devices_mode():
- return 98
- return False
- else:
- logger.printLog("The uboot.image downloaded successfully!")
- # time.sleep(5)
- if not self.flash_version():
- return False
- reboot_devices_cmd = "%s -s %s RD" % (loader_tool_path, LocationID)
- reboot_result = sendCmd(reboot_devices_cmd)
- logger.info(reboot_result)
- time.sleep(30)
- try:
- if upgrade_test_type != "mini_system_test":
- if not start_cmd(sn):
- if self.check_devices_mode():
- return 98
- return False
- except Exception as t:
- logger.info(t)
- if self.check_devices_mode():
- return 98
- return False
- time.sleep(10)
- if "Reset Device OK" not in reboot_result:
- logger.error("Failed to reboot the board!")
- return False
- else:
- logger.info("Reboot successfully!")
- logger.printLog("******下载完成,升级成功,开始进行冒烟测试******")
- if upgrade_test_type == "null":
- return True
- screenshot_path = os.path.join(local_image_path, "screenshot")
- resource_path = os.path.join(screenshot_path, "resource")
- logger.info(resource_path)
- py_path = os.path.join(resource_path, "capturescreentest.py")
- new_report_path = os.path.join(report_path, "result")
- logger.info(new_report_path)
- time_sleep = random.randint(1, 5)
- time.sleep(time_sleep)
- try:
- if not os.path.exists(new_report_path):
- os.mkdir(new_report_path)
- except Exception as e:
- logger.error(e)
- return 98
- if upgrade_test_type == "mini_system_test":
- save_path = os.path.join(new_report_path)
- if exec_cmd(mini_path, sn, save_path, archive_path) == 98:
- return 98
- return True
- if not upgrade_test_type or upgrade_test_type == "smoke_test":
- test_return = cmd_test(screenshot_path, py_path, new_report_path, resource_path, sn, test_num, pr_url)
- if test_return == 1:
- return True
- if test_return == 98:
- return 98
- if test_return == 99:
- return 99
- else:
- return False
-
- @timeout(1000)
- def flash_version(self):
- partList = ["boot_linux", "system", "vendor", "userdata", "resource", "ramdisk", "chipset", "sys-prod", "chip-prod"]
- for i in partList:
- if not os.path.exists("%s/%s.img" % (local_image_path, i)):
- logger.printLog("%s.img is not exist, ignore" % i)
- continue
- loadcmd = "%s -s %s DI -%s %s/%s.img" % (loader_tool_path, LocationID, i, local_image_path, i)
- p = sendCmd(loadcmd)
- logger.info(p)
- # time.sleep(5)
- if "Download image ok" not in p:
- logger.info("try download %s again!" % i)
- time.sleep(1)
- second_cmd = "%s -s %s DI -%s %s/%s.img" % (loader_tool_path, LocationID, i, local_image_path, i)
- f = sendCmd(second_cmd)
- logger.info(f)
- if "Download image ok" not in f:
- logger.printLog("Failed to download the %s.img!" % i)
- if self.check_devices_mode():
- return 98
- else:
- return False
- return True
- else:
- logger.printLog("The %s.img downloaded successfully!" % i)
- return True
-
- @timeout(120)
- def check_devices_mode(self):
- check_times = 0
- while check_times < 5:
- check_mode_cmd = "%s LD" % loader_tool_path
- g = sendCmd(check_mode_cmd)
- logger.info(g)
- #time.sleep(40)
- if "LocationID=%s Mode=Loader" % LocationID in g:
- logger.info("3568 board has entered the Loader mode successfully!")
- return True
- else:
- #if test_num != "2/2":
- # hdc_kill()
- os.system("hdc_std -t %s shell reboot loader" % sn)
- time.sleep(5)
- check_times += 1
- logger.error("Failed to enter the loader mode!")
- return False
-
- @dec_stepmsg("download")
- @timeout(360)
- def download(self):
- '''
- #===================================================================================
- # @Method: download(self)
- # @Precondition: none
- # @Func: 构建下载到本地的路径,执行相应包的下载
- # @PostStatus: none
- # @eg: download()
- # @return: True or Flase
- #===================================================================================
- '''
- global version_savepath, version_name
- dir_path = CONSTANT.Path.getDirPath()
- if self.params_dict.get("pbiid"):
- version_path = self.params_dict.get("pbiid")
- version_name = str(uuid.uuid5(uuid.NAMESPACE_URL, str(self.params_dict.get("pbiid")) + "FASTBOOT"))
- version_savepath = os.path.join(dir_path, self.params_dict.get("flash_type"), version_name)
- else:
- version_path = self.params_dict.get("upgrade_upgradeLocation")
- version_name = str(uuid.uuid5(uuid.NAMESPACE_URL, (self.params_dict.get("upgrade_upgradeLocation"))))
- version_savepath = os.path.join(dir_path, version_name, "img")
- # 执行img下载
-
- if self.params_dict.get("isDownload") == "True":
- logger.printLog("不需要做下载,直接返回")
- return True
-
- import hashlib
- save_file_str = version_path.replace("/", "").replace("\\", "")
- save_file_name = hashlib.sha1(save_file_str.encode("utf-8")).hexdigest()
- logger.info("download hash string:%s, hash value:%s" % (save_file_str, save_file_name))
- save_path_file = os.path.join(dir_path, "record", "%s%s" % (save_file_name, ".txt"))
- if not self.excutedown(version_path, version_savepath, save_path_file, False):
- logger.error("download img fail")
- return 98
-
- # 保存本地版本路径给devicetest去版本路径下取用例
- saveVersion(save_path_file, version_savepath)
- return True
-
- def excutedown(self, source_path, download_dir, suc_mark, is_file):
- '''
- #===================================================================================
- # @Method: excutedown(source_path, download_dir, suc_mark, is_file)
- # @Precondition: none
- # @Func: 执行下载动作
- # @PostStatus: none
- # @Param: source_path:资源文件路径
- # download_dir:文件下载到本地的文件夹路径
- # is_file:是否是文件
- # @eg: excutedown("xxxx", "D:\\local\\image", suc_mark, Flase)
- # @return: True or Flase
- #===================================================================================
- '''
- failed_mark = os.path.join(download_dir, failed_file)
- lock_path = os.path.join(download_dir, lock_suffix)
- file_lock = FileLock()
-
- if isDownLoadSuccess(download_dir, suc_mark, failed_mark):
- return True
- try:
- nowtime = get_now_time_str_info()
- logger.printLog("%s Downloading, please wait" % nowtime)
- file_lock.lockFile(lock_path)
- ret = ""
- logger.info("Get lock. Start to ")
- try:
- if self.params_dict.get("bt_enable") and self.params_dict.get("bt_enable") == "True":
- ret = downloadByBitComet(source_path, download_dir, os_method)
- elif source_path.startswith('\\\\'):
- ret = downloadByCopy(source_path, download_dir, is_file)
- elif self.params_dict.get("pbiid"):
- ret = downlaodByDownloadTool(version_savepath, self.params_dict.get("version_type"), "FASTBOOT",
- self.params_dict.get("pbiid"))
- elif source_path.startswith("http"):
- ret = run_download(source_path, download_dir)
- except Exception as f:
- logger.error(f)
- logger.info("下载失败,间隔20秒,尝试再次下载。")
- time.sleep(20)
- ret = run_download(source_path, download_dir)
- if source_path.endswith(".zip"):
- zip_name = os.path.basename(source_path)
- ret = extractZipFile(os.path.join(download_dir, zip_name), download_dir)
- if source_path.endswith(".tar.gz") or (source_path.startswith("http") and ("file_id=" in source_path)):
- if source_path.startswith("http") and ("file_id=" in source_path):
- if source_path.endswith(".tar.gz"):
- zip_name = source_path.split('=')[-1]
- else:
- zip_name = "out.tar.gz"
- else:
- zip_name = os.path.basename(source_path)
- ret = unTarFile(os.path.join(download_dir, zip_name), download_dir)
- nowtime = get_now_time_str_info()
- logger.printLog("%s download to %s end" % (nowtime, download_dir))
-
- if not ret:
- with open(failed_mark, "a+") as fp:
- fp.write("")
- return ret
- except Exception as e:
- logger.printLog(e)
- #raise Exception(e)
- finally:
- file_lock.releaseFile()
-
-
-@timeout(30)
-def hdc_kill():
- logger.info("kill the process")
- os.system("hdc_std kill")
- time.sleep(2)
- logger.info("start the process")
- os.system("hdc_std -l5 start")
- # time.sleep(10)
-
-
-def sendCmd(mycmd):
- result = "".join(os.popen(mycmd).readlines())
- return result
-
-
-def send_times(mycmd):
- times = 0
- outcome = sendCmd(mycmd)
- while times < 3:
- if not outcome or "Empty" in outcome:
- times += 1
- time.sleep(3)
- else:
- time.sleep(3)
- return outcome
- return outcome
-
-
-@timeout(180)
-def start_cmd(sn):
- try:
- os.system("hdc_std -l5 start")
- power_cmd = "hdc_std -t %s shell \"power-shell setmode 602\"" % sn
- hilog_cmd = "hdc_std -t %s shell \"hilog -w start -l 400000000 -m none\"" % sn
- logger.info(power_cmd)
- logger.info(hilog_cmd)
- power_result = sendCmd(power_cmd)
- logger.info(power_result)
- if not power_result:
- return False
- number = 0
- while "Set Mode Success" not in power_result and number < 30:
- time.sleep(4)
- power_result = sendCmd(power_cmd)
- logger.info(power_result)
- number += 1
- if number >= 20:
- logger.error("Set mode failed")
- return False
- hilog_result = sendCmd(hilog_cmd)
- logger.info(hilog_result)
- return True
- except Exception as e:
- logger.error(e)
- return False
-
-
-@timeout(3600)
-def cmd_test(screenshot_path, py_path, new_report_path, resource_path, sn, test_num, pr_url):
- save_screenshot_path = os.path.join(new_report_path, "screenshot_result")
- logger.info(save_screenshot_path)
- time_sleep = random.randint(1, 5)
- time.sleep(time_sleep)
- try:
- if not os.path.exists(save_screenshot_path):
- os.mkdir(save_screenshot_path)
- logger.info(save_screenshot_path)
- base_screenshot_path = os.path.join(new_report_path, "screenshot_base")
- if not os.path.exists(base_screenshot_path):
- os.mkdir(base_screenshot_path)
- logger.info(base_screenshot_path)
- except Exception as e:
- logger.error(e)
- return 98
- config_path = os.path.join(screenshot_path, "resource", "app_capture_screen_test_config.json")
- py_cmd = "python %s --config %s --anwser_path %s --save_path %s --device_num %s --test_num %s --tools_path %s --pr_url %s" \
- % (py_path, config_path, resource_path, save_screenshot_path, sn, test_num, screenshot_path, pr_url)
- result = outCmd(py_cmd, save_screenshot_path, base_screenshot_path, resource_path)
- if result == 1:
- return True
- if result == 98:
- return 98
- if result == 99:
- return 99
- else:
- return False
-
-
-@timeout(3600)
-def outCmd(cmd, save_screenshot_path, base_screenshot_path, resource_path):
- logger.info("cmd is: %s" % cmd)
- p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, encoding="gbk")
- curline = p.stdout.readline()
- list_png_name = []
- try:
- while "End of check" not in curline:
- curline = p.stdout.readline()
- logger.info(curline)
- if "abnarmal" in curline:
- png_name = curline.split(" ")[3].split(".")[0]
- list_png_name.append(png_name)
- if "SmokeTest find some fatal problems" in curline:
- logger.error("SmokeTest find some fatal problems!")
- return 99
- except Exception as e:
- logger.error(e)
- logger.error("execute smoke_test.py failed!")
- return 99
- l = list(set(list_png_name))
- if l:
- logger.error(l)
- try:
- for i in l:
- result = os.path.join(resource_path, "%s.png" % i)
- base = os.path.join(base_screenshot_path, "%s.png" % i)
- shutil.copy(result, base)
- except Exception as t:
- logger.info(t)
- p.wait()
- logger.info("p.returncode %s" % p.returncode)
- if p.returncode == 0:
- logger.info("screenshot check is ok!")
- return True
- if p.returncode == 101:
- logger.error("device disconnection, please check the device!")
- return False
- logger.error("screenshot test failed, check the %s" % save_screenshot_path)
- return 98
-
-
-@timeout(1000)
-def exec_cmd(mini_path, sn, save_path, archive_path):
- cmd = "python %s --device_num %s --save_path %s --archive_path %s" % (mini_path, sn, save_path, archive_path)
- p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, encoding="gbk")
- curline = p.stdout.readline()
- try:
- while "End of check" not in curline:
- curline = p.stdout.readline()
- logger.info(curline)
- except Exception as e:
- logger.error(e)
- p.wait()
- logger.info("p.returncode %s" % p.returncode)
- if p.returncode == 0:
- logger.info("mini_system_test is ok!")
- return True
- logger.error("mini_system_test failed!")
- return 98
\ No newline at end of file
diff --git a/DeployDevice/src/func/liteOsUpgrade/liteOsUpgrade_RK3568_app.py b/DeployDevice/src/func/liteOsUpgrade/liteOsUpgrade_RK3568_app.py
index ecf6aa178bdf85d7485a65c1e08dd879e96ce2dd..78040074293d02fc34290a24df6c4b0736945fa8 100644
--- a/DeployDevice/src/func/liteOsUpgrade/liteOsUpgrade_RK3568_app.py
+++ b/DeployDevice/src/func/liteOsUpgrade/liteOsUpgrade_RK3568_app.py
@@ -8,7 +8,6 @@ import re
import shutil
import random
-
from core.base import BaseApp, dec_stepmsg
from util.file_locker import FileLock
from util.log_info import logger
@@ -18,13 +17,14 @@ from aw.Common.Constant import CONSTANT
from aw.Common.Common import getFileName
from aw.ExtractFile.ExtractFile import *
from aw.Common.Common import getHostIp, copyFile, copyDirectory
-
+total_time = ""
lock_suffix = CONSTANT.File.LOCK_SUFFIX
suc_file = CONSTANT.File.SUC_FILE
failed_file = CONSTANT.File.FAILED_FILE
REBOOT_TIMEOUT = 20000000
+
class liteOsUpgrade_RK3568(BaseApp):
'''
@author: cwx1076044
@@ -175,6 +175,8 @@ class liteOsUpgrade_RK3568(BaseApp):
else:
logger.info("Reboot successfully!")
logger.printLog("******下载完成,升级成功,开始进行冒烟测试******")
+ os.system("hdc_std -t %s shell hilog -w start" % sn)
+ os.system("hdc_std -t %s shell hilog -w start -t kmsg" % sn)
if upgrade_test_type == "null":
return True
screenshot_path = os.path.join(local_image_path, "screenshot")
@@ -196,6 +198,7 @@ class liteOsUpgrade_RK3568(BaseApp):
if exec_cmd(mini_path, sn, save_path, archive_path) == 98:
return 98
return True
+
if not upgrade_test_type or upgrade_test_type == "smoke_test":
test_return = cmd_test(screenshot_path, py_path, new_report_path, resource_path, sn, test_num, pr_url)
if test_return == 1:
@@ -436,8 +439,9 @@ def start_cmd(sn):
return False
-@timeout(3600)
+@timeout(900)
def cmd_test(screenshot_path, py_path, new_report_path, resource_path, sn, test_num, pr_url):
+ global total_time
save_screenshot_path = os.path.join(new_report_path, "screenshot_result")
logger.info(save_screenshot_path)
time_sleep = random.randint(1, 5)
@@ -456,7 +460,11 @@ def cmd_test(screenshot_path, py_path, new_report_path, resource_path, sn, test_
config_path = os.path.join(screenshot_path, "resource", "app_capture_screen_test_config.json")
py_cmd = "python %s --config %s --anwser_path %s --save_path %s --device_num %s --test_num %s --tools_path %s --pr_url %s" \
% (py_path, config_path, resource_path, save_screenshot_path, sn, test_num, screenshot_path, pr_url)
+ time1 = time.time()
result = outCmd(py_cmd, save_screenshot_path, base_screenshot_path, resource_path)
+ time2 = time.time()
+ total_time = int(time2 - time1)
+ logger.info("total_time: %s" % total_time)
if result == 1:
return True
if result == 98:
@@ -467,7 +475,7 @@ def cmd_test(screenshot_path, py_path, new_report_path, resource_path, sn, test_
return False
-@timeout(3600)
+@timeout(900)
def outCmd(cmd, save_screenshot_path, base_screenshot_path, resource_path):
logger.info("cmd is: %s" % cmd)
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, encoding="gbk")
@@ -492,8 +500,8 @@ def outCmd(cmd, save_screenshot_path, base_screenshot_path, resource_path):
logger.error(l)
try:
for i in l:
- result = os.path.join(resource_path, "%s.png" % i)
- base = os.path.join(base_screenshot_path, "%s.png" % i)
+ result = os.path.join(resource_path, "%s.jpeg" % i)
+ base = os.path.join(base_screenshot_path, "%s.jpeg" % i)
shutil.copy(result, base)
except Exception as t:
logger.info(t)
diff --git a/DeployDevice/src/func/liteOsUpgrade/liteOsUpgrade_RK3568_app_old.py b/DeployDevice/src/func/liteOsUpgrade/liteOsUpgrade_RK3568_app_old.py
deleted file mode 100644
index 1e91d2e60d4ad324761e171d6a33d91e7ab30f81..0000000000000000000000000000000000000000
--- a/DeployDevice/src/func/liteOsUpgrade/liteOsUpgrade_RK3568_app_old.py
+++ /dev/null
@@ -1,506 +0,0 @@
-# -*- coding:utf-8 -*-
-import uuid
-import sys
-import subprocess
-import os
-import time
-import re
-import shutil
-import random
-
-
-from core.base import BaseApp, dec_stepmsg
-from util.file_locker import FileLock
-from util.log_info import logger
-from util.time_info import get_now_time_str_info, get_now_time_info, Timeout, timeout
-from aw.Download.Download import *
-from aw.Common.Constant import CONSTANT
-from aw.Common.Common import getFileName
-from aw.ExtractFile.ExtractFile import *
-from aw.Common.Common import getHostIp, copyFile, copyDirectory
-
-lock_suffix = CONSTANT.File.LOCK_SUFFIX
-suc_file = CONSTANT.File.SUC_FILE
-failed_file = CONSTANT.File.FAILED_FILE
-REBOOT_TIMEOUT = 20000000
-
-
-class liteOsUpgrade_RK3568(BaseApp):
- '''
- @author: cwx1076044
- '''
-
- def __init__(self, param_file):
- super().__init__(param_file)
- self.param_List = ["upgrade_upgradeLocation", "sn"]
-
- @dec_stepmsg("hongmeng RK3568 flash")
- def excute(self):
- '''
- #===================================================================================
- # @Method: excute(self)
- # @Precondition: none
- # @Func: 升级执行入口
- # @PostStatus: none
- # @eg: excute()
- # @return: True or Flase
- #===================================================================================
- '''
- step_index = self.params_dict.get("step_list").index("liteOsUpgrade_RK3568_app")
-
- # 执行下载
- try:
- if not self.download():
- CONSTANT.ENVERRMESSAGE = "image download fail"
- logger.printLog(CONSTANT.ENVERRMESSAGE)
- return False
- except Exception as e:
- logger.error(e)
- raise e
-
- # 执行升级
- try:
- return_code = self.upgrade()
- if not return_code:
- CONSTANT.ENVERRMESSAGE = "board upgrade fail"
- logger.printLog(CONSTANT.ENVERRMESSAGE)
- return False
- if return_code == 98:
- return 98
- if return_code == 99:
- return 99
- return True
- except Exception as e:
- logger.error(e)
- raise e
-
- @dec_stepmsg("upgrade")
- @timeout(3600)
- def upgrade(self):
- '''
- #===================================================================================
- # @Method: upgrade(self)
- # @Precondition: none
- # @Func: 升级相关业务逻辑
- # @PostStatus: none
- # @eg: upgrade()
- # @return: True or Flase
- #===================================================================================
- '''
- global local_image_path, loader_tool_path, sn, LocationID ,test_num
- upgrade_test_type = self.params_dict.get("UpgradeTestType")
- sn = self.params_dict.get("sn")
- LocationID = self.params_dict.get("LocationID")
- test_num = self.params_dict.get("test_num")
- pr_url = self.params_dict.get("pr_url")
- logFilePath = self.logFilePath
- logger.info(logFilePath)
- r = logFilePath.rfind("\\")
- report_path = logFilePath[:r]
- logger.info(report_path)
- scriptpath = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(os.path.dirname(__file__)))))
- logger.info(scriptpath)
- local_image_path = os.path.join(version_savepath)
- logger.info(local_image_path)
- loader_tool_path = os.path.join(scriptpath, "resource", "RK3568_tool", "upgrade_tool.exe")
- logger.info(loader_tool_path)
- mini_path = os.path.join(local_image_path, "mini_system_test", "L2_mini_system_test.py")
- archive_path = os.path.join(version_savepath)
- if not self.check_devices_mode():
- check_devices_cmd = "hdc_std list targets"
- f = send_times(check_devices_cmd)
- logger.info(f)
- if not f or "Empty" in f:
- logger.error("No devices found,please check the device.")
- return False
- else:
- logger.info("3568 board is connected.")
- return self.check_devices_mode()
- else:
- # 下載鏡像
- upgrde_loader_cmd = "%s -s %s UL %s/MiniLoaderAll.bin -noreset" % (loader_tool_path, LocationID, local_image_path)
- h = sendCmd(upgrde_loader_cmd)
- logger.info(h)
- if "Upgrade loader ok" not in h:
- logger.error("Download MiniLoaderAll.bin Fail!")
- return False
- else:
- logger.printLog("Download MiniLoaderAll.bin Success!")
- # time.sleep(3)
- write_gpt_cmd = "%s -s %s DI -p %s/parameter.txt" % (loader_tool_path, LocationID, local_image_path)
- j = sendCmd(write_gpt_cmd)
- logger.info(j)
- if "Write gpt ok" not in j:
- logger.error("Failed to execute the parameter.txt")
- return False
- else:
- logger.printLog("Successfully executed parameter.txt.")
- # time.sleep(5)
- download_uboot_cmd = "%s -s %s DI -uboot %s/uboot.img %s/parameter.txt" % (
- loader_tool_path, LocationID, local_image_path, local_image_path)
- k = sendCmd(download_uboot_cmd)
- logger.info(k)
- if "Download image ok" not in k:
- logger.error("Failed to download the uboot.image!")
- if self.check_devices_mode():
- return 98
- return False
- else:
- logger.printLog("The uboot.image downloaded successfully!")
- # time.sleep(5)
- if not self.flash_version():
- return False
- reboot_devices_cmd = "%s -s %s RD" % (loader_tool_path, LocationID)
- reboot_result = sendCmd(reboot_devices_cmd)
- logger.info(reboot_result)
- time.sleep(30)
- try:
- if upgrade_test_type != "mini_system_test":
- if not start_cmd(sn):
- if self.check_devices_mode():
- return 98
- return False
- except Exception as t:
- logger.info(t)
- if self.check_devices_mode():
- return 98
- return False
- time.sleep(10)
- if "Reset Device OK" not in reboot_result:
- logger.error("Failed to reboot the board!")
- return False
- else:
- logger.info("Reboot successfully!")
- logger.printLog("******下载完成,升级成功,开始进行冒烟测试******")
- if upgrade_test_type == "null":
- return True
- screenshot_path = os.path.join(local_image_path, "screenshot")
- resource_path = os.path.join(screenshot_path, "resource")
- logger.info(resource_path)
- py_path = os.path.join(resource_path, "capturescreentest.py")
- new_report_path = os.path.join(report_path, "result")
- logger.info(new_report_path)
- time_sleep = random.randint(1, 5)
- time.sleep(time_sleep)
- try:
- if not os.path.exists(new_report_path):
- os.mkdir(new_report_path)
- except Exception as e:
- logger.error(e)
- return 98
- if upgrade_test_type == "mini_system_test":
- save_path = os.path.join(new_report_path)
- if exec_cmd(mini_path, sn, save_path, archive_path) == 98:
- return 98
- return True
- if not upgrade_test_type or upgrade_test_type == "smoke_test":
- test_return = cmd_test(screenshot_path, py_path, new_report_path, resource_path, sn, test_num, pr_url)
- if test_return == 1:
- return True
- if test_return == 98:
- return 98
- if test_return == 99:
- return 99
- else:
- return False
-
- @timeout(1000)
- def flash_version(self):
- partList = ["boot_linux", "system", "vendor", "userdata", "resource", "ramdisk", "chipset", "sys-prod", "chip-prod"]
- for i in partList:
- if not os.path.exists("%s/%s.img" % (local_image_path, i)):
- logger.printLog("%s.img is not exist, ignore" % i)
- continue
- loadcmd = "%s -s %s DI -%s %s/%s.img" % (loader_tool_path, LocationID, i, local_image_path, i)
- p = sendCmd(loadcmd)
- logger.info(p)
- # time.sleep(5)
- if "Download image ok" not in p:
- logger.info("try download %s again!" % i)
- time.sleep(1)
- second_cmd = "%s -s %s DI -%s %s/%s.img" % (loader_tool_path, LocationID, i, local_image_path, i)
- f = sendCmd(second_cmd)
- logger.info(f)
- if "Download image ok" not in f:
- logger.printLog("Failed to download the %s.img!" % i)
- if self.check_devices_mode():
- return 98
- else:
- return False
- return True
- else:
- logger.printLog("The %s.img downloaded successfully!" % i)
- return True
-
- @timeout(120)
- def check_devices_mode(self):
- check_times = 0
- while check_times < 5:
- check_mode_cmd = "%s LD" % loader_tool_path
- g = sendCmd(check_mode_cmd)
- logger.info(g)
- #time.sleep(40)
- if "LocationID=%s Mode=Loader" % LocationID in g:
- logger.info("3568 board has entered the Loader mode successfully!")
- return True
- else:
- #if test_num != "2/2":
- # hdc_kill()
- os.system("hdc_std -t %s shell reboot loader" % sn)
- time.sleep(5)
- check_times += 1
- logger.error("Failed to enter the loader mode!")
- return False
-
- @dec_stepmsg("download")
- @timeout(1800)
- def download(self):
- '''
- #===================================================================================
- # @Method: download(self)
- # @Precondition: none
- # @Func: 构建下载到本地的路径,执行相应包的下载
- # @PostStatus: none
- # @eg: download()
- # @return: True or Flase
- #===================================================================================
- '''
- global version_savepath, version_name
- dir_path = CONSTANT.Path.getDirPath()
- if self.params_dict.get("pbiid"):
- version_path = self.params_dict.get("pbiid")
- version_name = str(uuid.uuid5(uuid.NAMESPACE_URL, str(self.params_dict.get("pbiid")) + "FASTBOOT"))
- version_savepath = os.path.join(dir_path, self.params_dict.get("flash_type"), version_name)
- else:
- version_path = self.params_dict.get("upgrade_upgradeLocation")
- version_name = str(uuid.uuid5(uuid.NAMESPACE_URL, (self.params_dict.get("upgrade_upgradeLocation"))))
- version_savepath = os.path.join(dir_path, version_name, "img")
- # 执行img下载
-
- if self.params_dict.get("isDownload") == "True":
- logger.printLog("不需要做下载,直接返回")
- return True
-
- import hashlib
- save_file_str = version_path.replace("/", "").replace("\\", "")
- save_file_name = hashlib.sha1(save_file_str.encode("utf-8")).hexdigest()
- logger.info("download hash string:%s, hash value:%s" % (save_file_str, save_file_name))
- save_path_file = os.path.join(dir_path, "record", "%s%s" % (save_file_name, ".txt"))
- if not self.excutedown(version_path, version_savepath, save_path_file, False):
- logger.error("download img fail")
- return False
-
- # 保存本地版本路径给devicetest去版本路径下取用例
- saveVersion(save_path_file, version_savepath)
- return True
-
- def excutedown(self, source_path, download_dir, suc_mark, is_file):
- '''
- #===================================================================================
- # @Method: excutedown(source_path, download_dir, suc_mark, is_file)
- # @Precondition: none
- # @Func: 执行下载动作
- # @PostStatus: none
- # @Param: source_path:资源文件路径
- # download_dir:文件下载到本地的文件夹路径
- # is_file:是否是文件
- # @eg: excutedown("xxxx", "D:\\local\\image", suc_mark, Flase)
- # @return: True or Flase
- #===================================================================================
- '''
- failed_mark = os.path.join(download_dir, failed_file)
- lock_path = os.path.join(download_dir, lock_suffix)
- file_lock = FileLock()
-
- if isDownLoadSuccess(download_dir, suc_mark, failed_mark):
- return True
- try:
- nowtime = get_now_time_str_info()
- logger.printLog("%s Downloading, please wait" % nowtime)
- file_lock.lockFile(lock_path)
- ret = ""
- logger.info("Get lock. Start to ")
- if self.params_dict.get("bt_enable") and self.params_dict.get("bt_enable") == "True":
- ret = downloadByBitComet(source_path, download_dir, os_method)
- elif source_path.startswith('\\\\'):
- ret = downloadByCopy(source_path, download_dir, is_file)
- elif self.params_dict.get("pbiid"):
- ret = downlaodByDownloadTool(version_savepath, self.params_dict.get("version_type"), "FASTBOOT",
- self.params_dict.get("pbiid"))
- elif source_path.startswith("http"):
- ret = run_download(source_path, download_dir)
-
- if source_path.endswith(".zip"):
- zip_name = os.path.basename(source_path)
- ret = extractZipFile(os.path.join(download_dir, zip_name), download_dir)
- if source_path.endswith(".tar.gz") or (source_path.startswith("http") and ("file_id=" in source_path)):
- if source_path.startswith("http") and ("file_id=" in source_path):
- if source_path.endswith(".tar.gz"):
- zip_name = source_path.split('=')[-1]
- else:
- zip_name = "out.tar.gz"
- else:
- zip_name = os.path.basename(source_path)
- ret = unTarFile(os.path.join(download_dir, zip_name), download_dir)
- nowtime = get_now_time_str_info()
- logger.printLog("%s download to %s end" % (nowtime, download_dir))
-
- if not ret:
- with open(failed_mark, "a+") as fp:
- fp.write("")
- return ret
- except Exception as e:
- logger.printLog(e)
- raise Exception(e)
- finally:
- file_lock.releaseFile()
-
-
-@timeout(30)
-def hdc_kill():
- logger.info("kill the process")
- os.system("hdc_std kill")
- time.sleep(2)
- logger.info("start the process")
- os.system("hdc_std -l5 start")
- # time.sleep(10)
-
-
-def sendCmd(mycmd):
- result = "".join(os.popen(mycmd).readlines())
- return result
-
-
-def send_times(mycmd):
- times = 0
- outcome = sendCmd(mycmd)
- while times < 3:
- if not outcome or "Empty" in outcome:
- times += 1
- time.sleep(3)
- else:
- time.sleep(3)
- return outcome
- return outcome
-
-
-@timeout(180)
-def start_cmd(sn):
- try:
- os.system("hdc_std -l5 start")
- power_cmd = "hdc_std -t %s shell \"power-shell setmode 602\"" % sn
- hilog_cmd = "hdc_std -t %s shell \"hilog -w start -l 400000000 -m none\"" % sn
- logger.info(power_cmd)
- logger.info(hilog_cmd)
- power_result = sendCmd(power_cmd)
- logger.info(power_result)
- if not power_result:
- return False
- number = 0
- while "Set Mode Success" not in power_result and number < 30:
- time.sleep(4)
- power_result = sendCmd(power_cmd)
- logger.info(power_result)
- number += 1
- if number >= 20:
- logger.error("Set mode failed")
- return False
- hilog_result = sendCmd(hilog_cmd)
- logger.info(hilog_result)
- return True
- except Exception as e:
- logger.error(e)
- return False
-
-
-@timeout(3600)
-def cmd_test(screenshot_path, py_path, new_report_path, resource_path, sn, test_num, pr_url):
- save_screenshot_path = os.path.join(new_report_path, "screenshot_result")
- logger.info(save_screenshot_path)
- time_sleep = random.randint(1, 5)
- time.sleep(time_sleep)
- try:
- if not os.path.exists(save_screenshot_path):
- os.mkdir(save_screenshot_path)
- logger.info(save_screenshot_path)
- base_screenshot_path = os.path.join(new_report_path, "screenshot_base")
- if not os.path.exists(base_screenshot_path):
- os.mkdir(base_screenshot_path)
- logger.info(base_screenshot_path)
- except Exception as e:
- logger.error(e)
- return 98
- config_path = os.path.join(screenshot_path, "resource", "app_capture_screen_test_config.json")
- py_cmd = "python %s --config %s --anwser_path %s --save_path %s --device_num %s --test_num %s --tools_path %s --pr_url %s" \
- % (py_path, config_path, resource_path, save_screenshot_path, sn, test_num, screenshot_path, pr_url)
- result = outCmd(py_cmd, save_screenshot_path, base_screenshot_path, resource_path)
- if result == 1:
- return True
- if result == 98:
- return 98
- if result == 99:
- return 99
- else:
- return False
-
-
-@timeout(3600)
-def outCmd(cmd, save_screenshot_path, base_screenshot_path, resource_path):
- logger.info("cmd is: %s" % cmd)
- p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, encoding="gbk")
- curline = p.stdout.readline()
- list_png_name = []
- try:
- while "End of check" not in curline:
- curline = p.stdout.readline()
- logger.info(curline)
- if "abnarmal" in curline:
- png_name = curline.split(" ")[3].split(".")[0]
- list_png_name.append(png_name)
- if "SmokeTest find some fatal problems" in curline:
- logger.error("SmokeTest find some fatal problems!")
- return 99
- except Exception as e:
- logger.error(e)
- logger.error("execute smoke_test.py failed!")
- return 99
- l = list(set(list_png_name))
- if l:
- logger.error(l)
- try:
- for i in l:
- result = os.path.join(resource_path, "%s.png" % i)
- base = os.path.join(base_screenshot_path, "%s.png" % i)
- shutil.copy(result, base)
- except Exception as t:
- logger.info(t)
- p.wait()
- logger.info("p.returncode %s" % p.returncode)
- if p.returncode == 0:
- logger.info("screenshot check is ok!")
- return True
- if p.returncode == 101:
- logger.error("device disconnection, please check the device!")
- return False
- logger.error("screenshot test failed, check the %s" % save_screenshot_path)
- return 98
-
-
-@timeout(1000)
-def exec_cmd(mini_path, sn, save_path, archive_path):
- cmd = "python %s --device_num %s --save_path %s --archive_path %s" % (mini_path, sn, save_path, archive_path)
- p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, encoding="gbk")
- curline = p.stdout.readline()
- try:
- while "End of check" not in curline:
- curline = p.stdout.readline()
- logger.info(curline)
- except Exception as e:
- logger.error(e)
- p.wait()
- logger.info("p.returncode %s" % p.returncode)
- if p.returncode == 0:
- logger.info("mini_system_test is ok!")
- return True
- logger.error("mini_system_test failed!")
- return 98
\ No newline at end of file
diff --git a/OAT.xml b/OAT.xml
index 9857f5e54aca51dcabd9758b9a293a9aef196747..631ca8bd9406411b1f79442490689894adc9ccd2 100644
--- a/OAT.xml
+++ b/OAT.xml
@@ -65,14 +65,20 @@ Note:If the text contains special characters, please escape them according to th
+
+
-
+
+
+
-
+
+
+
@@ -80,4 +86,4 @@ Note:If the text contains special characters, please escape them according to th
-
\ No newline at end of file
+
diff --git a/README.md b/README.md
index c60415bf5fac36799e27a21844a4222675cbd107..7bfecdff7445f1ca29bd5e7e9c797e7548297fac 100644
--- a/README.md
+++ b/README.md
@@ -43,4 +43,4 @@
## 相关仓
-[**developtools\_integration\_verification**](https://gitee.com/openharmony/developtools_integration_verification/blob/master/README_zh.md)
\ No newline at end of file
+[**developtools\_integration\_verification**](https://gitee.com/openharmony/developtools_integration_verification)
diff --git a/cases/daily/mini_system/L0_mini_system_test.py b/cases/daily/mini_system/L0_mini_system_test.py
index 18f4eca95f0da17289abba515bb1abe18cf002d4..deb7e50e6def9a8bac5f46f4e3a8d7830a6734e6 100644
--- a/cases/daily/mini_system/L0_mini_system_test.py
+++ b/cases/daily/mini_system/L0_mini_system_test.py
@@ -25,8 +25,9 @@ import datetime
import serial
import threading
+
def GetDirSize(dir_path):
- if not os.path.exists(dir_path):
+ if not os.path.exists(dir_path):
PrintToLog("\n\nERROR: %s, dir are not exist!!!\n" % dir_path)
PrintToLog("End of check, test failed!")
sys.exit(99)
@@ -39,22 +40,27 @@ def GetDirSize(dir_path):
PrintToLog('total size: {:.2f}M'.format(size/1024/1024))
return size
+
def PrintToLog(str):
time = datetime.datetime.now()
str = "[{}] {}".format(time, str)
print(str)
- with open(os.path.join(args.save_path, 'L0_mini_test.log'), mode='a', encoding='utf-8') as log_file:
+ with open(os.path.join(args.save_path, 'L0_mini_test.log'),
+ mode='a',
+ encoding='utf-8') as log_file:
console = sys.stdout
sys.stdout = log_file
print(str)
sys.stdout = console
log_file.close()
+
def WriteToComPort(com_port, cmd):
len = com_port.write(cmd.encode('utf-8'))
print('{}'.format(len))
return
+
def ReadFromComPort(com_port, timeout):
time_start = datetime.datetime.now()
time_end = time_start
@@ -63,19 +69,24 @@ def ReadFromComPort(com_port, timeout):
while (time_end - time_start).seconds < timeout:
com_output_once = ''
while com_port.inWaiting() > 0:
- com_output_once += com_port.read(com_port.inWaiting()).decode()
+ com_output_once += com_port.read(com_port.inWaiting()).decode('ISO-8859-1')
if com_output_once != '':
com_output += com_output_once
print('{}'.format(com_output_once), end='')
time_end = datetime.datetime.now()
return com_output
+
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='manual to this script')
- parser.add_argument('--com_port', type=str, default = 'COM5')
- parser.add_argument('--com_baudrate', type=int, default = 115200)
- parser.add_argument('--save_path', type=str, default = 'D:\\DeviceTestTools\\screenshot')
- parser.add_argument('--archive_path', type=str, default = 'Z:\workspace\ohos_L2\ohos\out\hispark_pegasus\hispark_pegasus_mini_system')
+ parser.add_argument('--com_port', type=str, default='COM5')
+ parser.add_argument('--com_baudrate', type=int, default=115200)
+ parser.add_argument('--save_path', type=str,
+ default='D:\\DeviceTestTools\\screenshot')
+ parser.add_argument(
+ '--archive_path',
+ type=str,
+ default=r'Z:\workspace\ohos_L2\ohos\out\hispark_pegasus\hispark_pegasus_mini_system')
args = parser.parse_args()
com_port = serial.Serial(args.com_port, args.com_baudrate)
@@ -86,7 +97,8 @@ if __name__ == "__main__":
PrintToLog("End of check, test failed!")
sys.exit(99)
- read_com_thread = threading.Thread(target=ReadFromComPort, args=(com_port, 10))
+ read_com_thread = threading.Thread(target=ReadFromComPort,
+ args=(com_port, 10))
read_com_thread.setDaemon(True)
print('read wait:')
read_com_thread.start()
@@ -95,7 +107,8 @@ if __name__ == "__main__":
WriteToComPort(com_port, 'AT+SYSINFO\r\n')
print('enter AT+SYSINFO')
time.sleep(3)
- hivew_proc_find = re.findall('hiview,id=\d{1,3},status=\d{1,10},pri=\d{1,3},size=', com_output)
+ hivew_proc_find = re.findall(
+ 'hiview,id=\d{1,3},status=\d{1,10},pri=\d{1,3},size=', com_output)
print(hivew_proc_find)
if type(hivew_proc_find) == list and len(hivew_proc_find) > 0:
PrintToLog('hivew_proc found')
@@ -104,7 +117,8 @@ if __name__ == "__main__":
PrintToLog("End of check, test failed!")
sys.exit(99)
- target_file = os.path.normpath(os.path.join(args.archive_path, "OHOS_image.bin"))
+ target_file = os.path.normpath(os.path.join(args.archive_path,
+ "OHOS_image.bin"))
ret_size = os.path.getsize(target_file)/1024/1024
PrintToLog('Size of OHOS_image.bin : {:.2f}M'.format(ret_size))
if ret_size > 1:
@@ -116,4 +130,3 @@ if __name__ == "__main__":
PrintToLog("End of check, test succeeded!")
sys.exit(0)
-
diff --git a/cases/daily/mini_system/rk3568_chipset_only_test.py b/cases/daily/mini_system/rk3568_chipset_only_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..04e4694f3ec9ea9325f9997bfe0f3b026f97f4d4
--- /dev/null
+++ b/cases/daily/mini_system/rk3568_chipset_only_test.py
@@ -0,0 +1,207 @@
+# Copyright (c) 2023 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ast import parse
+import json
+import sys
+import os
+import time
+import argparse
+import re
+import subprocess
+import shlex
+import datetime
+
+def GetDirSize(dir_path):
+ if not os.path.exists(dir_path):
+ PrintToLog("\n\nERROR: %s, dir are not exist!!!\n" % dir_path)
+ PrintToLog("End of check, test failed!")
+ sys.exit(99)
+ size = 0
+ for root, dirs, files in os.walk(dir_path):
+ for name in files:
+ if not os.path.islink(os.path.join(root, name)):
+ sz = os.path.getsize(os.path.join(root, name))
+ #print('{} : {}byte'.format(os.path.join(root, name), sz))
+ size += sz
+ PrintToLog('total size: {:.2f}M'.format(size/1024/1024))
+ return size
+
+def PrintToLog(str):
+ time = datetime.datetime.now()
+ str = "[{}] {}".format(time, str)
+ print(str)
+ with open(os.path.join(args.save_path, 'L2_mini_test_{}.log'.format(args.device_num)), mode='a', encoding='utf-8') as log_file:
+ console = sys.stdout
+ sys.stdout = log_file
+ print(str)
+ sys.stdout = console
+ log_file.close()
+
+def EnterCmd(mycmd, waittime = 0, printresult = 1):
+ if mycmd == "":
+ return
+ global CmdRetryCnt
+ CmdRetryCnt = 1
+ EnterCmdRetry = 2
+ while EnterCmdRetry:
+ EnterCmdRetry -= 1
+ try:
+ p = subprocess.Popen(mycmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ result, unused_err = p.communicate(timeout=25)
+ try:
+ result=result.decode(encoding="utf-8")
+ except UnicodeDecodeError:
+ result=result.decode('gbk', errors='ignore')
+ break
+ except Exception as e:
+ result = 'retry failed again'
+ PrintToLog(e)
+ CmdRetryCnt += 1
+ p.kill()
+ if printresult == 1:
+ with open(os.path.join(args.save_path, 'mini_test_{}.bat'.format(args.device_num)), mode='a', encoding='utf-8') as cmd_file:
+ cmd_file.write(mycmd + '\n')
+ cmd_file.close()
+ PrintToLog(mycmd)
+ PrintToLog(result)
+ sys.stdout.flush()
+ if waittime != 0:
+ time.sleep(waittime)
+ if printresult == 1:
+ with open(os.path.join(args.save_path, 'mini_test_{}.bat'.format(args.device_num)), mode='a', encoding='utf-8') as cmd_file:
+ cmd_file.write("ping -n {} 127.0.0.1>null\n".format(waittime))
+ cmd_file.close()
+ return result
+
+def EnterShellCmd(shellcmd, waittime = 0, printresult = 1):
+ if shellcmd == "":
+ return
+ cmd = "hdc_std -t {} shell \"{}\"".format(args.device_num, shellcmd)
+ return EnterCmd(cmd, waittime, printresult)
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(description='manual to this script')
+ parser.add_argument('--save_path', type=str, default = 'D:\\DeviceTestTools\\screenshot')
+ parser.add_argument('--device_num', type=str, default = 'null')
+ parser.add_argument('--archive_path', type=str, default = 'Z:\workspace\ohos_L2\ohos\out\\rk3568\packages\phone')
+ args = parser.parse_args()
+
+ if args.device_num == 'null':
+ result = EnterCmd("hdc_std list targets", 1, 0)
+ print(result)
+ args.device_num = result.split()[0]
+
+ PrintToLog("\n\n########## First check key processes start ##############")
+ lose_process = []
+ process_pid = {}
+
+ two_check_process_list = ['huks_service', 'hilogd', 'hdf_devmgr', 'samgr', 'foundation', 'accesstoken_ser']
+ other_process_list = ['softbus_server', 'deviceauth_service']
+
+ for pname in two_check_process_list:
+ pids = EnterCmd("hdc_std -t {} shell pidof {}".format(args.device_num, pname), 0, 1)
+ try:
+ pidlist = pids.split()
+ int(pidlist[0])
+ for pid in pidlist:
+ int(pid)
+ process_pid[pname] = pidlist
+ except:
+ lose_process.append(pname)
+ all_p = EnterShellCmd("ps -elf")
+ for pname in other_process_list:
+ findp = all_p.find(pname, 0, len(all_p))
+ if findp == -1:
+ lose_process.append(pname)
+
+ if lose_process:
+ PrintToLog("\n\nERROR: %s, These processes are not exist!!!\n" % lose_process)
+ PrintToLog("End of check, test failed!")
+ sys.exit(99)
+ else:
+ PrintToLog("First processes check is ok\n")
+
+ time.sleep(10)
+
+    # Second check of key processes; compare their PIDs with the first check
+ PrintToLog("\n\n########## Second check key processes start ##############")
+ second_check_lose_process = []
+ # for pname in two_check_process_list + other_process_list:
+ for pname in two_check_process_list:
+ pids = EnterCmd("hdc_std -t {} shell pidof {}".format(args.device_num, pname), 0, 1)
+ try:
+ pidlist = pids.split()
+ if process_pid[pname] != pidlist:
+ if pname in two_check_process_list:
+ PrintToLog("ERROR: pid of %s is different the first check" % pname)
+ PrintToLog("SmokeTest find some fatal problems!")
+ PrintToLog("End of check, test failed!")
+ sys.exit(99)
+ else:
+                    PrintToLog("WARNING: pid of %s is different from the first check" % pname)
+ elif len(pidlist) != 1:
+ if pname in two_check_process_list:
+ PrintToLog("ERROR: pid of %s is not only one" % pname)
+ PrintToLog("SmokeTest find some fatal problems!")
+ PrintToLog("End of check, test failed!")
+ sys.exit(99)
+ else:
+                    PrintToLog("WARNING: pid of %s is not only one" % pname)
+ except:
+ second_check_lose_process.append(pname)
+
+ if second_check_lose_process:
+ PrintToLog("ERROR: pid of %s is not exist" % pname)
+ PrintToLog("SmokeTest find some fatal problems!")
+ PrintToLog("End of check, test failed!")
+ sys.exit(99)
+ else:
+ PrintToLog("Second processes check is ok\n")
+
+ target_dir = os.path.normpath(os.path.join(args.archive_path, "system"))
+ PrintToLog(target_dir)
+ ret_size = GetDirSize(target_dir)/1024/1024
+ PrintToLog('Size of system is :{:.2f}M'.format(ret_size))
+ if ret_size > 50:
+ PrintToLog('ERROR: Size of system({:.2f}M) is over the upper limit(50M)'.format(ret_size))
+ PrintToLog("End of check, test failed!")
+ sys.exit(99)
+
+ target_dir = os.path.normpath(os.path.join(args.archive_path, "data"))
+ ret_size = GetDirSize(target_dir)/1024/1024
+ PrintToLog('Size of data is :{:.2f}M'.format(ret_size))
+ if ret_size > 50:
+ PrintToLog('ERROR: Size of data({:.2f}M) is over the upper limit(50M)'.format(ret_size))
+ PrintToLog("End of check, test failed!")
+ sys.exit(99)
+
+ target_dir = os.path.normpath(os.path.join(args.archive_path, "updater"))
+ ret_size = GetDirSize(target_dir)/1024/1024
+ PrintToLog('Size of updater is :{:.2f}M'.format(ret_size))
+ if ret_size > 50:
+ PrintToLog('ERROR: Size of updater({:.2f}M) is over the upper limit(50M)'.format(ret_size))
+ PrintToLog("End of check, test failed!")
+ sys.exit(99)
+
+ target_dir = os.path.normpath(os.path.join(args.archive_path, "vendor"))
+ ret_size = GetDirSize(target_dir)/1024/1024
+ PrintToLog('Size of vendor is :{:.2f}M'.format(ret_size))
+ if ret_size > 50:
+ PrintToLog('ERROR: Size of vendor({:.2f}M) is over the upper limit(50M)'.format(ret_size))
+ PrintToLog("End of check, test failed!")
+ sys.exit(99)
+
+ PrintToLog("All testcase is ok")
+ PrintToLog("End of check, test succeeded!")
+ sys.exit(0)
diff --git "a/cases/smoke/basic/screenshot32/APL_compare_03/APL\345\237\272\347\272\277\346\240\207\345\207\206v1.0.json" "b/cases/smoke/basic/screenshot32/APL_compare_03/APL\345\237\272\347\272\277\346\240\207\345\207\206v1.0.json"
new file mode 100644
index 0000000000000000000000000000000000000000..7f7eec0d380b42b57fbc5de35c271b71f01f40dc
--- /dev/null
+++ "b/cases/smoke/basic/screenshot32/APL_compare_03/APL\345\237\272\347\272\277\346\240\207\345\207\206v1.0.json"
@@ -0,0 +1,47 @@
+[
+ {
+ "bundle&processName": "com.ohos.launcher",
+ "apl": "2"
+ },
+ {
+ "bundle&processName": "com.ohos.settings",
+ "apl": "2"
+ },
+ {
+ "bundle&processName": "com.ohos.systemui",
+ "apl": "2"
+ },
+ {
+ "bundle&processName": "com.ohos.screenlock",
+ "apl": "2"
+ },
+ {
+ "bundle&processName": "com.ohos.adminprovisioning",
+ "apl": "2"
+ },
+ {
+ "bundle&processName": "edm",
+ "apl": "3"
+ },
+ {
+ "bundle&processName": "com.ohos.settings.faceauth",
+ "apl": "2"
+ },
+ {
+ "bundle&processName": "cn.openharmony.inputmethodchoosedialog",
+ "apl":"3"
+ },
+ {
+ "bundle&processName":"media_service",
+ "apl":"3"
+ },
+ {
+ "bundle&processName":"com.ohos.amsdialog",
+ "apl":"3"
+ },
+ {
+ "bundle&processName":"com.ohos.useriam.authwidget",
+ "apl":"2"
+ }
+
+]
\ No newline at end of file
diff --git a/cases/smoke/basic/screenshot32/APL_compare_03/apl_config.py b/cases/smoke/basic/screenshot32/APL_compare_03/apl_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..b6bc30b516982e60ada6c084383812977e1abeac
--- /dev/null
+++ b/cases/smoke/basic/screenshot32/APL_compare_03/apl_config.py
@@ -0,0 +1,61 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2023 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#!/usr/bin/python3
+# -*- coding: utf-8 -*-
+
+import os
+# PATH='D:\\repo_test\\APL_compare_02\\'
+# PATH=os.getcwd()+'/'
+PATH = os.path.dirname(os.path.realpath(__file__)) + os.sep
+# read_excel.py
+'''
+SHEET_NAME:excel中的表名,中英文都可
+COLS:excel中的列号,从0开始
+SVN:SVN的安装目录下/bin目录(SVN在环境变量中的位置)
+SVN_URL:excel文件对应的url
+USER:svn的用户名
+PWD:svn的密码
+FILE_PATH:本地下载文件的路径
+'''
+SHEET_NAME="Sheet1"
+COLS=[1,3]
+
+SVN='D:/TortoiseSVN/bin'
+SVN_URL='https://PMAIL_2140981.china.huawei.com/svn/test测试/01 目录/01_1 目录/APL基线标准v1.0.xlsx'
+USER='hhhhs'
+PWD='123456'
+FILE_PATH=PATH+SVN_URL.split('/')[-1]
+
+# read_device.py
+'''
+SQL_SRC:设备上的数据库路径
+SQL_DES:本地下载文件路径
+DOWNLOAD_DB:从设备下载的hdc命令
+QUERY_HAP_APL:查询HAP APL的sql语句(查询多列可以依次添加字段,添加字段的顺序为比较时的字段优先级)
+QUERY_NATIVE_APL:查Native APL的sql语句
+'''
+SQL_SRC=" /data/service/el1/public/access_token/access_token.db"
+SQL_DES=PATH
+DOWNLOAD_DB="hdc -t {} file recv"
+QUERY_HAP_APL="select bundle_name,apl from hap_token_info_table"
+QUERY_NATIVE_APL="select process_name,apl from native_token_info_table"
+
+'''
+APL_LOG_FILE:执行脚本的日志信息
+APL_RECORD_PATH:APL对比记录的日志信息
+IS_OVERWRITE:是否覆盖之前的APL日志,w表示覆盖,a表示追加
+'''
+APL_LOG_FILE=PATH+'apl_compare.log'
+APL_RECORD_PATH=PATH+'apl_record.txt'
+IS_OVERWRITE='w'
\ No newline at end of file
diff --git a/cases/smoke/basic/screenshot32/APL_compare_03/common.py b/cases/smoke/basic/screenshot32/APL_compare_03/common.py
new file mode 100644
index 0000000000000000000000000000000000000000..2cf1c74ed6080c9f5d58f2f259145b6d08761cb9
--- /dev/null
+++ b/cases/smoke/basic/screenshot32/APL_compare_03/common.py
@@ -0,0 +1,95 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2023 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#!/usr/bin/python3
+import math
+import enum
+import time
+import logging
+import threading
+from apl_config import *
+
+log_tag = 'common'
+
+apl_file_log = logging.FileHandler(filename=APL_LOG_FILE, mode='a', encoding='utf-8')
+fmt = logging.Formatter(fmt="%(asctime)s %(message)s", datefmt='%Y-%m-%d %H:%M:%S %a')
+apl_file_log.setFormatter(fmt)
+
+# 定义日志
+apl_logger = logging.Logger(name = 'apl_compare_log', level=logging.INFO)
+apl_logger.addHandler(apl_file_log)
+
+class ErrorType(enum.Enum):
+ not_in_apl_table = 1
+ apl_is_invalid = 2
+
+class ApiLevel(enum.Enum):
+ normal = 1
+ system_basic = 2
+ system_core = 3
+
+class LogLevel(enum.Enum):
+ Error = 1
+ Info = 2
+
+class AplCompareException(Exception):
+ def __init__(self, msg):
+ self.msg = msg
+
+class AplCompareThread(threading.Thread):
+ def __init__(self, func, args=()):
+ super(AplCompareThread, self).__init__()
+ self.func = func
+ self.args = args
+ self.result = None
+ def run(self):
+ self.result = self.func(*self.args)
+ def get_result(self):
+ threading.Thread.join(self)
+ try:
+ return self.result
+ except Exception as e:
+ apl_set_log_content(LogLevel(1).name, log_tag, '{}'.format(e.args[0]))
+ return None
+
+def apl_log(msg):
+ # 写日志
+ apl_logger.info(msg)
+
+def apl_set_log_content(level, tag, msg):
+ log_content = timestamp() + ' {}'.format(level) + ' [{}]'.format(tag) + ' {}'.format(msg)
+ print(log_content)
+ apl_log(log_content)
+ return(log_content)
+
+def set_error_record(name,error):
+ return time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())+' %(name)-50s: %(error)-50s\n'%{'name':name,'error':error}
+
+def set_map(results):
+ if results == None:
+ return None
+ res_map = {}
+ for result in results:
+ res_map[result[0]] = set_value(result[1:])
+ return res_map
+
+def set_value(result):
+ value = []
+ for res in result:
+ if math.isnan(res):
+ res = 0
+ value.append(res)
+ return value
+
+def timestamp():
+ return time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())
diff --git a/cases/smoke/basic/screenshot32/APL_compare_03/compare.py b/cases/smoke/basic/screenshot32/APL_compare_03/compare.py
new file mode 100644
index 0000000000000000000000000000000000000000..8ee180fdf89736763ed6faabd49ac2ac796be2f8
--- /dev/null
+++ b/cases/smoke/basic/screenshot32/APL_compare_03/compare.py
@@ -0,0 +1,200 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2023 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#!/usr/bin/python3
+import time
+import sys
+import os
+sys.path.append(os.path.dirname(os.path.realpath(__file__)) + os.sep)
+from read_device import *
+from read_excel import *
+from apl_config import *
+
+def whitelist_check(apl, value, fields_from_whitelist):
+ # True 包含在白名单内
+ check = value in fields_from_whitelist.keys()
+ is_pass = False
+ if check and str(apl) == fields_from_whitelist[value]:
+ is_pass = True
+ return is_pass
+
+def compare_hap_apl(fields_from_device, fields_from_whitelist):
+ records = []
+ log_tag = 'compare_hap_apl'
+ hap_check = True
+ for value in fields_from_device:
+ apl = fields_from_device[value][0]
+ if apl > 1:
+ is_pass = whitelist_check(apl, value, fields_from_whitelist)
+ info = 'bundleName = {} apl = {}'.format(value, str(apl))
+ if is_pass == False:
+ hap_check = False
+ # info = value
+ # info = 'bundleName = {} apl = {}'.format(value, str(apl))
+ log_content = apl_set_log_content(LogLevel(1).name, log_tag, info)
+ records.append(log_content)
+ else:
+ apl_set_log_content(LogLevel(2).name, log_tag, info)
+ return records, hap_check
+
+def compare_native_apl(fields_from_device, fields_from_whitelist):
+ records = []
+ log_tag = 'compare_native_apl'
+ native_check = True
+ for value in fields_from_device:
+ apl = fields_from_device[value][0]
+ if apl > 2:
+ info = 'processName = {} apl = {}'.format(value, str(apl))
+ is_pass = whitelist_check(apl, value, fields_from_whitelist)
+ if is_pass == False:
+ native_check = False
+ log_content = apl_set_log_content(LogLevel(1).name, log_tag, info)
+ records.append(log_content)
+ else:
+ apl_set_log_content(LogLevel(2).name, log_tag, info)
+ return records, native_check
+
+def fields_compare_write_once(fields_from_device,fields_from_excel):
+ records=[]
+ for bundle_name in fields_from_device.keys():
+ if bundle_name not in fields_from_excel.keys():
+ record=(bundle_name,ErrorType(1).name)
+ records.append(record)
+ continue
+
+ fields=fields_from_device[bundle_name]
+ standard_fields=fields_from_excel[bundle_name]
+ if not isInvalid(fields,standard_fields):
+ record=(bundle_name,ErrorType(2).name)
+ records.append(record)
+ print('Compare successful!')
+ return records
+
+
+def isInvalid(fields,standard_fields):
+ if len(fields) == 1:
+ return fields[0] <= standard_fields[0]
+
+    for field, standard_field in zip(fields, standard_fields):
+ if field>standard_field:
+ return False
+ return True
+
+def write_record(name,error):
+ try:
+ file = open(APL_RECORD_PATH,'a')
+ err_record = set_error_record(name, error)
+ file.write(err_record)
+ file.close()
+ except Exception as e:
+        log_content = apl_set_log_content(LogLevel(1).name, 'write_record', str(e))
+ apl_log(log_content)
+
+def write_record_once(err_records,is_overwrite):
+ try:
+ file=open(APL_RECORD_PATH,is_overwrite)
+ for record in err_records:
+ err_record = set_error_record(record[0],record[1])
+ file.write(err_record)
+ file.close()
+ except Exception as e:
+        log_content = apl_set_log_content(LogLevel(1).name, 'write_record_once', str(e))
+ apl_log(log_content)
+
+def excel_thread():
+ try:
+ # settings={
+ # ' svn': SVN,
+ # 'url': url_encode(SVN_URL),
+ # 'user': USER,
+ # 'pwd': PWD,
+ # 'dir': FILE_PATH,
+ # }
+ # excel_file = FILE_PATH #svn_checkout(settings)
+ log_tag = 'excel_thread'
+ # if excel_file == None:
+ # apl_set_log_content(LogLevel(2).name, log_tag, 'svn_checkoutc failed') #raise
+ # apl_from_excel = read_excel(excel_file, sheet = SHEET_NAME, cols = COLS)
+ # path = PATH + 'APL基线标准v1.0.json'
+ path = PATH + 'temp.json'
+ apl_from_json = read_json(path)
+ return apl_from_json
+ except Exception as e:
+ apl_set_log_content(LogLevel(1).name, log_tag, 'excel_thread catch error: {}'.format(e.args[0]))
+ return None
+
+def sql_thread(sn, sn2):
+ try:
+ print(DOWNLOAD_DB.format(sn)+' ' + SQL_SRC + ' ' + SQL_DES)
+ print()
+ log_tag = 'sql_thread'
+ sql_file = download_from_device(DOWNLOAD_DB.format(sn), SQL_SRC, SQL_DES)
+ if sql_file == None:
+ raise
+ query_hap_apl_thread = AplCompareThread(query_hap_apl, (sql_file, QUERY_HAP_APL))
+ query_native_apl_thread = AplCompareThread(query_native_apl, (sql_file, QUERY_NATIVE_APL))
+
+ query_hap_apl_thread.start()
+ query_native_apl_thread.start()
+
+        query_hap_apl_thread.join()
+ query_native_apl_thread.join()
+
+ hap_apl_map = query_hap_apl_thread.get_result()
+ native_apl_map = query_native_apl_thread.get_result()
+
+ return hap_apl_map, native_apl_map
+ except:
+ apl_set_log_content(LogLevel(1).name, log_tag, 'download_from_device failed')
+ return None,None
+
+def apl_check_main(sn):
+ try:
+ log_tag = 'Main'
+ apl_set_log_content(LogLevel(2).name, log_tag, '--------APL Check Begin!--------')
+ excel_thr = AplCompareThread(excel_thread)
+ sql_thr = AplCompareThread(sql_thread, (sn, sn))
+
+ excel_thr.start()
+ sql_thr.start()
+
+ excel_thr.join()
+ sql_thr.join()
+
+ apl_from_excel = excel_thr.get_result()
+ hap_apl_map, native_apl_map = sql_thr.get_result()
+
+ if apl_from_excel == None or hap_apl_map == None or native_apl_map == None:
+ raise
+ hap_results, hap_check = compare_hap_apl(hap_apl_map, apl_from_excel)
+ native_results, native_check = compare_native_apl(native_apl_map, apl_from_excel)
+ write_record_once(hap_results, IS_OVERWRITE)
+ write_record_once(native_results, 'a')
+ if native_check == False or hap_check == False:
+ apl_set_log_content(LogLevel(1).name, log_tag, '--------APL Check failed![hap = {}, native = {}] --------'.format(hap_check, native_check))
+ apl_set_log_content(LogLevel(2).name, log_tag, '--------APL Check End! --------')
+ except Exception as e:
+ apl_set_log_content(LogLevel(1).name, log_tag, '--------APL Check failed![hap = False, native = False] --------')
+ apl_set_log_content(LogLevel(1).name, log_tag, "{}".format(e.args[0]))
+
+if __name__ == '__main__':
+ try:
+ sn = sys.argv[1]
+ except:
+ sn_list = []
+ result = os.popen('hdc list targets')
+ res = result.read()
+ for line in res.splitlines():
+ sn_list.append(line)
+ sn = sn_list[0]
+ apl_check_main(sn)
diff --git a/cases/smoke/basic/screenshot32/APL_compare_03/read_device.py b/cases/smoke/basic/screenshot32/APL_compare_03/read_device.py
new file mode 100644
index 0000000000000000000000000000000000000000..114a731d35825b9f28719ded814bd3f522e28c26
--- /dev/null
+++ b/cases/smoke/basic/screenshot32/APL_compare_03/read_device.py
@@ -0,0 +1,83 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2023 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#!/usr/bin/python3
+
+from subprocess import run
+import os
+import sqlite3
+
+import sys
+sys.path.append(os.path.dirname(os.path.realpath(__file__)) + os.sep)
+from common import *
+from apl_config import *
+log_tag = 'read_device'
+
+#从设备中导出数据库
+def download_from_device(cmd,sql_src,sql_des):
+ download_cmd=cmd+' '+sql_src+' '+sql_des
+ apl_set_log_content(LogLevel(2).name, log_tag, 'database start downloading!')
+ try:
+ result = os.popen(download_cmd)
+ stdout = result.read()
+ print(stdout)
+ if 'Fail' in stdout:
+ raise AplCompareException(stdout.replace('\n\n','').replace('[Fail]', ''))
+ #sql_file=sql_des+'\\'+sql_src.split('/').pop()
+ sql_file = sql_des+sql_src.split('/').pop()
+ apl_set_log_content(LogLevel(2).name, log_tag, '{} download successful!'.format(sql_file))
+ return sql_file
+ except Exception as e:
+ apl_set_log_content(LogLevel(1).name, log_tag, '{}'.format(e.msg))
+ return None
+
+
+def sql_connect(db):
+ try:
+ if not os.path.exists(db):
+ raise AplCompareException('{} is not exists!'.format(db))
+ conn = sqlite3.connect(db)
+ return conn
+ except AplCompareException as e:
+ apl_set_log_content(LogLevel(1).name, log_tag, '{}'.format(e.msg))
+ return None
+
+#数据库语句查询
+def query_records(db,sql):
+ log_content = ''
+ try:
+ conn = sql_connect(db)
+ if conn == None:
+ raise AplCompareException('{} cannot connect!'.format(db))
+ cursor = conn.cursor()
+ cursor.execute(sql)
+ results = cursor.fetchall()
+ conn.close()
+ apl_set_log_content(LogLevel(2).name, log_tag, '"{}" query successful!'.format(sql))
+ return results
+ except sqlite3.OperationalError as e:
+ apl_set_log_content(LogLevel(2).name, log_tag, 'database {}'.format(e.args[0]))
+ return None
+ except AplCompareException as e:
+ apl_set_log_content(LogLevel(1).name, log_tag, '{}'.format(e.msg))
+ return None
+
+#查询hap_token_info_table中的bundle_name和apl
+def query_hap_apl(db,sql):
+ results = query_records(db, sql)
+ return set_map(results)
+
+#查询native_token_info_table中的process_name和apl
+def query_native_apl(db,sql):
+ results = query_records(db, sql)
+ return set_map(results)
diff --git a/cases/smoke/basic/screenshot32/APL_compare_03/read_excel.py b/cases/smoke/basic/screenshot32/APL_compare_03/read_excel.py
new file mode 100644
index 0000000000000000000000000000000000000000..20b6a25af87260df78d8d5febfcb4afc9756c889
--- /dev/null
+++ b/cases/smoke/basic/screenshot32/APL_compare_03/read_excel.py
@@ -0,0 +1,81 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2023 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#!/usr/bin/python3
+
+import subprocess
+import pandas as pd
+import urllib.parse
+import os
+import sys
+sys.path.append(os.path.dirname(os.path.realpath(__file__)) + os.sep)
+from common import *
+from apl_config import *
+import json
+log_tag = 'read_whitelist'
+
+# 全部文件夹检出(本地已经安装svn)
+def svn_checkout(settings):
+ try:
+ print(settings['url'])
+ print(settings['dir'])
+ os.chdir(settings['svn'])
+ cmd = 'svn export --force %(url)s %(dir)s --username %(user)s --password %(pwd)s'%settings
+ p = subprocess.Popen(cmd,shell=True,stdout=subprocess.PIPE,stderr=subprocess.PIPE)
+ stdout,stderr = p.communicate()
+ print(stderr)
+ if stderr != b'':
+ raise AplCompareException(str(stderr,'utf-8').replace('\r\n','\t'))
+ apl_set_log_content(LogLevel(2).name, log_tag, '{} export successful!'.format(settings['dir']))
+ return settings['dir']
+ except Exception as e:
+ apl_set_log_content(LogLevel(1).name, log_tag, "{}".format(e.msg))
+ return None
+
+#url编码
+def url_encode(url):
+ partions=url.split("/",3)
+ encode_url=partions[0]
+ partions[-1]=urllib.parse.quote(partions[-1])
+ for partion in partions[1:]:
+ encode_url=encode_url+'/'+partion
+ return encode_url
+
+def read_excel(file, sheet, cols):
+ try:
+ df = pd.read_excel(file, sheet_name = sheet, usecols = cols)
+ data_list = df.values.tolist()
+ apl_map = set_map(data_list)
+ apl_set_log_content(LogLevel(2).name, log_tag, '{} read successful!'.format(file))
+ return apl_map
+ except (ValueError,FileNotFoundError) as e:
+ apl_set_log_content(LogLevel(1).name, log_tag, "{}".format(e.msg))
+ return None
+
+
+def read_json(path):
+ try:
+ with open(path, 'r') as f:
+ file = f.read()
+ data_list = json.loads(file)
+ res_dict = set_dict(data_list)
+ return res_dict
+ except Exception as e:
+ apl_set_log_content(LogLevel(1).name, log_tag, '{}'.format(e.msg))
+ return None
+
+def set_dict(data_list: list()):
+ res_dict = {}
+ for res in data_list:
+ res_dict[res['bundle&processName']] = res['apl']
+ return res_dict
\ No newline at end of file
diff --git a/cases/smoke/basic/screenshot32/APL_compare_03/readme.md b/cases/smoke/basic/screenshot32/APL_compare_03/readme.md
new file mode 100644
index 0000000000000000000000000000000000000000..80db38344517934297a2282248d9f8ac5e019f04
--- /dev/null
+++ b/cases/smoke/basic/screenshot32/APL_compare_03/readme.md
@@ -0,0 +1,121 @@
+## 版本
+python版本:3.8.10
+pip版本:22.1.2
+python依赖:
+
+```
+pip install pandas
+pip install openpyxl
+# subprocess 为 Python 标准库,无需通过 pip 安装
+```
+## 使用
+`python compare.py`
+
+## 目录
+```
+APL_compare
+├── apl_config.py # 整个目录中的常量定义
+├── read_device.py # 从设备中下载db并解析表和字段的函数
+├── read_excel.py # 从excel中解析表和字段的函数
+├── compare.py # 脚本运行入口
+└── common.py # 公共需要用到的函数
+```
+## apl_config.py
+常量定义
+`PATH`:当前目录的地址
+### read_excel.py
+`SHEET_NAME`:excel中的表名
+`COLS`:excel中的列名,下标从0开始
+`SVN`:SVN的安装目录下的bin目录
+`SVN_URL`:excel文件在SVN上对应的url
+`USER`:svn的用户名
+`PWD`:svn的密码
+`FILE_PATH`:本地下载文件的路径
+`SQL_SRC`:设备上的数据库路径
+`SQL_DES`:本地下载文件路径
+`DOWNLOAD_DB`:从设备下载的hdc命令
+`QUERY_HAP_APL`:查询HAP APL的sql语句(查询多列可以依次添加字段,添加字段的顺序为比较时的字段优先级)
+`QUERY_NATIVE_APL`:查Native APL的sql语句
+`APL_LOG_FILE`:执行脚本的日志路径
+`APL_RECORD_PATH`:APL对比记录的日志路径
+`IS_OVERWRITE`:是否覆盖之前的APL日志,w表示覆盖,a表示追加
+
+## read_device.py
+用于从设备上导出数据库,并解析表和字段
+### 数据库导出
+函数:`download_from_device(cmd,sql_src,sql_des)`
+hdc命令:`cmd`
+设备中数据库路径:`sql_src`
+本地数据库路径:`sql_des`
+执行命令:`hdc file recv sql_src sql_des`
+### 连接数据库
+相关函数:`sql_connect(db)`
+传入参数:`db`--db文件存放路径
+返回结果:`conn`--数据库的连接
+### sql语句查询
+相关函数:`query_records(db,sql)`
+传入参数:`db`--需要连接的数据库;`sql`:sql查询语句
+返回结果:`results`--查询结果
+### 查hap_token_info_table中的bundle_name和apl
+sql语句:`QUERY_HAP_APL="select bundle_name,apl from hap_token_info_table"`
+相关函数:`query_hap_apl(db,sql)`
+传入参数:`db`--需要连接的数据库;`sql`:sql查询语句
+返回结果:`res_map`--查询结果转化为的字典(map,key是bundle_name,value是apl)
+### 查询native_token_info_table中的process_name和apl
+sql语句:`QUERY_NATIVE_APL="select process_name,apl from native_token_info_table"`
+相关函数:`query_native_apl(db,sql)`
+传入参数:`db`--需要连接的数据库;`sql`--sql查询语句
+返回结果:`res_map`--查询结果转化为的字典(map,key是process_name,value是apl)
+
+## read_excel.py
+### 从svn上下载excel
+相关函数:`svn_checkout(settings)`
+传入参数:`settings`--包含svn上文件路径,本地路径,用户名,密码
+返回结果:`settings['dir']`--本地下载路径
+### url编码
+相关函数:`url_encode(url)`
+传入参数:`url`
+返回结果:`encode_url`
+
+### 解析excel
+相关函数:`read_excel(file,sheet,cols)`
+传入参数:`file`--excel文件,`sheet`--表名,`cols`--列名
+返回结果:`apl_map`----查询结果转化为的字典(map,key是bundle/process_name,value是apl)
+
+## common.py
+### 脚本执行过程中的错误日志
+相关函数:`log(msg)`
+相关参数:`msg`--错误信息
+### 设置脚本执行过程中的日志信息
+相关函数:`apl_set_log_content(level, tag, msg)`
+相关参数:`level`--日志级别(Error/Info),`tag`--日志标签,`msg`--日志信息
+返回结果:带时间戳的日志信息
+
+### 设置apl记录的格式
+相关函数:set_error_record(name,error)
+相关参数:`name`--bundle name或者native name,`error`--错误原因
+返回结果:带时间戳的记录
+
+### 将查询结果转化成map的结构
+相关函数:`set_map(results)`
+传入参数:`results`--查询结果的列表
+返回结果:`res_map`
+### 转换查询结果map的value格式
+相关函数:`set_value(result)`
+传入参数:`result`--查询到的每一行结果
+返回结果:`value`--包含查询到的字段的列表
+### 时间戳
+相关函数:`timestamp()`
+返回结果:时间戳
+
+### 错误类型
+`ErrorType`:枚举类
+
+### 自定义异常
+`AplCompareException`
+
+### 自定义线程
+`AplCompareThread`
+
+### 日志格式设置
+`logging.basicConfig`
\ No newline at end of file
diff --git a/cases/smoke/basic/screenshot32/APL_compare_03/temp.json b/cases/smoke/basic/screenshot32/APL_compare_03/temp.json
new file mode 100644
index 0000000000000000000000000000000000000000..321828c2aad74c32bfebb9d70d3e1630fb38a127
--- /dev/null
+++ b/cases/smoke/basic/screenshot32/APL_compare_03/temp.json
@@ -0,0 +1,86 @@
+[
+ {
+ "bundle&processName": "com.ohos.launcher",
+ "apl": "2"
+ },
+ {
+ "bundle&processName": "com.ohos.settings",
+ "apl": "2"
+ },
+ {
+ "bundle&processName": "com.ohos.systemui",
+ "apl": "2"
+ },
+ {
+ "bundle&processName": "com.ohos.screenlock",
+ "apl": "2"
+ },
+ {
+ "bundle&processName": "com.ohos.adminprovisioning",
+ "apl": "2"
+ },
+ {
+ "bundle&processName": "edm",
+ "apl": "3"
+ },
+ {
+ "bundle&processName": "com.ohos.settings.faceauth",
+ "apl": "2"
+ },
+ {
+ "bundle&processName": "cn.openharmony.inputmethodchoosedialog",
+ "apl":"3"
+ },
+ {
+ "bundle&processName":"media_service",
+ "apl":"3"
+ },
+ {
+ "bundle&processName":"com.ohos.amsdialog",
+ "apl":"3"
+ },
+ {
+ "bundle&processName":"com.ohos.useriam.authwidget",
+ "apl":"2"
+ },
+ {
+ "bundle&processName":"com.ohos.powerdialog",
+ "apl":"2"
+ },
+ {
+ "bundle&processName":"com.ohos.filepicker",
+ "apl":"2"
+ },
+ {
+ "bundle&processName":"com.ohos.camera",
+ "apl":"3"
+ },
+ {
+ "bundle&processName":"com.ohos.smartperf",
+ "apl":"2"
+ },
+ {
+ "bundle&processName":"com.ohos.devicemanagerui",
+ "apl":"2"
+ },
+ {
+ "bundle&processName":"ohos.telephony.resources",
+ "apl":"3"
+ },
+ {
+ "bundle&processName":"com.ohos.notificationdialog",
+ "apl":"2"
+ },
+ {
+ "bundle&processName":"ohos.samples.distributedcalc",
+ "apl":"3"
+ },
+ {
+ "bundle&processName":"ohos.samples.distributedmusicplayer",
+ "apl":"3"
+ },
+ {
+ "bundle&processName":"com.ohos.note",
+ "apl":"3"
+ }
+]
\ No newline at end of file
diff --git a/cases/smoke/basic/screenshot32/acls_check/acl_check.py b/cases/smoke/basic/screenshot32/acls_check/acl_check.py
new file mode 100644
index 0000000000000000000000000000000000000000..a6781a178a1902a19e8de78a78c1c145a0d1b55b
--- /dev/null
+++ b/cases/smoke/basic/screenshot32/acls_check/acl_check.py
@@ -0,0 +1,75 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2023 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import os
+import sys
+
+sys.path.append(os.path.dirname(os.path.realpath(__file__)) + os.sep)
+from resolve_token_info import *
+from read_acl_whitelist import *
+
+log_tag = 'acl_check'
+
+
+def whitelist_check(whitelist, acls):
+ try:
+ set_log_content(LogLevel(2).name, log_tag + '->whitelist_check',
+ '-------------------------- Trustlist Verification begin --------------------------')
+ check_pass = True
+ for k, v in acls.items():
+ if k in whitelist.keys():
+ temp = whitelist[k]
+ for acl in v:
+ if acl not in temp:
+ check_pass = False
+ set_log_content(LogLevel(2).name, log_tag + '->whitelist_check',
+ 'precessName = {} the acl = {} trustlist is not configured.'.format(k, acl))
+ else:
+ check_pass = False
+ set_log_content(LogLevel(2).name, log_tag + '->whitelist_check', 'precessName = {} the acls = {} trustlist is not configured.'.format(k, v))
+ if check_pass == False:
+ raise AclCheckException(
+ '-------------------------- Trustlist Verification failed --------------------------')
+ else:
+ set_log_content(LogLevel(2).name, log_tag + '->whitelist_check',
+ '-------------------------- Trustlist Verification successful --------------------------')
+ except Exception as e:
+ set_log_content(LogLevel(1).name, log_tag + '->whitelist_check', e.msg)
+ raise
+
+
+def main(sn):
+ set_log_content(LogLevel(2).name, log_tag,
+ '-------------------------- ACL check begin --------------------------')
+ try:
+ hdc_command(GENERATING_TOKEN_INFO_COMMAND.format(sn, TOKEN_INFO_URL.format(sn)))
+ hdc_command(DOWNLOAD_TOKEN_INFO_COMMAND.format(sn, TOKEN_INFO_URL.format(sn), DOWNLOAD_TOKEN_INFO_URL.format(sn)))
+ hdc_command(CLEAR_TOKEN_INFO_FILE.format(sn, TOKEN_INFO_URL.format(sn)))
+ file = read_txt(DOWNLOAD_TOKEN_INFO_URL.format(sn))
+ # clear_token_info_txt(DOWNLOAD_TOKEN_INFO_URL.format(sn))
+ acls_dict = check_and_get(file)
+ acl_whitelist = read_json(PATH + 'acl_whitelist.json')
+ whitelist = get_acl_dict(acl_whitelist)
+ whitelist_check(whitelist, acls_dict)
+ except Exception as e:
+ set_log_content(LogLevel(1).name, log_tag, e.msg)
+ set_log_content(LogLevel(1).name, log_tag,
+ '-------------------------- ACL check failed --------------------------')
+ finally:
+ set_log_content(LogLevel(2).name, log_tag,
+ '-------------------------- ACL check end --------------------------')
+
+
+if __name__ == '__main__':
+ sn = sys.argv[1]
+ main(sn)
diff --git a/cases/smoke/basic/screenshot32/acls_check/acl_whitelist.json b/cases/smoke/basic/screenshot32/acls_check/acl_whitelist.json
new file mode 100644
index 0000000000000000000000000000000000000000..35cef12ade7989f9c87a132c0388c541c719e07d
--- /dev/null
+++ b/cases/smoke/basic/screenshot32/acls_check/acl_whitelist.json
@@ -0,0 +1,176 @@
+[
+ {
+ "processName": "hiview",
+ "acls": [
+ "ohos.permission.DUMP"
+ ]
+ },
+ {
+ "processName": "privacy_service",
+ "acls": [
+ "ohos.permission.MANAGE_DISPOSED_APP_STATUS"
+ ]
+ },
+ {
+ "processName": "inputmethod_service",
+ "acls": [
+ "ohos.permission.INPUT_MONITORING"
+ ]
+ },
+ {
+ "processName": "memmgrservice",
+ "acls": [
+ "ohos.permission.INTERACT_ACROSS_LOCAL_ACCOUNTS_EXTENSION"
+ ]
+ },
+ {
+ "processName": "locationhub",
+ "acls": [
+ "ohos.permission.GET_SENSITIVE_PERMISSIONS"
+ ]
+ },
+ {
+ "processName": "useriam",
+ "acls": [
+ "ohos.permission.ACCESS_AUTH_RESPOOL",
+ "ohos.permission.INTERACT_ACROSS_LOCAL_ACCOUNTS_EXTENSION"
+ ]
+ },
+ {
+ "processName": "pinauth",
+ "acls": [
+ "ohos.permission.ACCESS_AUTH_RESPOOL"
+ ]
+ },
+ {
+ "processName": "foundation",
+ "acls": [
+ "ohos.permission.PUBLISH_SYSTEM_COMMON_EVENT",
+ "ohos.permission.PERMISSION_START_ABILITIES_FROM_BACKGROUND",
+ "ohos.permission.GRANT_SENSITIVE_PERMISSIONS",
+ "ohos.permission.REVOKE_SENSITIVE_PERMISSIONS",
+ "ohos.permission.MANAGE_HAP_TOKENID",
+ "ohos.permission.START_INVISIBLE_ABILITY",
+ "ohos.permission.INPUT_MONITORING",
+ "ohos.permission.INSTALL_SANDBOX_BUNDLE"
+ ]
+ },
+ {
+ "processName": "dscreen",
+ "acls": [
+ "ohos.permission.CAPTURE_SCREEN"
+ ]
+ },
+ {
+ "processName": "sensors",
+ "acls": [
+ "ohos.permission.GET_SENSITIVE_PERMISSIONS"
+ ]
+ },
+ {
+ "processName": "camera_service",
+ "acls": [
+ "ohos.permission.GET_SENSITIVE_PERMISSIONS"
+ ]
+ },
+ {
+ "processName": "audio_policy",
+ "acls": [
+ "ohos.permission.GET_SENSITIVE_PERMISSIONS"
+ ]
+ },
+ {
+ "processName": "msdp_sa",
+ "acls": [
+ "ohos.permission.INPUT_MONITORING",
+ "ohos.permission.ACCESS_DISTRIBUTED_HARDWARE",
+ "ohos.permission.INTERCEPT_INPUT_EVENT"
+ ]
+ },
+ {
+ "processName": "dslm_service",
+ "acls": [
+ "ohos.permission.ACCESS_IDS"
+ ]
+ },
+ {
+ "processName": "accountmgr",
+ "acls": [
+ "ohos.permission.ENFORCE_USER_IDM"
+ ]
+ },
+ {
+ "processName": "hdcd",
+ "acls": [
+ "ohos.permission.GET_BUNDLE_INFO_PRIVILEGED",
+ "ohos.permission.INSTALL_BUNDLE",
+ "ohos.permission.LISTEN_BUNDLE_CHANGE",
+ "ohos.permission.CHANGE_ABILITY_ENABLED_STATE",
+ "ohos.permission.REMOVE_CACHE_FILES",
+ "ohos.permission.START_ABILITIES_FROM_BACKGROUND",
+ "ohos.permission.PERMISSION_USED_STATS",
+ "ohos.permission.DUMP",
+ "ohos.permission.NOTIFICATION_CONTROLLER",
+ "ohos.permission.PUBLISH_SYSTEM_COMMON_EVENT",
+ "ohos.permission.CLEAN_APPLICATION_DATA"
+ ]
+ },
+ {
+ "processName": "softbus_server",
+ "acls": [
+ "ohos.permission.GET_SENSITIVE_PERMISSIONS"
+ ]
+ },
+ {
+ "processName": "backup_sa",
+ "acls": [
+ "ohos.permission.INSTALL_BUNDLE"
+ ]
+ },
+ {
+ "processName": "media_service",
+ "acls": [
+ "ohos.permission.CAPTURE_SCREEN"
+ ]
+ },
+ {
+ "processName": "security_component_service",
+ "acls": [
+ "ohos.permission.GRANT_SENSITIVE_PERMISSIONS",
+ "ohos.permission.REVOKE_SENSITIVE_PERMISSIONS"
+ ]
+ },
+ {
+ "processName": "distributedsched",
+ "acls": [
+ "ohos.permission.INPUT_MONITORING",
+ "ohos.permission.MANAGE_MISSIONS"
+ ]
+ },
+ {
+ "processName": "accessibility",
+ "acls": [
+ "ohos.permission.INTERCEPT_INPUT_EVENT"
+ ]
+ },
+ {
+ "processName": "dlp_permission_service",
+ "acls": [
+ "ohos.permission.INSTALL_SANDBOX_BUNDLE",
+ "ohos.permission.UNINSTALL_SANDBOX_BUNDLE"
+ ]
+ },
+ {
+ "processName": "quick_fix",
+ "acls": [
+ "ohos.permission.INSTALL_QUICK_FIX_BUNDLE",
+ "ohos.permission.UNINSTALL_QUICK_FIX_BUNDLE"
+ ]
+ },
+ {
+ "processName": "sharing_service",
+ "acls": [
+ "ohos.permission.CAPTURE_SCREEN"
+ ]
+ }
+]
diff --git a/cases/smoke/basic/screenshot32/acls_check/config.py b/cases/smoke/basic/screenshot32/acls_check/config.py
new file mode 100644
index 0000000000000000000000000000000000000000..ff0a63065d272d27055c8fb55c64c90036bddacc
--- /dev/null
+++ b/cases/smoke/basic/screenshot32/acls_check/config.py
@@ -0,0 +1,37 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2023 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import os
+import platform
+import time
+
+# 系统分隔符
+SYS_SEQ = os.sep
+# 系统平台
+SYS_PLATFORM = platform.system()
+
+PATH = os.path.dirname(os.path.realpath(__file__)) + SYS_SEQ
+# 脚本的执行日志
+LOG_FILE = PATH + SYS_SEQ + "native_sa.log"
+# 设备上生成的token info 文件名
+TOKEN_INFO_NAME = 'token_info_'+ str(time.time_ns()) +'_{}.txt'
+# 设备上生成文件存放位置
+TOKEN_INFO_URL = '/data/{}'.format(TOKEN_INFO_NAME)
+# 设备上文件生成命令
+GENERATING_TOKEN_INFO_COMMAND = 'hdc -t {} shell atm dump -t > {}'
+# 下载token info 文件存放路径
+DOWNLOAD_TOKEN_INFO_URL = PATH + TOKEN_INFO_NAME
+# 文件下载命令
+DOWNLOAD_TOKEN_INFO_COMMAND = 'hdc -t {} file recv {} {}'
+# 删除设备上的文件命令
+CLEAR_TOKEN_INFO_FILE = 'hdc -t {} shell rm -rf {}'
diff --git a/cases/smoke/basic/screenshot32/acls_check/read_acl_whitelist.py b/cases/smoke/basic/screenshot32/acls_check/read_acl_whitelist.py
new file mode 100644
index 0000000000000000000000000000000000000000..802950857dfc6272539a49501d55f9167268204f
--- /dev/null
+++ b/cases/smoke/basic/screenshot32/acls_check/read_acl_whitelist.py
@@ -0,0 +1,49 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2023 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import os
+import sys
+
+sys.path.append(os.path.dirname(os.path.realpath(__file__)) + os.sep)
+import json
+from utils import *
+
+log_tag = 'read_acl_whitelist'
+
+
+def read_json(path):
+ set_log_content(LogLevel(2).name, log_tag, 'read {}'.format(path))
+ if not os.path.exists(path):
+ set_log_content(LogLevel(2).name, log_tag, '{} file not exits'.format(path))
+ raise AclCheckException('{} file not exits'.format(path))
+ try:
+ with open(path, 'r') as f:
+ file = f.read()
+ return file
+ except Exception as e:
+ set_log_content(LogLevel(1).name, log_tag, e.msg)
+ raise AclCheckException('{} failed to read the file.'.format(path))
+
+
+def get_acl_dict(file):
+ try:
+ acls_dict = {}
+ f = json.loads(file)
+ for it in f:
+ key = it.get('processName')
+ values = it.get('acls')
+ acls_dict[key] = values
+ return acls_dict
+ except Exception as e:
+ set_log_content(LogLevel(1).name, log_tag, '{}'.format(e.msg))
+ raise
diff --git a/cases/smoke/basic/screenshot32/acls_check/resolve_token_info.py b/cases/smoke/basic/screenshot32/acls_check/resolve_token_info.py
new file mode 100644
index 0000000000000000000000000000000000000000..6c2d16c9cab204e53e1be1d7e5cb05eece6d0ada
--- /dev/null
+++ b/cases/smoke/basic/screenshot32/acls_check/resolve_token_info.py
@@ -0,0 +1,72 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2023 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import os
+import sys
+
+sys.path.append(os.path.dirname(os.path.realpath(__file__)) + os.sep)
+from utils import *
+
+log_tag = 'resolve_token_info'
+
+
+def check_and_get(file: list):
+ nativeAcls = {}
+ try:
+ set_log_content(LogLevel(2).name, log_tag,
+ '-------------------------- invalidPermList check begin --------------------------')
+ check_pass = True
+ processName = 'xxxxxxxx'
+ for it in file:
+ if it.find('processName') != -1:
+ processName = it.replace(',', '').split(':')[1].split('"')[1]
+ elif it.find('invalidPermList') != -1:
+ check_pass = False
+ msg = 'invalidPermList information is detected in processName = {}'.format(processName)
+ set_log_content(LogLevel(2).name, log_tag, msg)
+ elif check_pass and it.find('nativeAcls') != -1:
+ bb = it.split(':')
+ if bb[1].split('"')[1].__len__() == 0:
+ continue
+ permissionNameList = bb[1].split('"')[1].split(',')
+ nativeAcls[processName] = permissionNameList
+ if check_pass == False:
+ raise AclCheckException('-------------------------- The invalidPermList check failed --------------------------')
+ else:
+ set_log_content(LogLevel(2).name, log_tag,
+ '-------------------------- The invalidPermList check successful --------------------------')
+ except Exception as e:
+ set_log_content(LogLevel(1).name, log_tag, e.msg)
+ raise
+ return nativeAcls
+
+
+def clear_token_info_txt(path):
+ try:
+ os.remove(path)
+ except Exception as e:
+ set_log_content(LogLevel(1).name, log_tag, e.msg)
+
+
+def read_txt(path):
+ set_log_content(LogLevel(2).name, log_tag, 'read {}'.format(path))
+ if not os.path.exists(path):
+ set_log_content(LogLevel(2).name, log_tag, '{} file not exits'.format(path))
+ raise AclCheckException('{} file not exits!'.format(path))
+ try:
+ with open(path, 'r') as f:
+ file = f.readlines()
+ return file
+ except Exception as e:
+ set_log_content(LogLevel(1).name, log_tag, e.msg)
+ raise AclCheckException('{} failed to read the file.'.format(path))
diff --git a/cases/smoke/basic/screenshot32/acls_check/utils.py b/cases/smoke/basic/screenshot32/acls_check/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..545ae46cde8087cc3b771e21862f9d6a48b5cd69
--- /dev/null
+++ b/cases/smoke/basic/screenshot32/acls_check/utils.py
@@ -0,0 +1,71 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2023 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import enum
+import logging
+import os
+import sys
+from subprocess import Popen, PIPE, STDOUT
+
+sys.path.append(os.path.dirname(os.path.realpath(__file__)) + os.sep)
+from config import *
+
+log_tag = 'utils'
+
+
+class AclCheckException(Exception):
+ def __init__(self, msg):
+ self.msg = msg
+
+
+def timestamp():
+ return time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())
+
+
+class LogLevel(enum.Enum):
+ Error = 1
+ Info = 2
+
+
+logging.basicConfig(filename=LOG_FILE, level=logging.INFO, format='%(asctime)s %(message)s',
+ datefmt='%Y-%m-%d %H:%M:%S %a')
+
+
+def log(msg):
+ logging.error(msg)
+
+
+def set_log_content(level, tag, msg):
+ log_content = timestamp() + ' {}'.format(level) + ' [{}]'.format(tag) + ' {}'.format(msg)
+ print(log_content)
+ log(log_content)
+ return (log_content)
+
+
+def shell_command(command_list: list):
+ try:
+ print(command_list)
+ process = Popen(command_list, stdout=PIPE, stderr=STDOUT)
+ exitcode = process.wait()
+ set_log_content(LogLevel(2).name, log_tag, '{} operation fuccessful!'.format(command_list))
+ return process, exitcode
+ except Exception as e:
+ set_log_content(LogLevel(1).name, log_tag, e.msg)
+ raise AclCheckException(e.msg)
+
+
+def hdc_command(command):
+ print(command)
+ command_list = command.split(' ')
+ _, exitcode = shell_command(command_list)
+ return exitcode
diff --git a/cases/smoke/basic/screenshot32/resource/app_capture_screen_test_config.json b/cases/smoke/basic/screenshot32/resource/app_capture_screen_test_config.json
index 2ea874344ba3733ed9cce03823a01a39e1e8281c..a9b2319b4e45901ac9ff89e87a65ff6598fbb9ab 100644
--- a/cases/smoke/basic/screenshot32/resource/app_capture_screen_test_config.json
+++ b/cases/smoke/basic/screenshot32/resource/app_capture_screen_test_config.json
@@ -1,12 +1,10 @@
[
{
- "DEVICE_1":[5, 6, 7, 8, 9, 10],
+ "DEVICE_1":[5, 6, 7, 8, 9],
"DEVICE_2":[1, 2, 3, 4],
- "DEVICE_3":[5, 6, 7, 8, 9, 10],
- "DEVICE_4":[1, 2, 3, 4],
- "recent-x-y":[515, 1240],
+ "recent-x-y":[513, 1244],
"recent_del-x-y":[360, 1170],
- "permisson_ok-x-y":[500, 1130],
+ "permisson_ok-x-y":[530, 1100],
"wlan-x-y":[300, 300],
"wlan_button-x-y":[640, 200],
"note_content-x-y":[500, 310],
@@ -20,7 +18,7 @@
"screenshot-x-y":[115,480],
"remount":["mount -o rw,remount"],
"stop_hilog":["hilog -w stop"],
- "cmp_cmd-level":["", 80],
+ "cmp_cmd-level":["", 65],
"get_file_from_dev":[""],
"send_file_to_dev":["", ""]
},
@@ -30,8 +28,8 @@
"crop_range":[140, 450, 30, 500],
"kill_settings": ["killall com.ohos.settings"],
"all_actions": [
- [1, "shot_cmd", "settings"], [1, "cmp_cmd-level", "settings"], [8, "wlan-x-y"], [2, "shot_cmd", "wlan_list"], [20, "connect_wifi", "wlan_list"], [1, "shot_cmd", "wifi"],
- [1, "shot_cmd", "wifi_connection_status"], [1, "cmp_twice", "wifi", 90, "crop_range"], [1, "kill_settings"]
+ [1, "shot_cmd", "settings"], [1, "cmp_cmd-level", "settings"], [8, "wlan-x-y"], [2, "shot_cmd", "wlan_list"], [25, "connect_wifi", "wlan_list"], [1, "shot_cmd", "wifi"],
+ [1, "shot_cmd", "wifi_connection_status"], [1, "cmp_twice", "wifi", 20, "crop_range"], [1, "kill_settings"]
]
},
{
@@ -57,14 +55,14 @@
"entry": "",
"check_result":["cd /data/log/hilog && grep -nr PreviewOutputCallback", "OnFrameStarted"],
"compress_log":["cd /data/log/hilog && tar -cf camera_log.tar *"],
- "open_camera_log":["rm /data/log/hilog/* && hilog -b X;hilog -b D -T CAMERA;hilog -r"],
+ "open_camera_log":["rm -rf /data/log/hilog/* && hilog -b X;hilog -b D -T CAMERA;hilog -r"],
"start_camera":["aa start -a com.ohos.camera.MainAbility -b com.ohos.camera"],
"recover_log":["cd data/log/hilog/;hilog -x > camera_log.txt;hilog -b D"],
"check_photos":["aa dump -a | grep com.ohos.photos.MainAbility", "com.ohos.photos"],
"kill_camera": ["killall com.ohos.camera"],
"kill_photos": ["killall com.ohos.photos"],
"all_actions": [
- [1, "open_camera_log"], [5, "start_camera"], [3, "take_photos-x-y"], [2, "convert_to_video-x-y"], [3, "take_photos-x-y"], [2, "stop_video-x-y"], [6, "convert_to_photos-x-y"],
+ [1, "open_camera_log"], [5, "start_camera"], [3, "take_photos-x-y"], [2, "convert_to_video-x-y"], [3, "take_photos-x-y"], [2, "stop_video-x-y"], [11, "convert_to_photos-x-y"],
[1, "recover_log"], [1, "check_result"], [1, "shot_cmd", "camera"], [1, "compress_log"], [1, "check_photos"], [1, "get_file_from_dev", "/data/log/hilog/camera_log.tar"],
[1, "kill_camera"], [1, "kill_photos"]
]
@@ -82,26 +80,12 @@
[1, "process_check"], [2, "sandbox_path_check"], [1, "kill_photos"]
]
},
- {
- "app_name": "video_test",
- "entry": "",
- "mk_test_dir":["mkdir -p /data/app/el2/100/base/ohos.acts.multimedia.video.videoplayer/haps/entry/files"],
- "start_video_log":["rm /data/log/hilog/* && hilog -r && hilog -Q pidoff;hilog -G 512M;hilog -w start -l 400000000 -m none"],
- "start_test":["aa test -p ohos.acts.multimedia.video.videoplayer -b ohos.acts.multimedia.video.videoplayer -s unittest OpenHarmonyTestRunner -w 2000000 -s timeout 60000", "Failure: 0, Error: 0, Pass: 1"],
- "compress_log":["cd /data/log/hilog && tar -cf video_log.tar *"],
- "kill_video": ["killall ohos.acts.multimedia.video.videoplayer"],
- "all_actions": [
- [1,"start_video_log"], [2, "install_hap", "vediotest/ActsVideoPlayerJsTest.hap"], [1, "mk_test_dir"], [1, "remount"],
- [1, "send_file_to_dev", "vediotest/H264_AAC.mp4", "/data/app/el2/100/base/ohos.acts.multimedia.video.videoplayer/haps/entry/files/"],
- [5, "start_test"], [1, "stop_hilog"], [1, "compress_log"], [1, "get_file_from_dev", "/data/log/hilog/video_log.tar"], [1, "kill_video"]
- ]
- },
{
"app_name": "note",
"entry": "aa start -a MainAbility -b com.ohos.note",
"kill_note": ["killall com.ohos.note"],
"all_actions": [
- [2, "kill_keyboard-x-y"], [2, "permisson_ok-x-y"], [2, "permisson_ok-x-y"], [5, "note_content-x-y"], [2, "note_content-x-y"], [1, "shot_cmd", "note"], [1, "cmp_cmd-level", "note"], [2, "recent-x-y"], [1, "recent_del-x-y"]
+ [2, "kill_keyboard-x-y"], [2, "permisson_ok-x-y"], [2, "permisson_ok-x-y"], [5, "note_content-x-y"], [2, "note_content-x-y"], [1, "shot_cmd", "note"], [1, "cmp_cmd-level", "note", 70], [2, "recent-x-y"], [1, "recent_del-x-y"]
]
},
{
diff --git a/cases/smoke/basic/screenshot32/resource/capturescreentest.py b/cases/smoke/basic/screenshot32/resource/capturescreentest.py
index f64eb1c52a4d6244ecac2f1b3ba856f3374d413a..bf3f0a7869caa0bd32bf4baf80f6f74cc3e33df7 100644
--- a/cases/smoke/basic/screenshot32/resource/capturescreentest.py
+++ b/cases/smoke/basic/screenshot32/resource/capturescreentest.py
@@ -23,11 +23,14 @@ import re
import subprocess
import shlex
import datetime
-import sqlite3
import shutil
import numpy
import cv2
import pytesseract
+sys.path.append(os.path.dirname(os.path.realpath(__file__)).replace('resource', 'acls_check'))
+sys.path.append(os.path.dirname(os.path.realpath(__file__)).replace('resource', 'APL_compare_03'))
+from acl_check import *
+from compare import *
from pytesseract import Output
from PIL import Image
@@ -36,13 +39,15 @@ def print_to_log(str):
time = datetime.datetime.now()
str = "[{}] {}".format(time, str)
print(str)
- with open(os.path.join(args.save_path, 'test_{}.log'.format(args.device_num)),\
- mode='a', encoding='utf-8') as log_file:
+ with open(os.path.join(args.save_path,
+ 'test_{}.log'.format(args.device_num)),
+ mode='a',
+ encoding='utf-8') as file:
console = sys.stdout
- sys.stdout = log_file
+ sys.stdout = file
print(str)
sys.stdout = console
- log_file.close()
+ file.close()
def enter_cmd(mycmd, waittime=0, printresult=1):
@@ -54,12 +59,14 @@ def enter_cmd(mycmd, waittime=0, printresult=1):
while enter_cmdRetry:
enter_cmdRetry -= 1
try:
- p = subprocess.Popen(mycmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ p = subprocess.Popen(mycmd,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
result, unused_err = p.communicate(timeout=25)
try:
- result=result.decode(encoding="utf-8")
+ result = result.decode(encoding="utf-8")
except UnicodeDecodeError:
- result=result.decode('gbk', errors='ignore')
+ result = result.decode('gbk', errors='ignore')
break
except Exception as e:
result = 'retry failed again'
@@ -67,27 +74,18 @@ def enter_cmd(mycmd, waittime=0, printresult=1):
cmd_retry_cnt += 1
p.kill()
if printresult == 1:
- with open(os.path.join(args.save_path, 'test_{}.bat'.format(args.device_num)),\
- mode='a', encoding='utf-8') as cmd_f:
- cmd_f.write(mycmd + '\n')
- cmd_f.close()
print_to_log(mycmd)
print_to_log(result)
sys.stdout.flush()
if waittime != 0:
time.sleep(waittime)
- if printresult == 1:
- with open(os.path.join(args.save_path, 'test_{}.bat'.format(args.device_num)),\
- mode='a', encoding='utf-8') as cmd_f:
- cmd_f.write("ping -n {} 127.0.0.1>null\n".format(waittime))
- cmd_f.close()
return result
-def enter_shell_cmd(shellcmd, waittime=0, printresult=1):
+def enter_shell_cmd(shellcmd, waittime=1, printresult=1):
if shellcmd == "":
return
- cmd = "hdc_std -t {} shell \"{}\"".format(args.device_num, shellcmd)
+ cmd = "hdc -t {} shell \"{}\"".format(args.device_num, shellcmd)
return enter_cmd(cmd, waittime, printresult)
@@ -98,43 +96,43 @@ def sys_exit():
enter_shell_cmd("cd /data/log/faultlog/faultlogger && tar -cf after_test_jscrash{}.tar jscrash*".format(args.device_num))
file_from_dev("/data/log/faultlog/faultlogger/after_test_jscrash{}.tar".format(args.device_num), \
os.path.normpath(args.save_path))
- print_to_log("SmokeTest:: SmokeTest find some key problems!")
- print_to_log("SmokeTest:: End of check, test failed!")
+ print_to_log("SmokeTest: SmokeTest find some key problems!")
+ print_to_log("SmokeTest: End of check, test failed!")
sys.exit(98)
def file_to_dev(src, dst):
- cmd = "hdc_std -t {} file send \"{}\" \"{}\"".format(args.device_num, src, dst)
+ cmd = "hdc -t {} file send \"{}\" \"{}\"".format(args.device_num, src, dst)
return enter_cmd(cmd, 1, 1)
def file_from_dev(src, dst):
- cmd = "hdc_std -t {} file recv \"{}\" \"{}\"".format(args.device_num, src, dst)
+ cmd = "hdc -t {} file recv \"{}\" \"{}\"".format(args.device_num, src, dst)
return enter_cmd(cmd, 1, 1)
def connect_check():
- connection_status = enter_cmd("hdc_std list targets", 2)
+ connection_status = enter_cmd("hdc list targets", 2)
connection_cnt = 0
while args.device_num not in connection_status and connection_cnt < 15:
- connection_status = enter_cmd("hdc_std list targets", 2)
+ connection_status = enter_cmd("hdc list targets", 2)
connection_cnt += 1
if connection_cnt == 15:
- print_to_log("SmokeTest:: Device disconnection!!")
- print_to_log("SmokeTest:: End of check, test failed!")
+ print_to_log("SmokeTest: Device disconnection!!")
+ print_to_log("SmokeTest: End of check, test failed!")
sys.exit(101)
def sandbox_check(process):
- print_to_log("SmokeTest:: start to check sandbox path")
+ print_to_log("SmokeTest: start to check sandbox path")
medialibrarydata_pidnum = enter_shell_cmd("pgrep -f {}".format(process), 1)
medialibrarydata_pidnum = medialibrarydata_pidnum.strip()
sandboxf = enter_shell_cmd("echo \"ls /storage/media/local/\"|nsenter -t {} -m sh".format(medialibrarydata_pidnum), 1)
if "files" not in sandboxf:
- print_to_log("SmokeTest:: error: can not find sandbox path : /storage/media/local/files")
+ print_to_log("SmokeTest: error: can not find sandbox path : /storage/media/local/files")
return -1
else:
- print_to_log("SmokeTest:: success: find sandbox path : /storage/media/local/files")
+ print_to_log("SmokeTest: success: find sandbox path : /storage/media/local/files")
return 1
@@ -150,7 +148,10 @@ def get_coordinate(path, target):
wifi_range[1] += height
print_to_log(wifi_range)
data_img = img[wifi_range[0]:wifi_range[1], wifi_range[2]:wifi_range[3]]
- data = pytesseract.image_to_data(data_img, output_type=Output.DICT, config=tessdata_dir_config, lang='eng')
+ data = pytesseract.image_to_data(data_img,
+ output_type=Output.DICT,
+ config=tessdata_dir_config,
+ lang='eng')
for i in range(len(data['text'])):
if data['text'][i] == target:
dx = int((wifi_range[2] + wifi_range[3]) / 2)
@@ -165,30 +166,25 @@ def get_coordinate(path, target):
def connect_wifi(prefix, pic):
try:
- data = get_coordinate("{}\\{}_{}".format(args.save_path, prefix, pic), "testapold")
- enter_shell_cmd("uinput -M -m {} {} -c 0".format(data[0], data[1]), WAIT_TIME_TWO)
+ data = get_coordinate("{}\\{}_{}".format(args.save_path, prefix, pic),
+ "testapold")
+ enter_shell_cmd("uinput -M -m {} {} -c 0".format(data[0], data[1]),
+ WAIT_TIME_TWO)
enter_shell_cmd("uinput -M -m 360 200 -c 0")
- enter_shell_cmd("uinput -M -m 680 810 -c 0", WAIT_TIME_ONE)
- enter_shell_cmd("uinput -M -m 80 910 -c 0", WAIT_TIME_ONE)
- enter_shell_cmd("uinput -M -m 150 910 -c 0", WAIT_TIME_ONE)
- enter_shell_cmd("uinput -M -m 150 910 -c 0", WAIT_TIME_ONE)
- enter_shell_cmd("uinput -M -m 110 810 -c 0", WAIT_TIME_ONE)
- enter_shell_cmd("uinput -M -m 50 1150 -c 0", WAIT_TIME_TWO)
- enter_shell_cmd("uinput -M -m 680 810 -c 0", WAIT_TIME_ONE)
- enter_shell_cmd("uinput -M -m 50 1150 -c 0", WAIT_TIME_TWO)
- enter_shell_cmd("uinput -M -m 250 810 -c 0", WAIT_TIME_ONE)
- enter_shell_cmd("uinput -M -m 220 910 -c 0", WAIT_TIME_ONE)
- enter_shell_cmd("uinput -M -m 50 1150 -c 0", WAIT_TIME_TWO)
- enter_shell_cmd("uinput -M -m 40 810 -c 0", WAIT_TIME_ONE)
- enter_shell_cmd("uinput -M -m 500 1020 -c 0", WAIT_TIME_ONE)
- enter_shell_cmd("uinput -M -m 50 1150 -c 0", WAIT_TIME_ONE)
- enter_shell_cmd("uinput -M -m 680 690 -c 0")
- enter_shell_cmd("snapshot_display -f /data/screen_test/{}".format("testapold.jpeg"))
- file_from_dev("/data/screen_test/{}".format("testapold.jpeg"), args.save_path)
+ enter_shell_cmd("uinput -K -d 2032 -u 2032 -d 2017 -u 2017 -d 2035"
+ " -u 2035 -d 2035 -u 2035 -d 2039 -u 2039 -d 2000"
+ " -u 2000 -d 2034 -u 2034 -d 2020 -u 2020 -d 2001 -u 2001")
+ enter_shell_cmd("uinput -M -m 360 200 -c 0")
+ enter_shell_cmd("uinput -M -m 50 1140 -c 0")
+ enter_shell_cmd("uinput -M -m 500 1020 -c 0")
+ enter_shell_cmd("uinput -M -m 50 1140 -c 0")
+ enter_shell_cmd("uinput -K -d 2054 -u 2054")
+ enter_shell_cmd("snapshot_display -f /data/local/tmp/screen_test/{}".format("testapold.jpeg"))
+ file_from_dev("/data/local/tmp/screen_test/{}".format("testapold.jpeg"), args.save_path)
enter_shell_cmd("uinput -M -m 550 680 -c 0", single_action[0])
except Exception as e:
print(e)
- print_to_log("SmokeTest:: wifi list loading errror!")
+        print_to_log("SmokeTest: wifi list loading error!")
def calculate(image1, image2):
@@ -223,11 +219,14 @@ def classify_hist_with_split(image1, image2, size=(256, 256)):
def crop_picture(prefix, pic, crop_range):
- pic_path = "{}\\{}_{}".format(args.save_path, prefix, pic)
- save_path = "{}\\{}_{}".format(args.save_path, prefix, pic)
- im = cv2.imread(pic_path)
- im = im[crop_range[0]:crop_range[1], crop_range[2]:crop_range[3]]
- cv2.imwrite(save_path, im)
+ try:
+ pic_path = "{}\\{}_{}".format(args.save_path, prefix, pic)
+ save_path = "{}\\{}_{}".format(args.save_path, prefix, pic)
+ im = cv2.imread(pic_path)
+ im = im[crop_range[0]:crop_range[1], crop_range[2]:crop_range[3]]
+ cv2.imwrite(save_path, im)
+ except Exception as e:
+ pass
def cmp_picture(prefix, pic, num=1):
@@ -248,10 +247,10 @@ def cmp_picture(prefix, pic, num=1):
def shot_and_cmp(image):
prefix = args.device_num
- enter_shell_cmd("snapshot_display -f /data/screen_test/{}_{}".format(prefix, image))
- file_from_dev("/data/screen_test/{}_{}".format(prefix, image), args.save_path)
+ enter_shell_cmd("snapshot_display -f /data/local/tmp/screen_test/{}_{}".format(prefix, image))
+ file_from_dev("/data/local/tmp/screen_test/{}_{}".format(prefix, image), args.save_path)
similarity = cmp_picture(prefix, image)
- print_to_log("SmokeTest:: launcher similarity is {}%".format(similarity))
+ print_to_log("SmokeTest: launcher similarity is {}%".format(similarity))
return similarity
@@ -259,7 +258,7 @@ def distributed_test():
if "1/2" in args.test_num or "2/2" in args.test_num:
report_path = os.path.normpath(os.path.join(args.save_path, "distributed_report.txt"))
if args.test_num == "2/2":
- enter_shell_cmd("ifconfig eth0 192.168.0.1", WAIT_TIME_ONE)
+ enter_shell_cmd("ifconfig eth0 192.168.0.1")
ping_result = enter_shell_cmd("ping 192.168.0.2 -i 1 -c 2", 3)
file_is_exist = enter_shell_cmd("cd /data; find . -name distributed_report.txt")
ping_cnt = 0
@@ -268,22 +267,22 @@ def distributed_test():
ping_result = enter_shell_cmd("ping 192.168.0.2 -i 1 -c 2", WAIT_TIME_FOUR)
ping_cnt += 1
if ping_cnt == 30:
- print_to_log("SmokeTest:: Ping failed, timeout of 80s")
+ print_to_log("SmokeTest: Ping failed, timeout of 80s")
sys_exit()
while "distributed_report.txt" not in file_is_exist and wait_cnt < 30:
- print_to_log("SmokeTest:: waiting for the distributed test to end ")
+ print_to_log("SmokeTest: waiting for the distributed test to end ")
file_is_exist = enter_shell_cmd("cd /data; find . -name distributed_report.txt", WAIT_TIME_FOUR)
wait_cnt += 1
elif args.test_num == "1/2":
- enter_shell_cmd("ifconfig eth0 192.168.0.2", WAIT_TIME_ONE)
+ enter_shell_cmd("ifconfig eth0 192.168.0.2")
ping_result = enter_shell_cmd("ping 192.168.0.1 -i 1 -c 2", WAIT_TIME_FOUR)
ping_cnt = 0
while "2 packets transmitted, 2 received" not in ping_result and ping_cnt < 20:
ping_result = enter_shell_cmd("ping 192.168.0.1 -i 1 -c 2", WAIT_TIME_FOUR)
ping_cnt += 1
if ping_cnt == 30:
- print_to_log("SmokeTest:: Ping failed, timeout of 80s")
- print_to_log("SmokeTest:: ##### case 0 : distributed test start #####")
+ print_to_log("SmokeTest: Ping failed, timeout of 80s")
+ print_to_log("SmokeTest: ##### case 0 : distributed test start #####")
execute_path = os.path.normpath(os.path.join(args.tools_path, "resource"))
os.system("cd {} && python distributedtest.py --path {}".format(execute_path, args.save_path))
distributed_result = ""
@@ -293,20 +292,20 @@ def distributed_test():
distributed_result = f.read()
f.close()
except Exception as reason:
- print_to_log("SmokeTest:: distributed_report.txt is not exist!")
+            print_to_log("SmokeTest: distributed_report.txt does not exist!")
if "distributedcalc" in distributed_result:
- print_to_log("SmokeTest:: testcase 0, distributed is ok!")
+ print_to_log("SmokeTest: testcase 0, distributed is ok!")
else:
- print_to_log("SmokeTest:: error:testcase 0, distributed failed!")
+ print_to_log("SmokeTest: error:testcase 0, distributed failed!")
sys_exit()
- enter_shell_cmd("ifconfig eth0 down", WAIT_TIME_ONE)
+ enter_shell_cmd("ifconfig eth0 down")
def open_wlan():
enter_shell_cmd("aa start -a com.ohos.settings.MainAbility -b com.ohos.settings", WAIT_TIME_FOUR)
enter_shell_cmd("uinput -M -m 300 300 -c 0", WAIT_TIME_TWO)
enter_shell_cmd("uinput -M -m 640 200 -c 0", WAIT_TIME_FOUR)
- time.sleep(WAIT_TIME_TWO)
+ time.sleep(WAIT_TIME_FOUR)
enter_shell_cmd("killall com.ohos.settings", WAIT_TIME_TWO)
@@ -322,7 +321,7 @@ if __name__ == "__main__":
args = parser.parse_args()
if args.device_num == 'null':
- result = enter_cmd("hdc_std list targets", 1, 0)
+ result = enter_cmd("hdc list targets", 1, 0)
print(result)
args.device_num = result.split()[0]
with open(args.config) as f:
@@ -330,39 +329,41 @@ if __name__ == "__main__":
cmp_status = 0
global_pos = all_app[0]
- WAIT_TIME_ONE = 1
WAIT_TIME_TWO = 2
WAIT_TIME_FOUR = 4
reboot_cnt = 2
while reboot_cnt:
reboot_cnt -= 1
- enter_shell_cmd("mkdir -p /data/screen_test/train_set")
+ enter_shell_cmd("mkdir -p /data/local/tmp/screen_test/train_set")
+ enter_shell_cmd("power-shell wakeup;power-shell setmode 602")
rmlock_cnt = 3
while rmlock_cnt:
- enter_shell_cmd("uinput -T -m 425 1000 425 400;power-shell wakeup;uinput -T -m 425 400 425 1000;\
- power-shell setmode 602;uinput -T -m 425 1000 425 400;", WAIT_TIME_ONE)
+ enter_shell_cmd("uinput -T -m 425 400 425 1000;uinput -T -m 425 1000 425 400")
rmlock_cnt -= 1
- enter_shell_cmd("hilog -w stop", WAIT_TIME_ONE)
- enter_shell_cmd("cd /data/log/hilog && tar -cf system_start_log_{}.tar *".format(args.device_num), WAIT_TIME_ONE)
+ enter_shell_cmd("hilog -w stop")
+ enter_shell_cmd("cd /data/log/hilog && tar -cf system_start_log_{}.tar *".format(args.device_num))
file_from_dev("/data/log/hilog/system_start_log_{}.tar".format(args.device_num), args.save_path)
connect_check()
launcher_similarity = shot_and_cmp("launcher.jpeg")
- if launcher_similarity >= 90:
- print_to_log("SmokeTest:: launcher screenshot comparison is ok!")
+ power_state = enter_shell_cmd("hidumper -s 3308")
+ if "State=2" not in power_state:
+ print_to_log("SmokeTest: ERROR, DISPLAY POWER MANAGER DUMP State ≠ 2")
+ if launcher_similarity >= 80:
+ print_to_log("SmokeTest: launcher screenshot comparison is ok!")
break
elif reboot_cnt >= 1:
- print_to_log("SmokeTest:: launcher screenshot comparison failed, reboot and try!!!")
+ print_to_log("SmokeTest: launcher screenshot comparison failed, reboot and try!!!")
enter_shell_cmd("rm -rf /data/*;reboot")
for i in range(5):
- enter_cmd("hdc_std list targets", 10)
+ enter_cmd("hdc list targets", 10)
else:
- print_to_log("SmokeTest:: launcher screenshot comparison failed")
+ print_to_log("SmokeTest: launcher screenshot comparison failed")
sys_exit()
enter_shell_cmd("cat /proc/`pidof foundation`/smaps_rollup")
- print_to_log("\nSmokeTest:: ########## First check key processes start ##############")
+ print_to_log("\nSmokeTest: ########## First check key processes start ##############")
lose_process = []
process_pid = {}
with open(os.path.normpath(os.path.join(args.tools_path, "resource/process.txt")), "r+") as f:
@@ -370,7 +371,7 @@ if __name__ == "__main__":
two_check_process_list = text.split('#####')[1].split()[0:-1]
other_process_list = text.split('#####')[2].split()
for pname in two_check_process_list:
- pids = enter_cmd("hdc_std -t {} shell pidof {}".format(args.device_num, pname), 0, 1)
+ pids = enter_cmd("hdc -t {} shell pidof {}".format(args.device_num, pname), 0, 1)
try:
pidlist = pids.split()
int(pidlist[0])
@@ -386,47 +387,54 @@ if __name__ == "__main__":
lose_process.append(pname)
if lose_process:
- print_to_log("SmokeTest:: error: %s, These processes are not exist!!!" % lose_process)
+ print_to_log("SmokeTest: error: %s, These processes do not exist!!!" % lose_process)
sys_exit()
else:
- print_to_log("SmokeTest:: first processes check is ok")
+ print_to_log("SmokeTest: first processes check is ok")
- power_state = enter_shell_cmd("hidumper -s 3308", WAIT_TIME_ONE)
- if "State=2" not in power_state:
- print_to_log("SmokeTest:: DISPLAY POWER MANAGER DUMP State=0")
+ apl_check_main(args.device_num)
+ apl_compare = os.path.normpath(os.path.join(args.tools_path, "APL_compare_03", "apl_compare.log"))
+ try:
+ with open(apl_compare, mode='r', encoding='utf-8', errors='ignore') as compare_file:
+ compare_file.seek(0)
+ apl_result = compare_file.read()
+ compare_file.close()
+ except Exception as reason:
+        print_to_log("SmokeTest: error: apl_compare.log does not exist!")
+ if "APL Check failed" in apl_result:
+ print_to_log("SmokeTest: error: apl check failed")
sys_exit()
- open_wlan()
+ main(args.device_num)
+ native_sa = os.path.normpath(os.path.join(args.tools_path, "acls_check", "native_sa.log"))
+ try:
+ with open(native_sa, mode='r', encoding='utf-8', errors='ignore') as native_file:
+ native_file.seek(0)
+ acl_result = native_file.read()
+ native_file.close()
+ except Exception as reason:
+        print_to_log("SmokeTest: error: native_sa.log does not exist!")
+ if "ACL check failed" in acl_result:
+ print_to_log("SmokeTest: error: acl check failed")
+ sys_exit()
- special_warehouse = [
- "arkui_ace_engine",
- "developtools_integration_verification",
- "graphic_graphic_2d",
- "windowmanager"]
-
- for warehouse in special_warehouse:
- if warehouse in args.pr_url:
- if args.test_num == "1/2":
- args.test_num = "3/2"
- elif args.test_num == "2/2":
- args.test_num = "4/2"
- print(args.pr_url)
try:
args.test_num.index('/')
idx_total = args.test_num.split('/')
if len(idx_total) != 2:
- print_to_log("SmokeTest:: test_num is invaild !!!")
+            print_to_log("SmokeTest: test_num is invalid !!!")
sys_exit()
elif idx_total[1] == '1':
- idx_list = list(range(1, len(all_app)))
+ idx_list = global_pos['DEVICE_2']+global_pos['DEVICE_1']
else:
idx_list = global_pos['DEVICE_{}'.format(idx_total[0])]
except ValueError as e:
print_to_log(e)
idx_list = list(map(eval, args.test_num.split()))
- print_to_log("SmokeTest:: start to carry out the following testcases: ")
- print_to_log("SmokeTest:: testcase number: {} ".format(idx_list))
+ print_to_log("SmokeTest: start to carry out the following testcases: ")
+ print_to_log("SmokeTest: testcase number: {} ".format(idx_list))
+ open_wlan()
fail_idx_list = []
fail_name_list = []
smoke_first_failed = ''
@@ -434,26 +442,16 @@ if __name__ == "__main__":
single_app = all_app[idx]
sys.stdout.flush()
call_app_cmd = single_app['entry']
- capture_screen_cmd = "snapshot_display -f /data/screen_test/{}_{}"
- print_to_log("\nSmokeTest:: ##### case {} : {} test start #####".format(idx, single_app['app_name']))
- with open(os.path.join(args.save_path, 'test_{}.bat'.format(args.device_num)),\
- mode='a', encoding='utf-8') as cmd_f:
- cmd_f.write("\nSmokeTest::::::case {} --- {} test start \n".format(idx, single_app['app_name']))
- cmd_f.close()
+ capture_screen_cmd = "snapshot_display -f /data/local/tmp/screen_test/{}_{}"
+ print_to_log("\nSmokeTest: ##### case {} : {} test start #####".format(idx, single_app['app_name']))
testcnt = 3
while testcnt:
testok = 0
if testcnt != 3:
- print_to_log("SmokeTest:: this testcase try again >>>>>>:\n")
- with open(os.path.join(args.save_path, 'test_{}.bat'.format(args.device_num)),\
- mode='a', encoding='utf-8') as cmd_f:
- cmd_f.write("\nSmokeTest::::::Last failed, try again \n")
- cmd_f.close()
- if idx == 1:
- testcnt = 1
+ print_to_log("SmokeTest: this testcase try again >>>>>>:\n")
if single_app['entry'] != "":
enter_shell_cmd(call_app_cmd, WAIT_TIME_FOUR)
- print_to_log("SmokeTest:: execute command {}".format(single_app['all_actions']))
+ print_to_log("SmokeTest: execute command {}".format(single_app['all_actions']))
prefix = args.device_num
raw_pic_name = ''
pic_name = ''
@@ -463,9 +461,9 @@ if __name__ == "__main__":
pic_name = "{}{}".format(single_action[2], ".jpeg")
else:
pic_name = "{}{}".format(single_app['app_name'], ".jpeg")
- enter_shell_cmd("rm /data/screen_test/*{}".format(pic_name), WAIT_TIME_ONE)
- enter_shell_cmd(capture_screen_cmd.format(prefix, pic_name), WAIT_TIME_ONE)
- file_from_dev("/data/screen_test/{}_{}".format(prefix, pic_name), args.save_path)
+ enter_shell_cmd("rm /data/local/tmp/screen_test/*{}".format(pic_name))
+ enter_shell_cmd(capture_screen_cmd.format(prefix, pic_name))
+ file_from_dev("/data/local/tmp/screen_test/{}_{}".format(prefix, pic_name), args.save_path)
next_cmd = ""
elif type(single_action[1]) == str and single_action[1] == 'cmp_twice':
next_cmd = ""
@@ -476,15 +474,15 @@ if __name__ == "__main__":
crop_picture(prefix, pic, crop_range)
first_similarity = cmp_picture(prefix, pic)
second_similarity = cmp_picture(prefix, pic, WAIT_TIME_TWO)
- print_to_log("SmokeTest:: first picture similarity is {}%".format(first_similarity))
- print_to_log("SmokeTest:: second picture similarity is {}%".format(second_similarity))
+ print_to_log("SmokeTest: first picture similarity is {}%".format(first_similarity))
+ print_to_log("SmokeTest: second picture similarity is {}%".format(second_similarity))
if first_similarity >= similarity or second_similarity >= similarity:
if testok != -1:
testok = 1
- print_to_log("SmokeTest:: {} screenshot check is ok".format(pic))
+ print_to_log("SmokeTest: {} screenshot check is ok".format(pic))
else:
testok = -1
- print_to_log("SmokeTest:: {} screenshot check is abnarmal".format(pic))
+                            print_to_log("SmokeTest: {} screenshot check is abnormal".format(pic))
elif type(single_action[1]) == str and single_action[1] == 'cmp_cmd-level':
next_cmd = ""
sys.stdout.flush()
@@ -493,34 +491,34 @@ if __name__ == "__main__":
else:
similarity = global_pos['cmp_cmd-level'][1]
similarity = int(similarity)
- print_to_log("SmokeTest:: start to contrast screenshot")
+ print_to_log("SmokeTest: start to contrast screenshot")
pic = "{}{}".format(single_action[2], ".jpeg")
crop_range = [80, 1200, 0, 720]
crop_picture(prefix, pic, crop_range)
pic_similarity = cmp_picture(prefix, pic)
- print_to_log("SmokeTest:: picture similarity is {}%".format(pic_similarity))
+ print_to_log("SmokeTest: picture similarity is {}%".format(pic_similarity))
if len(single_action) >= 3:
if pic_similarity >= similarity:
if testok != -1:
testok = 1
- print_to_log("SmokeTest:: {} screenshot check is ok".format(pic))
+ print_to_log("SmokeTest: {} screenshot check is ok".format(pic))
else:
testok = -1
- print_to_log("SmokeTest:: {} screenshot check is abnarmal".format(pic))
+                                print_to_log("SmokeTest: {} screenshot check is abnormal".format(pic))
elif type(single_action[1]) == str and single_action[1] == 'install_hap':
next_cmd = ""
if len(single_action) == 3:
- enter_cmd("hdc_std -t {} install \"{}\"".format(args.device_num,\
+ enter_cmd("hdc -t {} install \"{}\"".format(args.device_num,\
os.path.normpath(os.path.join(args.tools_path, single_action[2]))))
elif type(single_action[1]) == str and single_action[1] == 'get_file_from_dev':
next_cmd = ""
if len(single_action) == 3:
- enter_cmd("hdc_std -t {} file recv \"{}\" \"{}\"".format(args.device_num,\
+ enter_cmd("hdc -t {} file recv \"{}\" \"{}\"".format(args.device_num,\
single_action[2], os.path.normpath(args.save_path)))
elif type(single_action[1]) == str and single_action[1] == 'send_file_to_dev':
next_cmd = ""
if len(single_action) == 4:
- enter_cmd("hdc_std -t {} file send \"{}\" \"{}\"".format(args.device_num,\
+ enter_cmd("hdc -t {} file send \"{}\" \"{}\"".format(args.device_num,\
os.path.normpath(os.path.join(args.tools_path, single_action[2])), single_action[3]))
elif type(single_action[1]) == str and single_action[1] == 'connect_wifi':
next_cmd = ""
@@ -541,12 +539,12 @@ if __name__ == "__main__":
findsome = result.find(single_action[2], 0, len(result))
if findsome != -1:
testok = -1
- print_to_log("SmokeTest:: \"{}\" error:find fatal crash \"{}\"!".format(single_action[1],\
+ print_to_log("SmokeTest: \"{}\" error:find fatal crash \"{}\"!".format(single_action[1],\
single_action[2]))
sys_exit()
else:
testok = 1
- print_to_log("SmokeTest:: \"{}\" result is ok, not find fatal\
+ print_to_log("SmokeTest: \"{}\" result is ok, not find fatal\
crash \"{}\"!".format(single_action[1], single_action[2]))
sys.stdout.flush()
elif type(single_action[1]) == str:
@@ -562,12 +560,10 @@ if __name__ == "__main__":
findsome = result.find(target_[1], 0, len(result))
if findsome != -1:
testok = 1
- print_to_log("SmokeTest:: \"{}\" check result is ok, find \"{}\"!".format(target_[0],\
- target_[1]))
+ print_to_log("SmokeTest: \"{}\" check ok, find \"{}\"!".format(target_[0], target_[1]))
else:
testok = -1
- print_to_log("SmokeTest:: \"{}\" result is not ok, not find \"{}\"!".format(target_[0],\
- target_[1]))
+ print_to_log("SmokeTest: \"{}\" check failed, no \"{}\"!".format(target_[0],target_[1]))
sys.stdout.flush()
else:
next_cmd = "uinput -M -m {} {} -c 0".format(target_[0], target_[1])
@@ -576,19 +572,19 @@ if __name__ == "__main__":
enter_shell_cmd(next_cmd, single_action[0])
if testok == 1:
- print_to_log("SmokeTest:: testcase {}, {} is ok!".format(idx, single_app['app_name']))
+ print_to_log("SmokeTest: testcase {}, {} is ok!".format(idx, single_app['app_name']))
testcnt = 0
elif testok == -1 and smoke_first_failed == '':
if testcnt == 1:
fail_idx_list.append(idx)
fail_name_list.append(single_app['app_name'])
smoke_first_failed = single_app['app_name']
- print_to_log("SmokeTest:: error:testcase {}, {} is failed!".format(idx, single_app['app_name']))
+ print_to_log("SmokeTest: error:testcase {}, {} is failed!".format(idx, single_app['app_name']))
testcnt -= 1
elif testok == -1 and smoke_first_failed != '':
fail_idx_list.append(idx)
fail_name_list.append(single_app['app_name'])
- print_to_log("SmokeTest:: error:testcase {}, {} is failed!".format(idx, single_app['app_name']))
+ print_to_log("SmokeTest: error:testcase {}, {} is failed!".format(idx, single_app['app_name']))
testcnt = 0
else:
testcnt = 0
@@ -600,28 +596,27 @@ if __name__ == "__main__":
fail_str_list = [str(x) for x in fail_idx_list]
reboot_test_num = " ".join(fail_str_list)
if len(fail_idx_list) != 0:
- print_to_log("SmokeTest:: failed testcase number: {} ".format(fail_str_list))
- print_to_log("SmokeTest:: check \"reboot\" in reboot.txt".format(args.save_path))
+ print_to_log("SmokeTest: failed testcase number: {} ".format(fail_str_list))
+ print_to_log("SmokeTest: check \"reboot\" in reboot.txt".format(args.save_path))
with open(os.path.normpath(os.path.join(args.tools_path, "reboot.txt")), mode='a+') as f:
f.seek(0)
reboot_result = f.read()
f.close()
if len(reboot_result) < 1 and reboot_cnt >= 1:
- print_to_log("SmokeTest:: \"reboot\" is not found in the reboot.txt")
- print_to_log("SmokeTest:: the device will reboot and try the failed testcase")
- print_to_log("SmokeTest:: mkdir {}\\reboot".format(args.save_path))
+ print_to_log("SmokeTest: no \"reboot\" found in the reboot.txt")
+ print_to_log("SmokeTest: the device will reboot and try the failed testcase")
+ print_to_log("SmokeTest: mkdir {}\\reboot".format(args.save_path))
os.system("mkdir {}\\reboot".format(args.save_path))
- print_to_log("SmokeTest:: write \"reboot\" into reboot.txt".format(args.save_path))
+ print_to_log("SmokeTest: write \"reboot\" into reboot.txt".format(args.save_path))
with open(os.path.normpath(os.path.join(args.tools_path, "reboot.txt")), mode='w') as f:
f.write("reboot")
f.close()
- print_to_log("SmokeTest:: error: name {}, index {}, failed, rm /data/* and reboot".format(fail_name_list,\
- fail_idx_list))
+ print_to_log("SmokeTest: error: name {}, index {}, failed, reboot".format(fail_name_list,fail_idx_list))
enter_shell_cmd("rm -rf /data/* && reboot")
- reboot_result_list = enter_cmd("hdc_std list targets", 2)
+ reboot_result_list = enter_cmd("hdc list targets", 2)
number = 0
while args.device_num not in reboot_result_list and number < 15:
- reboot_result_list = enter_cmd("hdc_std list targets", 2)
+ reboot_result_list = enter_cmd("hdc list targets", 2)
number += 1
enter_shell_cmd("rm /data/log/hilog/*;hilog -r;hilog -w start -l 400000000 -m none", 1)
py_cmd = os.system("python {}\\resource\\capturescreentest.py --config \
@@ -635,10 +630,9 @@ if __name__ == "__main__":
else:
sys.exit(101)
else:
- print_to_log("SmokeTest:: error: name {}, index {}, these testcase is failed".format(fail_name_list,\
- fail_idx_list))
+ print_to_log("SmokeTest: error: name {}, index {}, failed".format(fail_name_list, fail_idx_list))
sys_exit()
else:
- print_to_log("SmokeTest:: all testcase is ok")
- print_to_log("SmokeTest:: End of check, test succeeded!")
+ print_to_log("SmokeTest: all testcase is ok")
+ print_to_log("SmokeTest: End of check, test succeeded!")
sys.exit(0)
diff --git a/cases/smoke/basic/screenshot32/resource/contacts.jpeg b/cases/smoke/basic/screenshot32/resource/contacts.jpeg
index 5903b54bf7d6b88151a4e59942b9379b0360a7dd..b1b2da07ce99ec395d08f9f12a1d5fee901472a2 100644
Binary files a/cases/smoke/basic/screenshot32/resource/contacts.jpeg and b/cases/smoke/basic/screenshot32/resource/contacts.jpeg differ
diff --git a/cases/smoke/basic/screenshot32/resource/distributedmusicplayer.jpeg b/cases/smoke/basic/screenshot32/resource/distributedmusicplayer.jpeg
index 7053239010419f6b03c819868801792a0941cacc..486811b6443543831fe8220666d46b1a37dce2fb 100644
Binary files a/cases/smoke/basic/screenshot32/resource/distributedmusicplayer.jpeg and b/cases/smoke/basic/screenshot32/resource/distributedmusicplayer.jpeg differ
diff --git a/cases/smoke/basic/screenshot32/resource/launcher.jpeg b/cases/smoke/basic/screenshot32/resource/launcher.jpeg
index 3ebdcd145e634c8fdba25a06e8605ad691a03f88..366633338b51727b7a12fbfad943a6c05793b069 100644
Binary files a/cases/smoke/basic/screenshot32/resource/launcher.jpeg and b/cases/smoke/basic/screenshot32/resource/launcher.jpeg differ
diff --git a/cases/smoke/basic/screenshot32/resource/mms.jpeg b/cases/smoke/basic/screenshot32/resource/mms.jpeg
index 80d5ceeffddf406441fea01b3109742a30a7ed54..ea9f7891cdbf95886e357d2b935778f97fd444b7 100644
Binary files a/cases/smoke/basic/screenshot32/resource/mms.jpeg and b/cases/smoke/basic/screenshot32/resource/mms.jpeg differ
diff --git a/cases/smoke/basic/screenshot32/resource/note.jpeg b/cases/smoke/basic/screenshot32/resource/note.jpeg
index ca261cf9d5b74918186cacbae9686aca47021947..0202c30a7a1341f8d09346b488a7fc116b6be155 100644
Binary files a/cases/smoke/basic/screenshot32/resource/note.jpeg and b/cases/smoke/basic/screenshot32/resource/note.jpeg differ
diff --git a/cases/smoke/basic/screenshot32/resource/notification_bar.jpeg b/cases/smoke/basic/screenshot32/resource/notification_bar.jpeg
index 3df52e44d3ea6026c0ebdb4b5c23b36942002ee8..530437b89baf164e28d3aefc4dfc339b97629bf3 100644
Binary files a/cases/smoke/basic/screenshot32/resource/notification_bar.jpeg and b/cases/smoke/basic/screenshot32/resource/notification_bar.jpeg differ
diff --git a/cases/smoke/basic/screenshot32/resource/photos.jpeg b/cases/smoke/basic/screenshot32/resource/photos.jpeg
index faccdff55629484ed30f8e18adec2bc07d1cc41d..5bed7c539ad8769bf36315e604ac95147da649bc 100644
Binary files a/cases/smoke/basic/screenshot32/resource/photos.jpeg and b/cases/smoke/basic/screenshot32/resource/photos.jpeg differ
diff --git a/cases/smoke/basic/screenshot32/resource/process.txt b/cases/smoke/basic/screenshot32/resource/process.txt
index 4899b7b233f09de69b3134d8761e6cf9aa277255..99efc488774faa8748c0918954b6392ab6091497 100644
--- a/cases/smoke/basic/screenshot32/resource/process.txt
+++ b/cases/smoke/basic/screenshot32/resource/process.txt
@@ -10,7 +10,6 @@ appspawn
hilogd
samgr
storage_daemon
-udevd
uinput_inject
multimodalinput
huks_service
@@ -30,7 +29,6 @@ distributeddata
useriam
inputmethod_ser
ui_service
-distributedfile
netmanager
sensors
media_service
@@ -41,7 +39,6 @@ telephony
camera_service
foundation
hdcd
-disp_gralloc_host
light_host
vibrator_host
sensor_host
diff --git a/cases/smoke/basic/screenshot32/resource/settings.jpeg b/cases/smoke/basic/screenshot32/resource/settings.jpeg
index bc8f8b8ee3ebf3d2e93688dd708e43747bf16c3e..46cf0d6f4055151eac00239d8e846c22a2ec0c8d 100644
Binary files a/cases/smoke/basic/screenshot32/resource/settings.jpeg and b/cases/smoke/basic/screenshot32/resource/settings.jpeg differ
diff --git a/cases/smoke/basic/screenshot32/resource/wifi.jpeg b/cases/smoke/basic/screenshot32/resource/wifi.jpeg
index 8d06193cdf300b62ea825f5a438856018fb4fc20..d0a0b17e1e7e69c50c9c6144d434e74b2e411a8e 100644
Binary files a/cases/smoke/basic/screenshot32/resource/wifi.jpeg and b/cases/smoke/basic/screenshot32/resource/wifi.jpeg differ
diff --git a/tools/components/get_components.py b/tools/components/get_components.py
new file mode 100644
index 0000000000000000000000000000000000000000..bec57d7508ae5a4160cae75478a42cb5fff9b0d7
--- /dev/null
+++ b/tools/components/get_components.py
@@ -0,0 +1,61 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# Copyright (c) 2023 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file finds components of config.json.
+
+import argparse
+import json
+import os
+
+
+class Analyzer:
+ @classmethod
+ def get_components(cls, config: str, output_file: str):
+ mandatory_components = list()
+ optional_components = list()
+ components = dict()
+ with open(config, 'r', encoding='utf-8') as r:
+ config_json = json.load(r)
+ inherit = config_json['inherit']
+ for json_name in inherit:
+ with open(json_name, 'r', encoding='utf-8') as r:
+ inherit_file = json.load(r)
+ for subsystem in inherit_file['subsystems']:
+ for component in subsystem['components']:
+ mandatory_components.append(component['component'])
+ for subsystem in config_json['subsystems']:
+ for component in subsystem['components']:
+ if component['component'] not in mandatory_components:
+ optional_components.append(component['component'])
+ components["components"] = mandatory_components + optional_components
+ with os.fdopen(os.open(output_file + ".json", os.O_WRONLY | os.O_CREAT, mode=0o640), "w") as fd:
+ json.dump(components, fd, indent=4)
+
+
+def get_args():
+ parser = argparse.ArgumentParser(
+ description=f"analyze components deps.\n")
+ parser.add_argument("-c", "--config_json", required=True, type=str,
+ help="path of root path of openharmony/vendor/hihope/{product_name}/config.json")
+ parser.add_argument("-o", "--output_file", type=str, default="components",
+ help="eg: name of output_json_file")
+ return parser.parse_args()
+
+
+if __name__ == '__main__':
+ args = get_args()
+ config_json_path = args.config_json
+ output_file_name = args.output_file
+ Analyzer.get_components(config_json_path, output_file_name)
\ No newline at end of file
diff --git a/tools/components_deps/README.md b/tools/components_deps/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..1142e657bf555f323b466e6db3812512be43f20e
--- /dev/null
+++ b/tools/components_deps/README.md
@@ -0,0 +1,69 @@
+# components_deps_analyzer.py
+
+## 功能介绍
+
+基于vendor下的config.json、xml文件中的开源部件集、BUILD.gn文件,分析是否存在对闭源部件的依赖以及是否存在对非必选部件的无条件依赖。
+
+结果以json格式进行存储。
+
+## 支持产品
+
+config.json文件主要是关于rk3568系列,已测试产品包括rk3568、rk3568_mini_system、pc_mini_system、tablet_mini_system、phone_mini_system的config.json文件
+
+## 使用说明
+
+前置条件:
+
+1. 获取BUILD.gn文件
+1. 获取包含开源部件集的xml文件
+1. 获取包含部件集定义的config.json文件
+1. 获取BUILD.gn文件对应的name
+
+命令介绍:
+
+1. `-h`或`--help`命令查看帮助
+ ```shell
+ > python components_deps_analyzer.py --help
+ usage: components_deps_analyzer.py [-h] -p COMPONENTS_GN_PATH_LIST -n NEW_LINE_NUMS_LIST -g GN_NAME -c CONFIG_PATH -o OPEN_COMPONENT_XML_PATH [-r RESULT_JSON_NAME]
+
+ analyze components deps.
+
+ optional arguments:
+ -h, --help show this help message and exit
+ -p COMPONENTS_GN_PATH_LIST, --components_gn_path_list COMPONENTS_GN_PATH_LIST
+ path of pr BUILD.gn
+ -g GN_NAME, --gn_name GN_NAME
+ gn file corresponding name
+ -c CONFIG_PATH, --config_path CONFIG_PATH
+ path of config_file
+ -o OPEN_COMPONENT_XML_PATH, --open_component_xml_path OPEN_COMPONENT_XML_PATH
+ open component name set
+ -r RESULT_JSON_NAME, --result_json_name RESULT_JSON_NAME
+ name of output_json
+
+
+ ```
+1. 使用示例
+ ```shell
+ python components_deps_analyzer.py -p BUILD.gn,pkgs/BUILD.gn -n 1_2_3,4_5 -g third_party_curl,third_party_zlib -c config_path -o .\gn_xml\ohos.xml
+ ```
+
+## 输出格式介绍(result.json)
+
+```
+[
+ {
+ "file_path": BUILD.gn文件路径,
+ "error": [
+ {
+ "line": 行号,
+ "code": 行号对应代码,
+ "rule": 触发规则,
+ "detail": 详细说明
+ },
+ ...
+ ]
+ },
+ ...
+]
+```
\ No newline at end of file
diff --git a/tools/components_deps/components_deps_analyzer.py b/tools/components_deps/components_deps_analyzer.py
new file mode 100644
index 0000000000000000000000000000000000000000..a96f29aea6d52cff2c2451b2ad3ca37e4fcce150
--- /dev/null
+++ b/tools/components_deps/components_deps_analyzer.py
@@ -0,0 +1,172 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# Copyright (c) 2023 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file provide the detection tool for unconditional dependence of required components on optional components.
+
+import argparse
+import json
+import os
+import re
+
+
+class Analyzer:
+ @classmethod
+ def __get_open_components(cls, xml_path):
+ open_components = list()
+ gn_name = list()
+ white_components_list = ["common"]
+ with open(xml_path, 'r', encoding='utf-8') as r:
+ xml_info = r.readlines()
+ for line in xml_info:
+ if "path=" in line:
+ one_component = re.findall('path="(.*?)"', line)[0].split('/')[-1]
+ open_components.append(one_component)
+ gn_name.append(re.findall('name="(.*?)"', line)[0])
+ if "third_party" in gn_name:
+ white_components_list.append(one_component)
+ return open_components, gn_name, white_components_list
+
+ @classmethod
+ def __deal_config_json(cls, config_json):
+ components = list()
+ for subsystem in config_json['subsystems']:
+ for component in subsystem['components']:
+ if component['component'] not in components:
+ components.append(component['component'])
+ return components
+
+ @classmethod
+ def __get_required_components(cls, config_path: str):
+ required_components = list()
+ files = os.listdir(config_path)
+ for file in files:
+ if file.endswith(".json"):
+ with open(os.path.join(config_path, file), 'r', encoding='utf-8') as r:
+ config_json = json.load(r)
+ required_components += cls.__deal_config_json(config_json)
+ return required_components
+
+ @classmethod
+ def __get_line(cls, txt_list, key_words: str):
+ for i, txt in enumerate(txt_list):
+ if key_words in txt:
+ return i + 1
+ return 0
+
+ @classmethod
+ def __judge_deps(cls, gn_path: str, new_line_num: str, open_components_list, optional_components, white_names):
+ error = list()
+ deps = list()
+ dependent_close = True
+ new_line_num = [int(i) for i in new_line_num.split('_')]
+ with open(gn_path, 'r', encoding='utf-8') as r:
+ gn_lines = [line.strip("\n") for line in r.readlines()]
+ txt = ''
+ for line in gn_lines:
+ txt += line
+ in_if_txt = ' '.join(re.findall('if \(.+?\{(.*?)\}', txt))
+ for component in open_components_list:
+ if dependent_close == True:
+ if component in txt:
+ dependent_close = False
+ for i in new_line_num:
+ dep_txt = re.findall('deps = \[(.*?)\]', gn_lines[i - 1]) + re.findall('deps \+= \[(.*?)\]', gn_lines[i - 1])
+ dep_info = list()
+ for info in dep_txt:
+ if '/' in info:
+ dep_info += re.findall('/(.*?):', info)
+ else:
+ dep_info += re.findall('"(.*?):', info)
+ for component in optional_components:
+ if component not in white_names and component in dep_info and component not in in_if_txt:
+ deps.append((component, i))
+ if dependent_close == True and re.findall('deps =', txt):
+ line = cls.__get_line(gn_lines, 'deps =')
+ error.append(
+ {"line": line, "code": gn_lines[line - 1].strip(), "rule": "depend close component",
+ "detail": "可能依赖闭源部件,请检查deps中的内容"})
+ for one_dep in deps:
+ error.append(
+ {"line": one_dep[1], "code": gn_lines[one_dep[1] - 1].strip(), "rule": "depend optional component",
+ "detail": "依赖开源部件中的非必选部件{},请检查deps中的内容".format(one_dep[0])})
+ return error
+
+ @classmethod
+ def analysis(cls, gn_path_list, new_line_nums, gn_name, config_path: str, open_components_path,
+ result_json_name: str):
+ if not os.path.exists(config_path):
+ print("error: {} is inaccessible or not found".format(config_path))
+ return
+ if not os.path.exists(open_components_path):
+ print("error: {} is inaccessible or not found".format(open_components_path))
+ return
+ if len(gn_path_list) != len(new_line_nums):
+ print(
+ "error: The new_line_nums and the gn_path are not in one-to-one correspondence.")
+ return
+ if len(gn_path_list) != len(gn_name):
+ print(
+ "error: The gn_path and gn_name are not in one-to-one correspondence.")
+ return
+ required_components = cls.__get_required_components(config_path)
+ open_components, gn_name_list, white_list = cls.__get_open_components(open_components_path)
+ gn_name2component = dict(zip(gn_name_list, open_components))
+ optional_components = list()
+ for components in open_components:
+ if components not in required_components:
+ optional_components.append(components)
+ result = list()
+ for i, _ in enumerate(gn_path_list):
+ one_result = dict()
+ one_result["file_path"] = gn_path_list[i]
+ if gn_name[i] in gn_name_list and gn_name2component[gn_name[i]] in required_components:
+ one_result["error"] = cls.__judge_deps(gn_path_list[i], new_line_nums[i], open_components,
+ optional_components, white_list)
+ else:
+ one_result["file_path"], one_result["error"] = gn_name_list[i], []
+ result.append(one_result)
+ with os.fdopen(os.open(result_json_name + ".json", os.O_WRONLY | os.O_CREAT, mode=0o640), "w",
+ encoding='utf-8') as fd:
+ json.dump(result, fd, indent=4, ensure_ascii=False)
+
+
+def get_args():
+ parser = argparse.ArgumentParser(
+ description=f"analyze components deps.\n")
+ parser.add_argument("-p", "--components_gn_path_list", required=True, type=str,
+ help="path of pr BUILD.gn")
+ parser.add_argument("-n", "--new_line_nums_list", required=True, type=str,
+ help="eg: 1_2_3,4_5")
+ parser.add_argument("-g", "--gn_name", required=True, type=str,
+ help="gn file corresponding name")
+ parser.add_argument("-c", "--config_path", required=True, type=str,
+ help="path of config_file")
+ parser.add_argument("-o", "--open_component_xml_path", required=True, type=str,
+ help="open component name set")
+ parser.add_argument("-r", "--result_json_name", type=str, default="result",
+ help="name of output_json")
+ return parser.parse_args()
+
+
+if __name__ == '__main__':
+ args = get_args()
+ gn_path_list_name = args.components_gn_path_list.split(',')
+ new_line_nums_list = args.new_line_nums_list.split(',')
+ gn_component_name = args.gn_name.split(',')
+ config_path = args.config_path
+ open_components_xml_path = args.open_component_xml_path
+ result_json = args.result_json_name
+ Analyzer.analysis(gn_path_list_name, new_line_nums_list, gn_component_name, config_path, open_components_xml_path,
+ result_json)
\ No newline at end of file
diff --git a/tools/deps_guard/deps_guard.py b/tools/deps_guard/deps_guard.py
index fbd7e023394982ea40b9d713d0ed19bb376bb05f..71ac61fc7709ee699a5a7b73ea3a888b7dcb707a 100755
--- a/tools/deps_guard/deps_guard.py
+++ b/tools/deps_guard/deps_guard.py
@@ -19,37 +19,54 @@
from elf_file_mgr import ElfFileMgr
def __createArgParser():
- import argparse
+ import argparse
- parser = argparse.ArgumentParser(description='Check architecture information from compiled output files.')
+ parser = argparse.ArgumentParser(description='Check architecture information from compiled output files.')
- parser.add_argument('-i', '--input',
- help='input asset files root directory', required=True)
+ parser.add_argument('-i', '--input',
+ help='input asset files root directory', required=True)
- parser.add_argument('-r', '--rules', action='append',
- help='rules directory', required=False)
+ parser.add_argument('-r', '--rules', action='append',
+ help='rules directory', required=False)
- parser.add_argument('-n', '--no-fail',
- help='force to pass all rules', required=False)
+ parser.add_argument('-n', '--no-fail',
+ help='force to pass all rules', required=False)
- return parser
+ return parser
-def deps_guard(out_path, args=None):
- mgr = ElfFileMgr(out_path)
- mgr.scan_all_files()
+def _deps_guard_module(out_path, args=None):
+ mgr = ElfFileMgr(out_path)
+ mgr.scan_all_files()
+
+ from rules_checker import check_all_rules
+
+ passed = check_all_rules(mgr, args)
+ if passed:
+ print("All rules passed")
+ return
- from rules_checker import check_all_rules
+ raise Exception("ERROR: deps_guard failed.")
- passed = check_all_rules(mgr, args)
- if passed:
- print("All rules passed")
- return
+def _startup_guard_module(out_path, args):
+ import sys
+ import os
+ for path in sys.path:
+ if path.endswith("developtools/integration_verification/tools/deps_guard"):
+ sys.path.append(os.path.join(
+ path, "../startup_guard"))
+ break
- raise Exception("ERROR: deps_guard failed.")
+ from startup_guard import startup_guard
+
+ startup_guard(out_path, args)
+
+def deps_guard(out_path, args=None):
+ _deps_guard_module(out_path, args)
+ #_startup_guard_module(out_path, args)
if __name__ == '__main__':
- parser = __createArgParser()
- args = parser.parse_args()
+ parser = __createArgParser()
+ args = parser.parse_args()
- deps_guard(args.input, args)
+ deps_guard(args.input, args)
diff --git a/tools/deps_guard/elf_file_mgr/module_info/compile_info_loader.py b/tools/deps_guard/elf_file_mgr/module_info/compile_info_loader.py
index e9ac1fa7f56f3b61576117dea2323828249ec8c4..1bd0932b07afe9321df77567b68863be95a33b48 100755
--- a/tools/deps_guard/elf_file_mgr/module_info/compile_info_loader.py
+++ b/tools/deps_guard/elf_file_mgr/module_info/compile_info_loader.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-#coding=utf-8
+# coding=utf-8
#
# Copyright (c) 2022 Huawei Device Co., Ltd.
@@ -19,224 +19,252 @@
import os
import json
+
class CompileInfoLoader(object):
- @staticmethod
- def __load_output_module_info(product_out_path):
- try:
- with open(os.path.join(product_out_path, "packages/phone/system_module_info.json")) as f:
- modules = json.load(f)
- except:
- print("file info not found.")
- return None
-
- res = []
- for item in modules:
- info = {}
- info["name"] = item["dest"][0]
- if info["name"].startswith("updater/"):
- if len(item["dest"]) > 1:
- info["name"] = item["dest"][1]
- else:
- continue
-
- if "label" in item:
- info["labelPath"] = item["label"]
- else:
- info["labelPath"] = ""
- if info["labelPath"].find("(") > 0:
- info["labelPath"] = info["labelPath"][:info["labelPath"].find("(")]
- if "subsystem_name" in item:
- info["subsystem"] = item["subsystem_name"]
- else:
- if info["labelPath"].startswith("//build/common"):
- info["subsystem"] = "commonlibrary"
- else:
- info["subsystem"] = "unknown"
- if "part_name" in item:
- info["componentName"] = item["part_name"]
- else:
- if info["labelPath"].startswith("//build/common"):
- info["componentName"] = "c_utils"
- else:
- info["componentName"] = "unknown"
- if "label_name" in item:
- info["moduleName"] = item["label_name"]
- else:
- info["moduleName"] = ""
- if "version_script" in item:
- info["version_script"] = item["version_script"]
- info["third_party"] = False
- info["chipset"] = False
- info["napi"] = False
- info["innerapi"] = False
- info["innerapi_declared"] = False
- if "shlib_type" in item:
- info["shlib_type"] = item["shlib_type"]
- if "innerapi_tags" in item:
- info["innerapi_tags"] = item["innerapi_tags"]
- info["sa_id"] = 0
- res.append(info)
- return res
-
- @staticmethod
- def load(mgr, product_out_path):
- info = CompileInfoLoader.__load_output_module_info(product_out_path)
-
- defaultInfo = {
- "subsystem": "unknown",
- "componentName": "unknown",
- "moduleName": "unknown",
- "third_party": False,
- "chipset": False,
- "napi": False,
- "sa_id": 0,
- "labelPath": "",
- "version_script": "",
- "shlib_type": "",
- "innerapi": False,
- "innerapi_tags": [],
- "innerapi_declared": False
- }
-
- if info:
- for item in info:
- elf = mgr.get_elf_by_path(item["name"])
- if not elf:
- continue
- for k in defaultInfo.keys():
- if k in item:
- elf[k] = item[k]
-
- unknown_items = []
- for elf in mgr.get_all():
- if "componentName" not in elf:
- print("%s does not match in module info file" % (elf["path"]))
- unknown = defaultInfo.copy()
- unknown["name"] = elf["path"]
- unknown["fileName"] = elf["name"]
- for k in defaultInfo.keys():
- elf[k] = defaultInfo[k]
- unknown_items.append(unknown)
- elif elf["componentName"] == "unknown":
- print("%s has no componentName info" % (elf["path"]))
- unknown = defaultInfo.copy()
- unknown["name"] = elf["path"]
- for k in defaultInfo.keys():
- if k in elf:
- defaultInfo[k] = elf[k]
- unknown_items.append(unknown)
-
- if elf["path"].startswith("system/lib64/module/") or elf["path"].startswith("system/lib/module/"):
- elf["napi"] = True
-
- if not elf["path"].startswith("system/"):
- elf["chipset"] = True
-
- # Add if not exists
- if "shlib_type" not in elf:
- elf["shlib_type"] = ""
- if "innerapi_tags" not in elf:
- elf["innerapi_tags"] = []
- if elf["labelPath"].startswith("//third_party/"):
- elf["third_party"] = True
-
- if len(unknown_items) > 0:
- print("%d modules has no component info" % len(unknown_items))
- with open(os.path.join(product_out_path, "unknown.json"), "w") as f:
- res = json.dumps(unknown_items, indent=4)
- f.write(res)
-
- # init platformsdk, chipsetsdk, innerapi flags
- for elf in mgr.get_all():
- elf["deps_internal"] = []
- elf["deps_external"] = []
- elf["dependedBy_internal"] = []
- elf["dependedBy_external"] = []
-
- elf["modGroup"] = "private"
- elf["platformsdk"] = False
- elf["chipsetsdk"] = False
-
- elf["hdiType"] = ""
- if elf["shlib_type"] == "hdi_proxy":
- elf["hdiType"] = "hdi_proxy" # HDI proxy client library
- elif elf["shlib_type"] == "hdi_stub":
- elf["hdiType"] = "hdi_stub" # HDI proxy client library
-
- if elf["name"] in ("libc.so", "libc++.so", "libhilog.so"):
- elf["innerapi"] = True
-
- # Highest priority
- if elf["napi"]:
- elf["modGroup"] = "publicapi"
-
- if elf["sa_id"] > 0 or elf["type"] == "bin":
- elf["modGroup"] = "pentry"
-
- # for component dependedBy_internal and dependedBy_external
-
- platformsdks = []
- chipsetsdks = []
- innerapi_ccs = []
-
- for dep in mgr.get_all_deps():
- caller = dep["caller"]
- callee = dep["callee"]
-
- dep["platformsdk"] = False
- dep["chipsetsdk"] = False
- dep["external"] = False
-
- # For Inner API modules detection
- if caller["componentName"] == callee["componentName"]:
- caller["deps_internal"].append(dep)
- callee["dependedBy_internal"].append(dep)
- else:
- caller["deps_external"].append(dep)
- callee["dependedBy_external"].append(dep)
- callee["innerapi"] = True
- dep["external"] = True
-
- callee["modGroup"] = "innerapi_cc" # Cross component
-
- if caller["napi"]:
- caller["modGroup"] = "publicapi"
-
- # For Platform SDK modules detection
- callee["modGroup"] = "innerapi_chc" # Cross high level component
-
- dep["platformsdk"] = True
- callee["platformsdk"] = True
- if callee not in platformsdks:
- platformsdks.append(callee)
- elif caller["chipset"] != callee["chipset"]:
- # For Chipset SDK modules detection
- if callee["modGroup"] not in ("publicapi", "pentry"):
- callee["modGroup"] = "innerapi_chc" # Cross high level component
- if callee["hdiType"] != "hdi_proxy": # hdi proxy modules can be called by both system and chipset
- dep["chipsetsdk"] = True
- callee["chipsetsdk"] = True
- if callee not in chipsetsdks:
- chipsetsdks.append(callee)
- elif dep["external"] == True:
- if callee not in innerapi_ccs:
- innerapi_ccs.append(callee)
-
- # Highest priority
- if caller["napi"]:
- caller["modGroup"] = "publicapi"
- if callee["napi"]:
- callee["modGroup"] = "publicapi"
-
- if caller["sa_id"] > 0 or caller["type"] == "bin":
- caller["modGroup"] = "pentry"
- if callee["sa_id"] > 0 or callee["type"] == "bin":
- callee["modGroup"] = "pentry"
+
+ @staticmethod
+ def __get_modules_from_file(product_out_path_):
+ try:
+ with open(os.path.join(product_out_path_, "packages/phone/system_module_info.json")) as f:
+ modules = json.load(f)
+ return modules
+ except FileNotFoundError:
+ print("file info not found.")
+ return []
+
+
+ @staticmethod
+ def __update_info_with_item(info_,item_):
+ if "version_script" in item_:
+ info_["version_script"] = item_["version_script"]
+ CompileInfoLoader.__fill_default_module_info(info_)
+ if "shlib_type" in item_:
+ info_["shlib_type"] = item_["shlib_type"]
+ if "innerapi_tags" in item_:
+ info_["innerapi_tags"] = item_["innerapi_tags"]
+ info_["sa_id"] = 0
+
+ @staticmethod
+ def __load_output_module_info(product_out_path__):
+ modules = CompileInfoLoader.__get_modules_from_file(product_out_path_=product_out_path__)
+ res = []
+ for item in modules:
+ info = {}
+ info["name"] = item["dest"][0]
+ if info["name"].startswith("updater/"):
+ if len(item["dest"]) > 1:
+ info["name"] = item["dest"][1]
+ else:
+ continue
+
+ if "label" in item:
+ info["labelPath"] = item["label"]
+ else:
+ info["labelPath"] = ""
+ if info["labelPath"].find("(") > 0:
+ info["labelPath"] = info["labelPath"][:info["labelPath"].find("(")]
+ if "subsystem_name" in item:
+ info["subsystem"] = item["subsystem_name"]
+ else:
+ if info["labelPath"].startswith("//build/common"):
+ info["subsystem"] = "commonlibrary"
+ else:
+ info["subsystem"] = "unknown"
+ if "part_name" in item:
+ info["componentName"] = item["part_name"]
+ else:
+ if info["labelPath"].startswith("//build/common"):
+ info["componentName"] = "c_utils"
+ else:
+ info["componentName"] = "unknown"
+ if "label_name" in item:
+ info["moduleName"] = item["label_name"]
+ else:
+ info["moduleName"] = ""
+ CompileInfoLoader.__update_info_with_item(info_=info,item_=item)
+ res.append(info)
+ return res
+
+ @staticmethod
+ def __fill_default_module_info(info_):
+ info_["third_party"] = False
+ info_["chipset"] = False
+ info_["napi"] = False
+ info_["innerapi"] = False
+ info_["innerapi_declared"] = False
+
+ @staticmethod
+ def load(load_mgr, product_out_path):
+ info = CompileInfoLoader.__load_output_module_info(product_out_path)
+
+ default_info = CompileInfoLoader.__get_default_info()
+
+ if info:
+ for item in info:
+ elf = load_mgr.get_elf_by_path(item["name"])
+ if not elf:
+ continue
+ for k in default_info.keys():
+ if k in item:
+ elf[k] = item[k]
+
+ unknown_items = []
+ for elf in load_mgr.get_all():
+ if "componentName" not in elf:
+ print("%s does not match in module info file" % (elf["path"]))
+ unknown = default_info.copy()
+ unknown["name"] = elf["path"]
+ unknown["fileName"] = elf["name"]
+ for k in default_info.keys():
+ elf[k] = default_info[k]
+ unknown_items.append(unknown)
+ elif elf["componentName"] == "unknown":
+ print("%s has no componentName info" % (elf["path"]))
+ unknown = default_info.copy()
+ unknown["name"] = elf["path"]
+ for k in default_info.keys():
+ if k in elf:
+ default_info[k] = elf[k]
+ unknown_items.append(unknown)
+
+ if elf["path"].startswith("system/lib64/module/") or elf["path"].startswith("system/lib/module/"):
+ elf["napi"] = True
+
+ if not elf["path"].startswith("system/"):
+ elf["chipset"] = True
+
+ # Add if not exists
+ if "shlib_type" not in elf:
+ elf["shlib_type"] = ""
+ if "innerapi_tags" not in elf:
+ elf["innerapi_tags"] = []
+ if elf["labelPath"].startswith("//third_party/"):
+ elf["third_party"] = True
+
+ if len(unknown_items) > 0:
+ print("%d modules has no component info" % len(unknown_items))
+ with open(os.path.join(product_out_path, "unknown.json"), "w") as f:
+ res = json.dumps(unknown_items, indent=4)
+ f.write(res)
+
+ # init platformsdk, chipsetsdk, innerapi flags
+ CompileInfoLoader.__set_elf_default_value(load_mgr)
+
+ # for component dependedBy_internal and dependedBy_external
+ CompileInfoLoader.__update_deps(load_mgr)
+
+ @staticmethod
+ def __get_default_info():
+ return {
+ "subsystem": "unknown",
+ "componentName": "unknown",
+ "moduleName": "unknown",
+ "third_party": False,
+ "chipset": False,
+ "napi": False,
+ "sa_id": 0,
+ "labelPath": "",
+ "version_script": "",
+ "shlib_type": "",
+ "innerapi": False,
+ "innerapi_tags": [],
+ "innerapi_declared": False
+ }
+
+ @staticmethod
+ def __set_elf_default_value(mgr_):
+ for elf in mgr_.get_all():
+ elf["deps_internal"] = []
+ elf["deps_external"] = []
+ elf["dependedBy_internal"] = []
+ elf["dependedBy_external"] = []
+
+ elf["modGroup"] = "private"
+ elf["platformsdk"] = False
+ elf["chipsetsdk"] = False
+
+ elf["hdiType"] = ""
+ if elf["shlib_type"] == "hdi_proxy":
+ elf["hdiType"] = "hdi_proxy" # HDI proxy client library
+ elif elf["shlib_type"] == "hdi_stub":
+ elf["hdiType"] = "hdi_stub" # HDI proxy client library
+
+ if elf["name"] in ("libc.so", "libc++.so", "libhilog.so"):
+ elf["innerapi"] = True
+
+ # Highest priority
+ if elf["napi"]:
+ elf["modGroup"] = "publicapi"
+
+ if elf["sa_id"] > 0 or elf["type"] == "bin":
+ elf["modGroup"] = "pentry"
+
+ @staticmethod
+ def __update_deps(mgr_):
+ platformsdks = []
+ chipsetsdks = []
+ innerapi_ccs = []
+
+ for dep in mgr_.get_all_deps():
+ caller = dep["caller"]
+ callee = dep["callee"]
+
+ dep["platformsdk"] = False
+ dep["chipsetsdk"] = False
+ dep["external"] = False
+
+ # For Inner API modules detection
+ if caller["componentName"] == callee["componentName"]:
+ caller["deps_internal"].append(dep)
+ callee["dependedBy_internal"].append(dep)
+ else:
+ caller["deps_external"].append(dep)
+ callee["dependedBy_external"].append(dep)
+ callee["innerapi"] = True
+ dep["external"] = True
+
+ callee["modGroup"] = "innerapi_cc" # Cross component
+
+ if caller["napi"]:
+ caller["modGroup"] = "publicapi"
+
+ # For Platform SDK modules detection
+ callee["modGroup"] = "innerapi_chc" # Cross high level component
+
+ dep["platformsdk"] = True
+ callee["platformsdk"] = True
+ if callee not in platformsdks:
+ platformsdks.append(callee)
+ elif caller["chipset"] != callee["chipset"]:
+ # For Chipset SDK modules detection
+ if callee["modGroup"] not in ("publicapi", "pentry"):
+ callee["modGroup"] = "innerapi_chc" # Cross high level component
+ if callee["hdiType"] != "hdi_proxy": # hdi proxy modules can be called by both system and chipset
+ dep["chipsetsdk"] = True
+ callee["chipsetsdk"] = True
+ if callee["hdiType"] != "hdi_proxy" and callee not in chipsetsdks:
+ chipsetsdks.append(callee)
+ elif dep["external"] == True:
+ if callee not in innerapi_ccs:
+ innerapi_ccs.append(callee)
+
+ # Highest priority
+ if caller["napi"]:
+ caller["modGroup"] = "publicapi"
+ if callee["napi"]:
+ callee["modGroup"] = "publicapi"
+
+ if caller["sa_id"] > 0 or caller["type"] == "bin":
+ caller["modGroup"] = "pentry"
+ if callee["sa_id"] > 0 or callee["type"] == "bin":
+ callee["modGroup"] = "pentry"
+
if __name__ == "__main__":
- import sqlite3
- import elf_modules
- conn = sqlite3.connect("symdb.db")
- cursor = conn.cursor()
+ import sqlite3
+ import elf_modules
+
+ conn = sqlite3.connect("symdb.db")
+ cursor = conn.cursor()
- mgr = elf_modules.ElfModuleMgr(cursor)
+ mgr = elf_modules.ElfModuleMgr(cursor)
diff --git a/tools/deps_guard/elf_file_mgr/sa/sa.py b/tools/deps_guard/elf_file_mgr/sa/sa.py
index 0665fbfb68b7096c04e7374f02818620303c7f8e..d8b1472da3e538982497109e646cbfad67369122 100755
--- a/tools/deps_guard/elf_file_mgr/sa/sa.py
+++ b/tools/deps_guard/elf_file_mgr/sa/sa.py
@@ -23,48 +23,50 @@ import os
import xml.etree.ElementTree as ET
def xml_node_find_by_name(node, name):
- for item in node:
- if item.tag == name:
- return item.text
- return None
+ for item in node:
+ if item.tag == name:
+ return item.text
+ return None
class SAParser(object):
- @staticmethod
- def __parse_sa_profile(all_sa, f):
- root = ET.parse(f).getroot()
- process = xml_node_find_by_name(root, "process")
- for sa in root.findall("systemability"):
- libpath = xml_node_find_by_name(sa, "libpath")
- sa_key = os.path.basename(libpath)
- sa_item = {}
- for item in sa:
- sa_item[item.tag] = item.text
- sa_item["process"] = process
- all_sa[sa_key] = sa_item
+ @staticmethod
+ def __parse_sa_profile(all_sa, full_name):
+ with open(full_name, "r") as f:
+ profile = json.load(f)
+ process = profile["process"]
+ for sa in profile["systemability"]:
+ libpath = sa["libpath"]
+ sa_key = os.path.basename(libpath)
+ sa["process"] = process
+ all_sa[sa_key] = sa
- @staticmethod
- def __add_sa_info(all_sa, mgr):
- if not mgr:
- return
- for mod in mgr.get_all():
- mod["sa_id"] = 0
- if mod["name"] not in all_sa:
- continue
- mod["sa_id"] = int(all_sa[mod["name"]]["name"])
+ @staticmethod
+ def __add_sa_info(all_sa, mgr):
+ if not mgr:
+ return
+ for mod in mgr.get_all():
+ mod["sa_id"] = 0
+ if mod["name"] not in all_sa:
+ continue
+ mod["sa_id"] = int(all_sa[mod["name"]]["name"])
- @staticmethod
- def load(mgr, out_root_path):
- all_sa = {}
- path = os.path.join(out_root_path, "packages/phone/system/profile")
- if not os.path.exists(path):
- return
+ @staticmethod
+ def load(mgr, out_root_path):
+ all_sa = {}
+ path = os.path.join(out_root_path, "packages/phone/system/profile")
+ if not os.path.exists(path):
+ return
- for f in os.listdir(path):
- full_name = os.path.join(path, f)
- if os.path.isfile(full_name) and f.endswith(".xml"):
- try:
- SAParser.__parse_sa_profile(all_sa, full_name)
- except:
- pass
+ for f in os.listdir(path):
+ full_name = os.path.join(path, f)
+ if os.path.isfile(full_name) and f.endswith(".json"):
+ try:
+ SAParser.__parse_sa_profile(all_sa, full_name)
+ except:
+ pass
- SAParser.__add_sa_info(all_sa, mgr)
+ SAParser.__add_sa_info(all_sa, mgr)
+
+if __name__ == '__main__':
+ parser = SAParser()
+ parser.load(None, "/home/handy/qemu/out/rk3568")
diff --git a/tools/deps_guard/rules/ChipsetSDK/README.md b/tools/deps_guard/rules/ChipsetSDK/README.md
index 5a4e825ea640556e928e832a6cfd021912ed618d..5bbdc7f94d4ff570e1ce340e33553e2cf734a2f9 100755
--- a/tools/deps_guard/rules/ChipsetSDK/README.md
+++ b/tools/deps_guard/rules/ChipsetSDK/README.md
@@ -1,4 +1,4 @@
-# ChipsetSDK白名单规则说明
+# Chipset SDK白名单规则说明
@@ -8,66 +8,151 @@
芯片组件模块:安装到chipset.img中的模块,与芯片或硬件强相关。
+
+如上图所示:Chipset SDK是指允许被芯片组件进程加载的系统组件动态库模块集合。
-Chipset SDK是指允许被芯片组件模块依赖的系统组件动态库模块集合。
+Chipset SDK集合中的单个模块称之为Chipset SDK模块。Chipset SDK模块分为两类,在BUILD.gn中通过innerapi_tags字段来标识,可选值为:
-Chipset SDK集合中的单个模块称之为ChipsetSDK模块。
+- chipsetsdk: 芯片组件直接依赖的模块
+- chipsetsdk_indirect: chipsetsdk模块间接依赖的模块
-## 2. 规则解释
-
-Chipset SDK白名单规则有几个方面的含义:
-
-### 2.1 Chipset SDK模块需要在编译模板中标识
-
-如下图所示,每个Chipset SDK模块需要在对应的BUILD.gn中通过innerapi_tags增加chipsetsdk标签来标识其类型:
+在BUILD.gn中的使用样例如下:
```go
-ohos_shared_library(sample_sa_module) {
+ohos_shared_library(sample_chipsetsdk_module) {
...
- innerapi_tags = [ "chipsetsdk" ]
+ innerapi_tags = [ "chipsetsdk|chipsetsdk_indirect" ]
...
}
```
-如果没有此标记,编译时会提示如下类型的告警:
+chipsetsdk和chipsetsdk_indirect类型的模块都需要被芯片组件进程加载,这两个类型的模块都会安装到/chipset/lib{64}/chipset-sdk或/chipset/lib{64}/chipset-pub-sdk目录下,都可以被芯片组件沙盒访问。
-```shell
-[WARNING]: Chipset SDK module libinit_stub_empty.so has no innerapi_tags with "chipsetsdk" or "csdk", add it in //base/startup/init/interfaces/innerkits/init_module_engine:libinit_stub_empty
-```
+相比较于/chipset/lib{64}/chipset-sdk目录,/chipset/lib{64}/chipset-pub-sdk目录下的模块既能被芯片组件沙盒访问,也能被应用进程沙盒访问。
-同样,非Chipset SDK不要增加此标记。
+## 2. 规则解释
-### 2.2 芯片组件模块不允许依赖Chipset SDK集合之外的模块
+Chipset SDK白名单规则有几个方面的含义:
+
+### 2.1 芯片组件模块编译时能且仅能依赖系统组件中的chipsetsdk类型的ChipsetSDK模块
-违法此规则时,会产生如下格式的NOT ALLOWED错误:
+违反此规则时,编译过程会产生如下格式的NOT ALLOWED错误:
```
[NOT ALLOWED]: chipset module libNNN.z.so depends on non Chipset SDK module libsystemMMM.z.so in //NNNpath:libNNN
```
-该错误的含义是:芯片组件的libNNN.z.so模块依赖了libsystemMMM.z.so,而libsystemMMM.z.so并不是被认可(白名单中)的模块。
+该错误的含义是:芯片组件的libNNN.z.so模块依赖了libsystemMMM.z.so,而libsystemMMM.z.so并不是Chipset SDK的模块。
-违法此规则时,参考下一章节的处理方案进行消除:
+处理方法:
-## 3. 违规场景及处理方案建议
+1)检查libNNN.z.so是否需要安装到芯片组件中?如果是系统组件的产物,就不受此规则约束。一般是在BUILD.gn中的install_images字段决定安装到哪个组件中。
-### 3.1 检查违规模块的安装组件是否正确
+2)如果libNNN.z.so确实需要安装到芯片组件中,则分析对libsystemMMM.z.so的依赖是否是必须的。经常会出现libNNN.z.so是一个巨型库,对libsystemMMM.z.so的依赖部分并不会被芯片组件实际使用,此时可以考虑把libNNN.z.so进行拆分,把芯片组件确实需要使用的部分保留在芯片组件中;不需要的部分放在系统组件其它的库里。
-首先确认libNNN.z.so是否应该是芯片组件产物,只有与硬件相关的HDI实现模块才需要安装到芯片组件。
+3)如果以上都不满足,需要联系libsystemMMM.z.so的提供者,把此模块整改为Chipset SDK模块。参考第3章Chipset SDK模块基本要求。
-如下所示,每个模块是通过install_images选项来决定安装到系统组件还是芯片组件;如果系统组件模块错误的安装到芯片组件,可能会因为该模块依赖了Chipset SDK之外的系统组件模块而违反此规则。
+### 2.2 Chipset SDK模块能且仅能依赖其它的Chipset SDK模块
-```go
-ohos_shared_library(libNNN) {
- ...
- install_images = [ chipset_base_dir ]
- ...
-}
+违反此规则时,编译过程会产生如下格式的NOT ALLOWED错误:
+
+```
+[NOT ALLOWED]: Chipset SDK module libNNN.z.so depends on non Chipset SDK module libsystemMMM.z.so in //NNNpath:libNNN
```
-因此,首先需要检查违规模块是否安装到了错误的组件;如果libNNN.z.so不是芯片组件的产物,则install_images里不能填chipset_base_dir;删除install_images后,默认就是安装到系统组件。
+该错误的含义是:系统组件中的libNNN.z.so模块是Chipset SDK模块,其依赖的libsystemMMM.z.so并不是Chipset SDK模块。
+
+处理方法:
+
+1)分析新增对libsystemMMM.z.so的依赖是否合理。
+
+2)如果确认该依赖是必须的,可以有两种处理方法:
+
+a)为libsystemMMM.z.so增加chipsetsdk_indirect类型的innerapi_tags,表示此模块为间接依赖模块。
+
+b)联系libsystemMMM.z.so的提供者,把此模块整改为Chipset SDK模块,增加chipsetsdk类型的innerapi_tags。参考第3章Chipset SDK模块基本要求。
+
+### 2.3 Chipset SDK模块白名单管理
+
+- Chipset SDK模块没有标记类型
+
+ 每个Chipset SDK模块都需要在innerapi_tags中加上chipsetsdk或chipsetsdk_indirect标记;否则会报以下类型的错误:
+
+ ```shell
+ [ERROR]: Chipset SDK module libxxx.so has no innerapi_tags with "chipsetsdk", add it in //base/startup/init/interfaces/innerkits/init_module_engine:libinit_stub_empty
+ ```
+
+ 处理方法:按照提示在对应的BUILD.gn中添加innerapi_tags标记。
-### 3.2 申请新的Chipset SDK模块
+- 非Chipset SDK模块添加了Chipset SDK模块标记
-如果经过分析,芯片组件的libNNN.z.so确实需要引入新的Chipset SDK模块,可向架构SIG申请加入Chipset SDK白名单。
+ 非Chipset SDK模块不要添加chipsetsdk或chipsetsdk_indirect标记;否则会报以下类型的错误:
+
+ ```shell
+ [ERROR]: non chipsetsdk_indirect module libdfx_dumpcatcher.z.so with innerapi_tags="chipsetsdk_indirect", //base/hiviewdfx/faultloggerd/interfaces/innerkits/dump_catcher:libdfx_dumpcatcher
+ ```
+
+ 处理方法:按照提示在对应的BUILD.gn中去掉innerapi_tags中的标记。
+
+
+
+## 3. Chipset SDK模块基本要求
+
+### 3.1 Chipset SDK模块所属部件必须加入最小系统部件集
+
+为了支持芯片组件独立编译,每个Chipset SDK模块所属的部件必须是[最小系统部件集合](https://gitee.com/openharmony/productdefine_common/base/standard_system.json)中的部件,确保最小系统可以正常编译;防止芯片组件编译时需要引入其他过多的系统组件部件。
+
+---
+
+三方库模块不需要加入最小系统部件集合。
+
+----
+
+### 3.2 Chipset SDK模块对外API需稳定
+
+Chipset SDK模块提供了跨组件的接口,需要维护稳定的对外API。基本要求如下:
+
+1)对外头文件需要在所属部件bundle.json中标注
+
+2)对外头文件需要清晰的文档注释
+
+3)对外API变更时需要向前兼容
+
+### 3.3 Chipset SDK模块需加入到白名单列表
+
+chipsetsdk_indirect类型的Chipset SDK模块需加入到[chipsetsdk_indirect.json](chipsetsdk_indirect.json)文件中
+
+~~chipsetsdk类型的Chipset SDK模块需加入到[whitelist.json](whitelist.json)文件中,同时需要更新模块描述信息文件[chipsetsdk_info.json](chipsetsdk_info.json)。~~
+
+**chipsetsdk类型的Chipset SDK模块需加入到[chipsetsdk_info.json](chipsetsdk_info.json)文件中(原whitelist.json文件中原so信息已变更格式后迁移到 chipsetsdk_info.json文件中 ,后续请更新chipsetsdk_info.json文件,不再使用whitelist.json文件)**
+
+**白名单json格式变更如下**
+
+变更前:
+```
+[
+ "libc.so",
+ "so名称"
+]
+```
+变更后:
+```
+[
+ {
+ "name": "hilog:libhilog",
+ "so_file_name": "libhilog.so",
+ "path": "//base/hiviewdfx/hilog/interfaces/native/innerkits:libhilog",
+ "headers": [
+ "//base/hiviewdfx/hilog/interfaces/native/innerkits/include/"
+ ]
+ },
+ {
+ "name": "归属部件:模块名",
+ "so_file_name": "so名称",
+ "path": "编译路径",
+ "headers": []
+ }
+]
+```
+----
\ No newline at end of file
diff --git a/tools/deps_guard/rules/ChipsetSDK/chipsetsdk_indirect.json b/tools/deps_guard/rules/ChipsetSDK/chipsetsdk_indirect.json
new file mode 100755
index 0000000000000000000000000000000000000000..787f178277bfa840ae8b5739c2b4034a914c3024
--- /dev/null
+++ b/tools/deps_guard/rules/ChipsetSDK/chipsetsdk_indirect.json
@@ -0,0 +1,245 @@
+[
+ {
+ "name": "drivers_interface_display:libdisplay_buffer_proxy_1.0",
+ "so_file_name": "libdisplay_buffer_proxy_1.0.z.so",
+ "path": "//drivers/interface/display/buffer/v1_0:libdisplay_buffer_proxy_1.0",
+ "headers": [
+ "//drivers/hdf_core/adapter/uhdf2/include/hdi/",
+ "//drivers/hdf_core/adapter/uhdf2/osal/include/",
+ "//drivers/hdf_core/adapter/uhdf2/ipc/include/",
+ "//drivers/hdf_core/framework/include/utils/",
+ "//drivers/hdf_core/interfaces/inner_api/utils/",
+ "//drivers/hdf_core/interfaces/inner_api/osal/uhdf/",
+ "//drivers/hdf_core/interfaces/inner_api/hdi/"
+ ]
+ },
+ {
+ "name": "eventhandler:libeventhandler",
+ "so_file_name": "libeventhandler.z.so",
+ "path": "//base/notification/eventhandler/frameworks/eventhandler:libeventhandler",
+ "headers": [
+ "//base/notification/eventhandler/interfaces/inner_api/"
+ ]
+ },
+ {
+ "name": "selinux:libselinux",
+ "so_file_name": "libselinux.z.so",
+ "path": "//third_party/selinux:libselinux",
+ "headers": [
+ "//third_party/selinux/libselinux/include/",
+ "//third_party/selinux/libselinux/",
+ "//third_party/pcre2/pcre2/src/",
+ "//third_party/FreeBSD/"
+ ]
+ },
+ {
+ "name": "pcre2:libpcre2",
+ "so_file_name": "libpcre2.z.so",
+ "path": "//third_party/pcre2:libpcre2",
+ "headers": []
+ },
+ {
+ "name": "hitrace:libhitracechain",
+ "so_file_name": "libhitracechain.so",
+ "path": "//base/hiviewdfx/hitrace/interfaces/native/innerkits:libhitracechain",
+ "headers": [
+ "//base/hiviewdfx/hitrace/interfaces/native/innerkits/include/"
+ ]
+ },
+ {
+ "name": "drivers_interface_display:libdisplay_buffer_hdi_impl",
+ "so_file_name": "libdisplay_buffer_hdi_impl.z.so",
+ "path": "//drivers/interface/display/buffer/v1_0:libdisplay_buffer_hdi_impl",
+ "headers": [
+ "//drivers/interface/display/buffer/"
+ ]
+ },
+ {
+ "name": "ipc:ipc_common",
+ "so_file_name": "libipc_common.z.so",
+ "path": "//foundation/communication/ipc/ipc/native/src/core:ipc_common",
+ "headers": [
+ "//commonlibrary/c_utils/base/include/",
+ "//third_party/bounds_checking_function/include/"
+ ]
+ },
+ {
+ "name": "nghttp2:libnghttp2_shared",
+ "so_file_name": "libnghttp2_shared.z.so",
+ "path": "//third_party/nghttp2/lib:libnghttp2_shared",
+ "headers": []
+ },
+ {
+ "name": "access_token:libtokenid_sdk",
+ "so_file_name": "libtokenid_sdk.z.so",
+ "path": "//base/security/access_token/interfaces/innerkits/accesstoken:libtokenid_sdk",
+ "headers": [
+ "//base/security/access_token/interfaces/innerkits/accesstoken/include/"
+ ]
+ },
+ {
+ "name": "access_token:accesstoken_communication_adapter_cxx",
+ "so_file_name": "libaccesstoken_communication_adapter_cxx.z.so",
+ "path": "//base/security/access_token/frameworks/accesstoken:accesstoken_communication_adapter_cxx",
+ "headers": [
+ "//base/security/access_token/frameworks/accesstoken/include/",
+ "//base/security/access_token/frameworks/common/include/",
+ "//base/security/access_token/interfaces/innerkits/accesstoken/include/"
+ ]
+ },
+ {
+    "name": "access_token:accesstoken_common_cxx",
+    "so_file_name": "libaccesstoken_common_cxx.z.so",
+    "path": "//base/security/access_token/frameworks/common:accesstoken_common_cxx",
+ "headers": [
+ "//base/security/access_token/frameworks/accesstoken/include/",
+ "//base/security/access_token/frameworks/common/include/",
+ "//base/security/access_token/interfaces/innerkits/accesstoken/include/"
+ ]
+ },
+ {
+ "name": "dsoftbus:softbus_client",
+ "so_file_name": "libsoftbus_client.z.so",
+ "path": "//foundation/communication/dsoftbus/sdk:softbus_client",
+ "headers": [
+ "//foundation/communication/dsoftbus/interfaces/kits/",
+ "//foundation/communication/dsoftbus/interfaces/kits/bus_center/",
+ "//foundation/communication/dsoftbus/interfaces/kits/common/",
+ "//foundation/communication/dsoftbus/interfaces/kits/discovery/",
+ "//foundation/communication/dsoftbus/interfaces/kits/transport/",
+ "//foundation/communication/dsoftbus/sdk/transmission/session/cpp/include/",
+ "//foundation/communication/dsoftbus/interfaces/inner_kits/transport/",
+ "//foundation/communication/dsoftbus/core/common/dfx/hisysevent_adapter/include/",
+ "//third_party/bounds_checking_function/include/",
+ "//commonlibrary/c_utils/base/include/",
+ "//base/hiviewdfx/hisysevent/interfaces/native/innerkits/hisysevent/include/",
+ "//base/hiviewdfx/hisysevent/interfaces/native/innerkits/hisysevent/encode/include/",
+ "//third_party/bounds_checking_function/include/"
+ ]
+ },
+ {
+ "name": "samgr:samgr_common",
+ "so_file_name": "libsamgr_common.z.so",
+ "path": "//foundation/systemabilitymgr/samgr/interfaces/innerkits/common:samgr_common",
+ "headers": [
+ "//foundation/systemabilitymgr/samgr/interfaces/innerkits/common/include/",
+ "//foundation/systemabilitymgr/samgr/interfaces/innerkits/samgr_proxy/include/",
+ "//foundation/systemabilitymgr/samgr/services/dfx/include/"
+ ]
+ },
+ {
+ "name": "mbedtls:mbedtls_shared",
+ "so_file_name": "libmbedtls.z.so",
+ "path": "//third_party/mbedtls:mbedtls_shared",
+ "headers": [
+ "//third_party/mbedtls/include/",
+ "//third_party/mbedtls/library/",
+ "//third_party/mbedtls/include/mbedtls/",
+ "//third_party/mbedtls/tests/include/",
+ "//third_party/mbedtls/port/config/compat_posix/"
+ ]
+ },
+ {
+ "name": "dsoftbus:FillpSo",
+ "so_file_name": "libFillpSo.z.so",
+ "path": "//foundation/communication/dsoftbus/components/nstackx_enhanced/fillp:FillpSo",
+ "headers": []
+ },
+ {
+ "name": "dsoftbus:nstackx_dfile",
+ "so_file_name": "libnstackx_dfile.z.so",
+ "path": "//foundation/communication/dsoftbus/components/nstackx_enhanced/nstackx_core:nstackx_dfile",
+ "headers": []
+ },
+ {
+ "name": "dsoftbus:softbus_adapter",
+ "so_file_name": "libsoftbus_adapter.z.so",
+ "path": "//foundation/communication/dsoftbus/adapter:softbus_adapter",
+ "headers": [
+ "//foundation/communication/dsoftbus/adapter/common/include/",
+ "//foundation/communication/dsoftbus/adapter/common/include/OS_adapter_define/linux/",
+ "//foundation/communication/dsoftbus/adapter/default_config/spec_config/",
+ "//foundation/communication/dsoftbus/core/common/include/",
+ "//base/hiviewdfx/hitrace/interfaces/native/innerkits/include/",
+ "//third_party/openssl/include/"
+ ]
+ },
+ {
+ "name": "dsoftbus:softbus_utils",
+ "so_file_name": "libsoftbus_utils.z.so",
+ "path": "//foundation/communication/dsoftbus/core/common:softbus_utils",
+ "headers": [
+ "//foundation/communication/dsoftbus/core/common/include/",
+ "//foundation/communication/dsoftbus/core/common/dfx/hidumper_adapter/include/",
+ "//foundation/communication/dsoftbus/core/common/dfx/hisysevent_adapter/include/",
+ "//foundation/communication/dsoftbus/interfaces/kits/common/",
+ "//foundation/communication/dsoftbus/adapter/common/include/",
+ "//foundation/communication/dsoftbus/components/nstackx/nstackx_core/dfile/interface/",
+ "//foundation/communication/dsoftbus/core/connection/interface/",
+ "//foundation/communication/dsoftbus/core/connection/common/include/",
+ "//foundation/communication/dsoftbus/core/connection/manager/",
+ "//foundation/communication/dsoftbus/core/transmission/trans_channel/proxy/include/",
+ "//foundation/communication/dsoftbus/core/transmission/common/include/",
+ "//foundation/communication/dsoftbus/interfaces/kits/transport/",
+ "//foundation/communication/dsoftbus/adapter/common/include/",
+ "//foundation/communication/dsoftbus/adapter/common/include/OS_adapter_define/linux/",
+ "//foundation/communication/dsoftbus/adapter/default_config/spec_config/",
+ "//foundation/communication/dsoftbus/core/common/include/",
+ "//base/hiviewdfx/hitrace/interfaces/native/innerkits/include/",
+ "//third_party/openssl/include/",
+ "//third_party/cJSON/",
+ "//third_party/sqlite/include/",
+ "//commonlibrary/c_utils/base/include/",
+ "//foundation/communication/dsoftbus/components/nstackx/nstackx_ctrl/interface/",
+ "//foundation/communication/dsoftbus/components/nstackx/nstackx_util/interface/",
+ "//foundation/communication/dsoftbus/components/nstackx/nstackx_util/platform/unix/"
+ ]
+ },
+ {
+ "name": "dsoftbus:nstackx_congestion",
+ "so_file_name": "libnstackx_congestion.z.so",
+    "path": "//foundation/communication/dsoftbus/components/nstackx_enhanced/nstackx_congestion:nstackx_congestion",
+ "headers": []
+ },
+ {
+ "name": "algorithm:msdp_ble_range",
+ "so_file_name": "libmsdp_ble_range.z.so",
+ "path": "//base/msdp/algorithm/ble_range:msdp_ble_range",
+ "headers": []
+ },
+ {
+ "name": "sqlite:sqlite",
+ "so_file_name": "libsqlite.z.so",
+ "path": "//third_party/sqlite:sqlite",
+ "headers": [
+ "//third_party/sqlite/include/",
+ "//commonlibrary/c_utils/base/include/"
+ ]
+ },
+ {
+ "name": "dsoftbus:nstackx_util",
+ "so_file_name": "libnstackx_util.z.so",
+    "path": "//foundation/communication/dsoftbus/components/nstackx_enhanced/nstackx_util:nstackx_util",
+ "headers": []
+ },
+ {
+ "name": "faultloggerd:libbacktrace_local",
+ "so_file_name": "libbacktrace_local.so",
+    "path": "//base/hiviewdfx/faultloggerd/interfaces/innerkits/backtrace_local",
+ "headers": []
+ },
+ {
+ "name": "faultloggerd:libdfx_dumpcatcher",
+ "so_file_name": "libdfx_dumpcatcher.z.so",
+ "path": "//base/hiviewdfx/faultloggerd/interfaces/innerkits/dump_catcher:libdfx_dumpcatcher",
+ "headers": [
+ "//base/hiviewdfx/faultloggerd/interfaces/innerkits/dump_catcher/include/"
+ ]
+ },
+ {
+ "name": "faultloggerd:libdfx_procinfo",
+ "so_file_name": "libdfx_procinfo.z.so",
+    "path": "//base/hiviewdfx/faultloggerd/interfaces/innerkits/procinfo:libdfx_procinfo",
+ "headers": []
+ }
+]
diff --git a/tools/deps_guard/rules/ChipsetSDK/chipsetsdk_info.json b/tools/deps_guard/rules/ChipsetSDK/chipsetsdk_info.json
new file mode 100644
index 0000000000000000000000000000000000000000..c411058f966a9e09fd48d6144e901f910c21d8ca
--- /dev/null
+++ b/tools/deps_guard/rules/ChipsetSDK/chipsetsdk_info.json
@@ -0,0 +1,525 @@
+[
+ {
+ "name": "build_framework:musl-libc.so",
+ "so_file_name": "libc.so",
+ "path": "//build/common/musl:musl-libc.so",
+ "headers": []
+ },
+ {
+ "name": "build_framework:musl-libcxx.so",
+ "so_file_name": "libc++.so",
+ "path": "//build/common/musl:musl-libcxx.so",
+ "headers": []
+ },
+ {
+ "name": "hilog:libhilog",
+ "so_file_name": "libhilog.so",
+ "path": "//base/hiviewdfx/hilog/interfaces/native/innerkits:libhilog",
+ "headers": [
+ "//base/hiviewdfx/hilog/interfaces/native/innerkits/include/"
+ ]
+ },
+ {
+ "name": "hilog_override:libhilog_ndk",
+ "so_file_name": "libhilog_ndk.z.so",
+ "path": "",
+ "headers": []
+ },
+ {
+ "name": "hisysevent:libhisysevent",
+ "so_file_name": "libhisysevent.z.so",
+ "path": "//base/hiviewdfx/hisysevent/interfaces/native/innerkits/hisysevent:libhisysevent",
+ "headers": [
+ "//base/hiviewdfx/hisysevent/interfaces/native/innerkits/hisysevent/include/",
+ "//base/hiviewdfx/hisysevent/interfaces/native/innerkits/hisysevent/encode/include/"
+ ]
+ },
+ {
+ "name": "hicollie:libhicollie",
+ "so_file_name": "libhicollie.z.so",
+ "path": "//base/hiviewdfx/hicollie/interfaces/native/innerkits:libhicollie",
+ "headers": [
+ "//base/hiviewdfx/hicollie/interfaces/native/innerkits/include/"
+ ]
+ },
+ {
+ "name": "selinux_adapter:libservice_checker",
+ "so_file_name": "libservice_checker.z.so",
+ "path": "//base/security/selinux_adapter:libservice_checker",
+ "headers": [
+ "//base/security/selinux_adapter/interfaces/policycoreutils/include/",
+ "//third_party/selinux/libselinux/include/"
+ ]
+ },
+ {
+ "name": "init:libbegetutil",
+ "so_file_name": "libbegetutil.z.so",
+ "path": "//base/startup/init/interfaces/innerkits:libbegetutil",
+ "headers": [
+ "//base/startup/init/interfaces/innerkits/include/",
+ "//base/startup/init/interfaces/innerkits/include/syspara/",
+ "//base/startup/init/interfaces/innerkits/include/token/",
+ "//base/startup/init/services/include/",
+ "//base/startup/init/services/include/param/"
+ ]
+ },
+ {
+ "name": "init:libbeget_proxy",
+ "so_file_name": "libbeget_proxy.z.so",
+ "path": "//base/startup/init/interfaces/innerkits:libbeget_proxy",
+ "headers": [
+ "//base/startup/init/interfaces/innerkits/include/",
+ "//base/startup/init/interfaces/innerkits/include/syspara/",
+ "//base/startup/init/interfaces/innerkits/include/token/",
+ "//base/startup/init/services/include/",
+ "//base/startup/init/services/include/param/"
+ ]
+ },
+ {
+ "name": "c_utils:utils",
+ "so_file_name": "libutils.z.so",
+ "path": "//commonlibrary/c_utils/base:utils",
+ "headers": [
+ "//commonlibrary/c_utils/base/include/",
+ "//third_party/bounds_checking_function/include/"
+ ]
+ },
+ {
+ "name": "bounds_checking_function:libsec_shared",
+ "so_file_name": "libsec_shared.z.so",
+ "path": "//third_party/bounds_checking_function:libsec_shared",
+ "headers": [
+ "//third_party/bounds_checking_function/include/"
+ ]
+ },
+ {
+ "name": "hdf_core:libhdi",
+ "so_file_name": "libhdi.z.so",
+ "path": "//drivers/hdf_core/adapter/uhdf2/hdi:libhdi",
+ "headers": [
+ "//drivers/hdf_core/interfaces/inner_api/hdi/",
+ "//drivers/hdf_core/interfaces/inner_api/hdi/base/",
+ "//drivers/hdf_core/interfaces/inner_api/core/"
+ ]
+ },
+ {
+ "name": "hdf_core:libpub_utils",
+ "so_file_name": "libpub_utils.z.so",
+ "path": "//drivers/hdf_core/adapter/uhdf2/pub_utils:libpub_utils",
+ "headers": [
+ "//drivers/hdf_core/interfaces/inner_api/osal/shared/",
+ "//drivers/hdf_core/interfaces/inner_api/osal/uhdf/",
+ "//drivers/hdf_core/interfaces/inner_api/utils/",
+ "//drivers/hdf_core/interfaces/inner_api/core/"
+ ]
+ },
+ {
+ "name": "hdf_core:libhdf_ipc_adapter",
+ "so_file_name": "libhdf_ipc_adapter.z.so",
+ "path": "//drivers/hdf_core/adapter/uhdf2/ipc:libhdf_ipc_adapter",
+ "headers": [
+ "//drivers/hdf_core/interfaces/inner_api/ipc/",
+ "//drivers/hdf_core/interfaces/inner_api/core/"
+ ]
+ },
+ {
+ "name": "ipc:ipc_single",
+ "so_file_name": "libipc_single.z.so",
+ "path": "//foundation/communication/ipc/interfaces/innerkits/ipc_single:ipc_single",
+ "headers": [
+ "//foundation/communication/ipc/interfaces/innerkits/ipc_core/include/",
+ "//foundation/communication/ipc/ipc/native/src/core/include/",
+ "//foundation/communication/ipc/ipc/native/src/mock/include/",
+ "//commonlibrary/c_utils/base/include/"
+ ]
+ },
+ {
+ "name": "drivers_interface_display:libhdifd_parcelable",
+ "so_file_name": "libhdifd_parcelable.z.so",
+ "path": "//drivers/interface/display/composer/hdifd_parcelable:libhdifd_parcelable",
+ "headers": [
+ "//drivers/interface/display/composer/hdifd_parcelable/"
+ ]
+ },
+ {
+ "name": "config_policy:configpolicy_util",
+ "so_file_name": "libconfigpolicy_util.z.so",
+ "path": "//base/customization/config_policy/frameworks/config_policy:configpolicy_util",
+ "headers": [
+ "//base/customization/config_policy/interfaces/inner_api/include/",
+ "//third_party/bounds_checking_function/include/",
+ "//base/startup/init/services/include/param/",
+ "//base/telephony/core_service/utils/common/include/"
+ ]
+ },
+ {
+ "name": "build_framework:libstd.dylib",
+ "so_file_name": "libstd.dylib.so",
+ "path": "//build/rust:libstd.dylib.so",
+ "headers": []
+ },
+ {
+ "name": "libdrm:libdrm",
+ "so_file_name": "libdrm.so",
+ "path": "//third_party/libdrm:libdrm",
+ "headers": [
+ "//third_party/libdrm/",
+ "//third_party/libdrm/include/",
+ "//third_party/libdrm/include/drm/"
+ ]
+ },
+ {
+ "name": "libpng:libpng",
+ "so_file_name": "libpng.z.so",
+ "path": "//third_party/libpng:libpng",
+ "headers": []
+ },
+ {
+ "name": "libxml2:xml2",
+ "so_file_name": "libxml2.z.so",
+ "path": "//third_party/libxml2:xml2",
+ "headers": [
+ "//third_party/libxml2/include/"
+ ]
+ },
+ {
+ "name": "libexif:libexif",
+ "so_file_name": "libexif.z.so",
+ "path": "//third_party/libexif:libexif",
+ "headers": []
+ },
+ {
+ "name": "cJSON:cjson",
+ "so_file_name": "libcjson.z.so",
+ "path": "//third_party/cJSON:cjson",
+ "headers": [
+ "//third_party/cJSON/"
+ ]
+ },
+ {
+ "name": "jsoncpp:jsoncpp",
+ "so_file_name": "libjsoncpp.z.so",
+ "path": "//third_party/jsoncpp:jsoncpp",
+ "headers": [
+ "//third_party/jsoncpp/include/"
+ ]
+ },
+ {
+ "name": "libnl:libnl_share",
+ "so_file_name": "libnl_share.z.so",
+ "path": "//third_party/libnl:libnl_share",
+ "headers": []
+ },
+ {
+ "name": "zlib:shared_libz",
+ "so_file_name": "libshared_libz.z.so",
+ "path": "//third_party/zlib:shared_libz",
+ "headers": [
+ "//third_party/zlib/"
+ ]
+ },
+ {
+ "name": "openssl:libcrypto_shared",
+ "so_file_name": "libcrypto_openssl.z.so",
+ "path": "//third_party/openssl:libcrypto_shared",
+ "headers": [
+ "//third_party/openssl/include/",
+ "//out/rk3568/obj/third_party/openssl/build_all_generated/linux-armv4/include/"
+ ]
+ },
+ {
+ "name": "libunwind:libunwind",
+ "so_file_name": "libunwind.z.so",
+ "path": "//third_party/libunwind:libunwind",
+ "headers": [
+ "//third_party/libunwind/src/",
+ "//third_party/libunwind/include/",
+ "//third_party/libunwind/include/tdep-arm/"
+ ]
+ },
+ {
+ "name": "graphic_2d:surface",
+ "so_file_name": "libsurface.z.so",
+ "path": "//foundation/graphic/graphic_2d/frameworks/surface:surface",
+ "headers": [
+ "//foundation/communication/ipc/interfaces/innerkits/ipc_core/include/",
+ "//foundation/graphic/graphic_2d/interfaces/inner_api/surface/",
+ "//foundation/graphic/graphic_2d/interfaces/inner_api/common/",
+ "//foundation/graphic/graphic_2d/utils/sandbox/",
+ "//commonlibrary/c_utils/base/include/"
+ ]
+ },
+ {
+ "name": "samgr:samgr_proxy",
+ "so_file_name": "libsamgr_proxy.z.so",
+ "path": "//foundation/systemabilitymgr/samgr/interfaces/innerkits/samgr_proxy:samgr_proxy",
+ "headers": [
+ "//foundation/systemabilitymgr/samgr/services/lsamgr/include/"
+ ]
+ },
+ {
+ "name": "ril_adapter:hril_innerkits",
+ "so_file_name": "libhril_innerkits.z.so",
+    "path": "//base/telephony/ril_adapter/interfaces/innerkits:hril_innerkits",
+ "headers": []
+ },
+ {
+ "name": "drivers_interface_power:libpower_proxy_1.0",
+ "so_file_name": "libpower_proxy_1.0.z.so",
+ "path": "//drivers/interface/power/v1_0:libpower_proxy_1.0",
+ "headers": [
+ "//drivers/hdf_core/adapter/uhdf2/include/hdi/",
+ "//drivers/hdf_core/adapter/uhdf2/osal/include/",
+ "//drivers/hdf_core/adapter/uhdf2/ipc/include/",
+ "//drivers/hdf_core/framework/include/utils/",
+ "//drivers/hdf_core/interfaces/inner_api/utils/",
+ "//drivers/hdf_core/interfaces/inner_api/osal/uhdf/",
+ "//drivers/hdf_core/interfaces/inner_api/hdi/",
+ "//out/rk3568/gen/drivers/interface/",
+ "//out/rk3568/gen/drivers/interface/power/"
+ ]
+ },
+ {
+ "name": "drivers_interface_camera:libbuffer_producer_sequenceable_1.0",
+ "so_file_name": "libbuffer_producer_sequenceable_1.0.z.so",
+ "path": "//drivers/interface/camera/sequenceable/buffer_producer:libbuffer_producer_sequenceable_1.0",
+ "headers": [
+ "//drivers/interface/camera/sequenceable/buffer_producer/",
+ "//commonlibrary/c_utils/base/include/",
+ "//third_party/bounds_checking_function/include/",
+ "//foundation/systemabilitymgr/samgr/services/lsamgr/include/",
+ "//base/notification/eventhandler/interfaces/inner_api/",
+ "//base/notification/eventhandler/frameworks/eventhandler/include/"
+ ]
+ },
+ {
+ "name": "ffmpeg:libohosffmpeg",
+ "so_file_name": "libohosffmpeg.z.so",
+ "path": "//third_party/ffmpeg:libohosffmpeg",
+ "headers": []
+ },
+ {
+ "name": "protobuf:protobuf_lite",
+ "so_file_name": "libprotobuf_lite.z.so",
+ "path": "//third_party/protobuf:protobuf_lite",
+ "headers": [
+ "//third_party/protobuf/src/"
+ ]
+ },
+ {
+ "name": "memory_utils:libdmabufheap",
+ "so_file_name": "libdmabufheap.z.so",
+ "path": "//commonlibrary/memory_utils/libdmabufheap:libdmabufheap",
+ "headers": [
+ "//commonlibrary/memory_utils/libdmabufheap/include/"
+ ]
+ },
+ {
+ "name": "graphic_2d:sync_fence",
+ "so_file_name": "libsync_fence.z.so",
+ "path": "//foundation/graphic/graphic_2d/utils/sync_fence:sync_fence",
+ "headers": [
+ "//foundation/graphic/graphic_2d/utils/sync_fence/export/",
+ "//commonlibrary/c_utils/base/include/",
+ "//base/hiviewdfx/hilog/interfaces/native/innerkits/",
+ "//base/notification/eventhandler/interfaces/inner_api/",
+ "//foundation/graphic/graphic_2d/utils/log/"
+ ]
+ },
+ {
+ "name": "access_token:libaccesstoken_sdk",
+ "so_file_name": "libaccesstoken_sdk.z.so",
+ "path": "//base/security/access_token/interfaces/innerkits/accesstoken:libaccesstoken_sdk",
+ "headers": [
+ "//base/security/access_token/interfaces/innerkits/accesstoken/include/"
+ ]
+ },
+ {
+ "name": "drivers_interface_camera:metadata",
+ "so_file_name": "libmetadata.z.so",
+ "path": "//drivers/interface/camera/metadata:metadata",
+ "headers": [
+ "//drivers/interface/camera/metadata/include/",
+ "//drivers/interface/camera/sequenceable/buffer_producer/",
+ "//commonlibrary/c_utils/base/include/",
+ "//third_party/bounds_checking_function/include/",
+ "//foundation/systemabilitymgr/samgr/services/lsamgr/include/",
+ "//base/notification/eventhandler/interfaces/inner_api/",
+ "//base/notification/eventhandler/frameworks/eventhandler/include/"
+
+ ]
+ },
+ {
+ "name": "hitrace:hitrace_meter",
+ "so_file_name": "libhitrace_meter.so",
+ "path": "//base/hiviewdfx/hitrace/interfaces/native/innerkits:hitrace_meter",
+ "headers": [
+ "//base/hiviewdfx/hitrace/interfaces/native/innerkits/include/hitrace_meter/",
+ "//base/hiviewdfx/hitrace/interfaces/native/innerkits/include/",
+ "//base/hiviewdfx/hitrace/frameworks/include/"
+ ]
+ },
+ {
+ "name": "hdf_core:libhdf_utils",
+ "so_file_name": "libhdf_utils.z.so",
+ "path": "//drivers/hdf_core/adapter/uhdf2/utils:libhdf_utils",
+ "headers": [
+ "//drivers/hdf_core/interfaces/inner_api/osal/shared/",
+ "//drivers/hdf_core/interfaces/inner_api/osal/uhdf/",
+ "//drivers/hdf_core/interfaces/inner_api/utils/",
+ "//drivers/hdf_core/interfaces/inner_api/core/",
+ "//drivers/hdf_core/interfaces/inner_api/ipc/",
+ "//drivers/hdf_core/interfaces/inner_api/hdi/"
+ ]
+ },
+ {
+ "name": "drivers_peripheral_codec:libcodec_hdi_omx_callback_type_service_impl",
+ "so_file_name": "libcodec_hdi_omx_callback_type_service_impl.z.so",
+ "path": "//drivers/peripheral/codec/hal:libcodec_hdi_omx_callback_type_service_impl",
+ "headers": [
+ "//commonlibrary/c_utils/base/include/",
+ "//third_party/bounds_checking_function/include/",
+ "//foundation/systemabilitymgr/samgr/services/lsamgr/include/"
+ ]
+ },
+ {
+ "name": "ipc:ipc_core",
+ "so_file_name": "libipc_core.z.so",
+ "path": "//foundation/communication/ipc/interfaces/innerkits/ipc_core:ipc_core",
+ "headers": [
+ "//foundation/communication/ipc/interfaces/innerkits/ipc_core/include/",
+ "//foundation/communication/ipc/ipc/native/src/core/include/",
+ "//foundation/communication/ipc/ipc/native/src/mock/include/",
+ "//commonlibrary/c_utils/base/include/"
+ ]
+ },
+ {
+ "name": "bluetooth:btcommon",
+ "so_file_name": "libbtcommon.z.so",
+ "path": "//foundation/communication/bluetooth/frameworks/inner:btcommon",
+ "headers": [
+ "//foundation/communication/bluetooth/frameworks/inner/common/",
+ "//foundation/communication/bluetooth/frameworks/inner/ipc/common/",
+ "//foundation/communication/bluetooth/frameworks/inner/ipc/include/",
+ "//foundation/communication/bluetooth/frameworks/inner/ipc/interface/",
+ "//foundation/communication/bluetooth/frameworks/inner/ipc/parcel/"
+ ]
+ },
+ {
+ "name": "build_framework:libclang_rt.ubsan_minimal.so",
+ "so_file_name": "libclang_rt.ubsan_minimal.so",
+ "path": "//build/common/ubsan:libclang_rt.ubsan_minimal.so",
+ "headers": []
+ },
+ {
+ "name": "unknow",
+ "so_file_name": "libopencv_core.z.so",
+ "path": "",
+ "headers": []
+ },
+ {
+ "name": "unknow",
+ "so_file_name": "libopencv_imgproc.z.so",
+ "path": "",
+ "headers": []
+ },
+ {
+ "name": "curl:curl_shared",
+ "so_file_name": "libcurl_shared.z.so",
+ "path": "//third_party/curl:curl_shared",
+ "headers": [
+ "//third_party/curl/curl-7.79.1/include/",
+ "//third_party/curl/customized/include/"
+ ]
+ },
+ {
+ "name": "safwk:system_ability_fwk",
+ "so_file_name": "libsystem_ability_fwk.z.so",
+ "path": "//foundation/systemabilitymgr/safwk/interfaces/innerkits/safwk:system_ability_fwk",
+ "headers": [
+ "//foundation/systemabilitymgr/safwk/services/safwk/include/",
+ "//foundation/systemabilitymgr/safwk/interfaces/innerkits/safwk/"
+ ]
+ },
+ {
+ "name": "caas_service:libcpp_shared",
+ "so_file_name": "libc++_shared.so",
+ "path": "",
+ "headers": []
+ },
+ {
+ "name": "drivers_interface_camera:libmap_data_sequenceable_1.0",
+ "so_file_name": "libmap_data_sequenceable_1.0.z.so",
+ "path": "//drivers/interface/camera/sequenceable/map_data:libmap_data_sequenceable_1.0",
+ "headers": [
+ "//drivers/interface/camera/sequenceable/map_data/",
+ "//commonlibrary/c_utils/base/include/",
+ "//third_party/bounds_checking_function/include/",
+ "//foundation/systemabilitymgr/samgr/services/lsamgr/include/",
+ "//foundation/systemabilitymgr/safwk/interfaces/innerkits/dynamic_cache/include/",
+ "//base/notification/eventhandler/interfaces/inner_api/",
+ "//base/notification/eventhandler/frameworks/eventhandler/include/"
+ ]
+ },
+ {
+ "name": "drivers_interface_camera:libbuffer_handle_sequenceable_1.0",
+ "so_file_name": "libbuffer_handle_sequenceable_1.0.z.so",
+ "path": "//drivers/interface/camera/sequenceable/buffer_handle:libbuffer_handle_sequenceable_1.0",
+ "headers": [
+ "//drivers/interface/camera/sequenceable/buffer_handle/",
+ "//commonlibrary/c_utils/base/include/",
+ "//third_party/bounds_checking_function/include/",
+ "//foundation/systemabilitymgr/samgr/services/lsamgr/include/",
+ "//foundation/systemabilitymgr/safwk/interfaces/innerkits/dynamic_cache/include/"
+ ]
+ },
+ {
+ "name": "ffrt:libffrt",
+ "so_file_name": "libffrt.so",
+ "path": "//foundation/resourceschedule/ffrt:libffrt",
+ "headers": [
+ "//foundation/resourceschedule/ffrt/src/",
+ "//foundation/resourceschedule/ffrt/interfaces/kits/",
+ "//foundation/resourceschedule/ffrt/interfaces/inner_api/",
+ "//foundation/resourceschedule/ffrt/src/dfx/log/",
+ "//third_party/jsoncpp/include/",
+ "//foundation/resourceschedule/frame_aware_sched/common/include/",
+ "//third_party/libunwind/include/",
+ "//base/hiviewdfx/hitrace/interfaces/native/innerkits/include/hitrace_meter/"
+ ]
+ },
+ {
+ "name": "container_comm:uds_channel",
+ "so_file_name": "libuds_channel.z.so",
+ "path": "",
+ "headers": []
+ },
+ {
+ "name": "openssl:libssl_shared",
+ "so_file_name": "libssl_openssl.z.so",
+ "path": "//third_party/openssl:libssl_shared",
+ "headers": [
+ "//third_party/openssl/include/"
+ ]
+ },
+ {
+ "name": "certificate_manager:cert_manager_sdk",
+ "so_file_name": "libcert_manager_sdk.z.so",
+ "path": "//base/security/certificate_manager/interfaces/innerkits/cert_manager_standard/main:cert_manager_sdk",
+ "headers": [
+ "//base/security/certificate_manager/interfaces/innerkits/cert_manager_standard/main/include/"
+ ]
+ },
+ {
+ "name": "build_framework:libclang_rt.asan.so",
+ "so_file_name": "libclang_rt.asan.so",
+ "path": "//build/common/asan:libclang_rt.asan.so",
+ "headers": []
+ },
+ {
+ "name": "build_framework:libclang_rt.hwasan.so",
+ "so_file_name": "libclang_rt.hwasan.so",
+ "path": "//build/common/asan:libclang_rt.hwasan.so",
+ "headers": []
+ }
+]
diff --git a/tools/deps_guard/rules/ChipsetSDK/images/._chipsetsdk.png b/tools/deps_guard/rules/ChipsetSDK/images/._chipsetsdk.png
new file mode 100755
index 0000000000000000000000000000000000000000..158c72aaa784bbd27d9384c44af5a161a6be14bf
Binary files /dev/null and b/tools/deps_guard/rules/ChipsetSDK/images/._chipsetsdk.png differ
diff --git a/tools/deps_guard/rules/ChipsetSDK/images/chipsetsdk.png b/tools/deps_guard/rules/ChipsetSDK/images/chipsetsdk.png
new file mode 100755
index 0000000000000000000000000000000000000000..014059a2a3715e87f9f057b4805e9cdf256187f3
Binary files /dev/null and b/tools/deps_guard/rules/ChipsetSDK/images/chipsetsdk.png differ
diff --git a/tools/deps_guard/rules/ChipsetSDK/whitelist.json b/tools/deps_guard/rules/ChipsetSDK/whitelist.json
index 05f50f278e833a4b01eee80d34e4e3cf2b391d86..c08829f43ef9a4579147adee615d4bb7f37093e6 100755
--- a/tools/deps_guard/rules/ChipsetSDK/whitelist.json
+++ b/tools/deps_guard/rules/ChipsetSDK/whitelist.json
@@ -1,7 +1,6 @@
[
"libc.so",
"libc++.so",
- "liblog.so",
"libhilog.so",
"libhilog_ndk.z.so",
"libhisysevent.z.so",
@@ -15,9 +14,9 @@
"libpub_utils.z.so",
"libhdf_ipc_adapter.z.so",
"libipc_single.z.so",
- "libdisplay_gralloc.z.so",
"libhdifd_parcelable.z.so",
"libconfigpolicy_util.z.so",
+ "libstd.dylib.so",
// Third party libraries
"libdrm.so",
@@ -27,45 +26,55 @@
"libcjson.z.so",
"libjsoncpp.z.so",
"libnl_share.z.so",
- "libprotobuf.z.so",
"libshared_libz.z.so",
"libcrypto_openssl.z.so",
-
- "libudev.z.so",
+ "libunwind.z.so",
"libsurface.z.so",
"libsamgr_proxy.z.so",
"libhril_innerkits.z.so",
"libpower_proxy_1.0.z.so",
"libbuffer_producer_sequenceable_1.0.z.so",
+
+ "libmap_data_sequenceable_1.0.z.so",
"libbuffer_handle_sequenceable_1.0.z.so",
+ "libohosffmpeg.z.so",
+
// To be optimized
"libprotobuf_lite.z.so",
"libdmabufheap.z.so",
- "libtinyxml2.z.so",
- "libgralloc_priv.z.so",
"libsync_fence.z.so",
// chipset modules can be depended by system directly
- "libosalbase.z.so",
+ "libaccesstoken_sdk.z.so",
"libmetadata.z.so",
- "libdisplay_device.z.so",
- "libdisplay_layer.z.so",
-
- "libdispdev.z.so",
- "libproperty.z.so",
- "libhril_innerkits_ext.z.so",
"libhdi_input.z.so",
"libhitrace_meter.so",
+ "libhdf_utils.z.so",
+ "libdisplay_buffer_vdi_impl.z.so",
+ "libdisplay_buffer_vendor.z.so",
+ "libcodec_hdi_omx_callback_type_service_impl.z.so",
// by libdisplay_layer_video.z.so for hispark taurus platform
- "libhdi_video_layer_client.z.so",
+
// by libwpa.z.so for hispark taurus platform
- "libwifi_driver_client.z.so",
- "librtg_interface.z.so",
- "libconcurrent_task_client.z.so",
- "libffrt.z.so"
+ "libipc_core.z.so",
+ "libffrt.z.so",
+ "libffrt.so",
+ "libbtcommon.z.so",
+ "libclang_rt.ubsan_minimal.so",
+ "libssl_openssl.z.so",
+
+ //add by analyser noh_926_1914 system.img and vendor.img
+ "libopencv_core.z.so",
+ "libopencv_imgproc.z.so",
+ "libcurl_shared.z.so",
+ "libsystem_ability_fwk.z.so",
+ //add by analyser aln
+ "libc++_shared.so",
+
+ "libuds_channel.z.so"
]
diff --git a/tools/deps_guard/rules/NO-Depends-On-HDI/whitelist.json b/tools/deps_guard/rules/NO-Depends-On-HDI/whitelist.json
index 365431a462d8ab003950d5047d15ea2a8c6b8416..57f84f860b7c1a750d8c62748840883e4ec15192 100755
--- a/tools/deps_guard/rules/NO-Depends-On-HDI/whitelist.json
+++ b/tools/deps_guard/rules/NO-Depends-On-HDI/whitelist.json
@@ -4,4 +4,4 @@
"libusb_pnp_manager.z.so",
"libcamera_daemon.so",
"libril_driver.z.so"
-]
+]
diff --git a/tools/deps_guard/rules/NO-Depends-On-SA/whitelist.json b/tools/deps_guard/rules/NO-Depends-On-SA/whitelist.json
index f8da24c45a8f7550bdb6a0d81d5f46e09847caac..80e52e51c23c968170fb18725fa8fa30e932b9bc 100755
--- a/tools/deps_guard/rules/NO-Depends-On-SA/whitelist.json
+++ b/tools/deps_guard/rules/NO-Depends-On-SA/whitelist.json
@@ -2,5 +2,9 @@
"liblbsservice_locator.z.so",
"libnetsys_native_manager.z.so",
"libavsession_service.z.so",
- "libsg_collect_service.z.so"
+ "libsg_collect_service.z.so",
+ "libabnormal_efficiency_mgr_service.z.so",
+ "libquickfix_engine_service.z.so",
+ "libscreenlock_server.z.so",
+ "libdistributedhardwarefwksvr.z.so"
]
diff --git a/tools/deps_guard/rules_checker/__init__.py b/tools/deps_guard/rules_checker/__init__.py
index 7537a0be8d150023a719403982a0673c68c97772..1819e208d3416b185ae2c78d1470db5e4ffa78d2 100755
--- a/tools/deps_guard/rules_checker/__init__.py
+++ b/tools/deps_guard/rules_checker/__init__.py
@@ -35,8 +35,6 @@ def check_all_rules(mgr, args):
r.log("Do %s rule checking now:" % rule.RULE_NAME)
if not r.check():
passed = False
-
- if not passed:
r.log(" Please refer to: \033[91m%s\x1b[0m" % r.get_help_url())
if args and args.no_fail:
diff --git a/tools/deps_guard/rules_checker/chipsetsdk.py b/tools/deps_guard/rules_checker/chipsetsdk.py
index 1f91a1afa35827978c4df48365bf6d3fd154d82f..0c77f040fd661cb9e05e24a5ea8d9aa83fb22e37 100755
--- a/tools/deps_guard/rules_checker/chipsetsdk.py
+++ b/tools/deps_guard/rules_checker/chipsetsdk.py
@@ -22,122 +22,223 @@ import json
from .base_rule import BaseRule
class ChipsetSDKRule(BaseRule):
- RULE_NAME = "ChipsetSDK"
-
- def __is_chipsetsdk_tagged(self, mod):
- if not "innerapi_tags" in mod:
- return False
- if "ndk" in mod["innerapi_tags"]:
- return True
- if "chipsetsdk" in mod["innerapi_tags"]:
- return True
- return False
-
- def __write_innerkits_header_files(self, chipsetsdks):
- inner_kits_info = os.path.join(self.get_mgr().get_product_out_path(), "build_configs/parts_info/inner_kits_info.json")
- with open(inner_kits_info, "r") as f:
- info = json.load(f)
-
- headers = []
- for sdk in chipsetsdks:
- path = sdk["labelPath"][:sdk["labelPath"].find(":")]
- item = {"chipsetsdk": sdk["name"], "path": path, "headers": []}
- if sdk["componentName"] not in info:
- headers.append(item)
- continue
-
- for name, innerapi in info[sdk["componentName"]].items():
- if innerapi["label"] != sdk["labelPath"]:
- continue
- gotHeaders = True
- base = innerapi["header_base"]
- for f in innerapi["header_files"]:
- item["headers"].append(os.path.join(base, f))
- headers.append(item)
-
- try:
- with open(os.path.join(self.get_mgr().get_product_images_path(), "chipsetsdk_info.json"), "w") as f:
- json.dump(headers, f, indent = 4)
- except:
- pass
-
- return headers
-
- def __check_depends_on_chipsetsdk(self):
- lists = self.get_white_lists()
-
- passed = True
-
- chipsetsdks = []
- modules_with_chipsetsdk_tag = []
-
- # Check if any napi modules has dependedBy
- for mod in self.get_mgr().get_all():
- if self.__is_chipsetsdk_tagged(mod):
- modules_with_chipsetsdk_tag.append(mod)
-
- # Check chipset modules only
- if mod["path"].startswith("system"):
- continue
-
- # Check chipset modules depends
- for dep in mod["deps"]:
- callee = dep["callee"]
-
- # If callee is chipset module, it is OK
- if not callee["path"].startswith("system"):
- continue
-
- if callee not in chipsetsdks:
- if "hdiType" not in callee or callee["hdiType"] != "hdi_proxy":
- chipsetsdks.append(callee)
- # If callee is in Chipset SDK white list module, it is OK
- if callee["name"] in lists:
- continue
-
- # If callee is asan library, it is OK
- if callee["name"].endswith(".asan.so"):
- continue
-
- # If callee is hdi proxy module, it is OK
- if "hdiType" in callee and callee["hdiType"] == "hdi_proxy":
- continue
-
- # Not allowed
- passed = False
- self.error("chipset module %s depends on non Chipset SDK module %s in %s" % (mod["name"], callee["name"], mod["labelPath"]))
-
- # Check chipset modules dependedBy
- for dep in mod["dependedBy"]:
- caller = dep["caller"]
-
- # Called by chipset module, it is OK
- if not caller["path"].startswith("system"):
- continue
-
- if mod not in chipsetsdks:
- chipsetsdks.append(mod)
-
- # If chipset module is in Chipset SDK white list module, it is OK
- if mod["name"] in lists:
- continue
-
- # Not allowed
- passed = False
- self.error("system module %s depends on chipset module %s in %s" % (caller["name"], mod["name"], caller["labelPath"]))
-
- for mod in chipsetsdks:
- if not self.__is_chipsetsdk_tagged(mod):
- self.warn('Chipset SDK module %s has no innerapi_tags with "chipsetsdk" or "csdk", add it in %s' % (mod["name"], mod["labelPath"]))
-
- for mod in modules_with_chipsetsdk_tag:
- if mod["name"] not in lists:
- passed = False
- self.error('non chipsetsdk module %s with innerapi_tags="chipsetsdk" or "csdk", %s' % (mod["name"], mod["labelPath"]))
-
- self.__write_innerkits_header_files(chipsetsdks)
-
- return passed
-
- def check(self):
- return self.__check_depends_on_chipsetsdk()
+ RULE_NAME = "ChipsetSDK"
+
+ def __init__(self, mgr, args):
+ super().__init__(mgr, args)
+ self.__out_path = mgr.get_product_out_path()
+ self.__white_lists = self.load_chipsetsdk_json("chipsetsdk_info.json")
+
+ def get_white_lists(self):
+ return self.__white_lists
+
+ def get_out_path(self):
+ return self.__out_path
+
+ def load_chipsetsdk_json(self, name):
+ rules_dir = []
+ rules_dir.append(os.path.join(os.path.dirname(os.path.realpath(__file__)), "../rules"))
+ if self._args and self._args.rules:
+ self.log("****add more ChipsetSDK info in:{}****".format(self._args.rules))
+ rules_dir = rules_dir + self._args.rules
+
+ chipsetSDK_rules_path = self.get_out_path()
+ if os.path.exists(chipsetSDK_rules_path):
+ self.log("****add more ChipsetSDK info in dir:{}****".format(chipsetSDK_rules_path))
+ rules_dir.append(chipsetSDK_rules_path)
+ res = []
+ for d in rules_dir:
+ rules_file = os.path.join(d, self.__class__.RULE_NAME, name)
+ if os.path.isfile(rules_file):
+ res = self.__parser_rules_file(rules_file, res)
+ else:
+ self.warn("****rules path not exist: {}****".format(rules_file))
+
+ return res
+
+ def __parser_rules_file(self, rules_file, res):
+ try:
+ self.log("****Parsing rules file in {}****".format(rules_file))
+ with open(rules_file, "r") as f:
+ contents = f.read()
+ if not contents:
+ self.log("****rules file {} is null****".format(rules_file))
+ return res
+ json_data = json.loads(contents)
+ for so in json_data:
+ so_file_name = so.get("so_file_name")
+ if so_file_name and so_file_name not in res:
+ res.append(so_file_name)
+ except(FileNotFoundError, IOError, UnicodeDecodeError) as file_open_or_decode_err:
+ self.error(file_open_or_decode_err)
+
+ return res
+
+ def __is_chipsetsdk_tagged(self, mod):
+ if not "innerapi_tags" in mod:
+ return False
+ if "chipsetsdk" in mod["innerapi_tags"]:
+ return True
+ return False
+
+ def __is_chipsetsdk_indirect(self, mod):
+ if not "innerapi_tags" in mod:
+ return False
+ if "chipsetsdk_indirect" in mod["innerapi_tags"]:
+ return True
+ return False
+
+ def __write_innerkits_header_files(self):
+ inner_kits_info = os.path.join(self.get_mgr().get_product_out_path(),
+ "build_configs/parts_info/inner_kits_info.json")
+ with open(inner_kits_info, "r") as f:
+ info = json.load(f)
+
+ headers = []
+ for sdk in self.__chipsetsdks:
+ path = sdk["labelPath"][:sdk["labelPath"].find(":")]
+ target_name = sdk["labelPath"][sdk["labelPath"].find(":") + 1:]
+ item = {"name": sdk["componentName"] + ":" + target_name, "so_file_name":
+ sdk["name"], "path": sdk["labelPath"], "headers": []}
+ if sdk["componentName"] not in info:
+ headers.append(item)
+ continue
+
+ for name, innerapi in info[sdk["componentName"]].items():
+ if innerapi["label"] != sdk["labelPath"]:
+ continue
+ gotHeaders = True
+ base = innerapi["header_base"]
+ for f in innerapi["header_files"]:
+ item["headers"].append(os.path.join(base, f))
+ headers.append(item)
+
+ try:
+ with open(os.path.join(self.get_mgr().get_product_images_path(), "chipsetsdk_info.json"), "w") as f:
+ json.dump(headers, f, indent = 4)
+ except:
+ pass
+
+ return headers
+
+ def __check_chipsetsdk_indirect(self):
+ passed = True
+ for mod in self.__chipsetsdks:
+ for dep in mod["deps"]:
+ callee = dep["callee"]
+
+ # Chipset SDK is OK
+ if callee["name"] in self.get_white_lists():
+ continue
+
+ # chipsetsdk_indirect module is OK
+ if self.__is_chipsetsdk_indirect(callee) or callee["name"] in self.__indirects:
+ continue
+
+ # Not correct
+ passed = False
+ self.error('Chipset SDK module %s should not depends on non Chipset SDK module \
+ %s in %s with "chipsetsdk_indirect"' % (mod["name"], callee["name"], callee["labelPath"]))
+
+ return passed
+
+ def __check_depends_on_chipsetsdk(self):
+ lists = self.get_white_lists()
+
+ passed = True
+
+ self.__chipsetsdks = []
+ self.__modules_with_chipsetsdk_tag = []
+ self.__modules_with_chipsetsdk_indirect_tag = []
+
+ # Check if any napi modules has dependedBy
+ for mod in self.get_mgr().get_all():
+ # Collect all modules with chipsetsdk tag
+ if self.__is_chipsetsdk_tagged(mod):
+ self.__modules_with_chipsetsdk_tag.append(mod)
+
+ # Collect all modules with chipsetsdk_indirect tag
+ if self.__is_chipsetsdk_indirect(mod):
+ self.__modules_with_chipsetsdk_indirect_tag.append(mod)
+
+ # Check chipset modules only
+ if mod["path"].startswith("system"):
+ continue
+
+ # Check chipset modules depends
+ for dep in mod["deps"]:
+ callee = dep["callee"]
+
+ # If callee is chipset module, it is OK
+ if not callee["path"].startswith("system"):
+ continue
+
+ # Add to list
+ if callee not in self.__chipsetsdks:
+ if "hdiType" not in callee or callee["hdiType"] != "hdi_proxy":
+ self.__chipsetsdks.append(callee)
+
+ # If callee is in Chipset SDK white list module, it is OK
+ if callee["name"] in lists:
+ continue
+
+ # If callee is asan library, it is OK
+ if callee["name"].endswith(".asan.so"):
+ continue
+
+ # If callee is hdi proxy module, it is OK
+ if "hdiType" in callee and callee["hdiType"] == "hdi_proxy":
+ continue
+
+ # Not allowed
+ passed = False
+ self.error("chipset module %s depends on non Chipset SDK module %s in %s"
+ % (mod["name"], callee["name"], mod["labelPath"]))
+
+ return passed
+
+
+ def __check_if_tagged_correctly(self):
+ passed = True
+ for mod in self.__chipsetsdks:
+ if not self.__is_chipsetsdk_tagged(mod):
+ self.warn('Chipset SDK module %s has no innerapi_tags with "chipsetsdk", add it in %s'
+ % (mod["name"], mod["labelPath"]))
+
+ for mod in self.__modules_with_chipsetsdk_tag:
+ if mod["name"] not in self.get_white_lists():
+ passed = False
+ self.error('non chipsetsdk module %s with innerapi_tags="chipsetsdk", %s'
+ % (mod["name"], mod["labelPath"]))
+
+ for mod in self.__modules_with_chipsetsdk_indirect_tag:
+ if mod["name"] not in self.__indirects and mod["name"] not in self.get_white_lists():
+ self.warn('non chipsetsdk_indirect module %s with innerapi_tags="chipsetsdk_indirect", %s'
+ % (mod["name"], mod["labelPath"]))
+
+ return passed
+
+ def __load_chipsetsdk_indirects(self):
+ self.__indirects = self.load_chipsetsdk_json("chipsetsdk_indirect.json")
+
+ def check(self):
+ self.__load_chipsetsdk_indirects()
+
+ # Check if all chipset modules depends on chipsetsdk modules only
+ passed = self.__check_depends_on_chipsetsdk()
+ if not passed:
+ return passed
+
+ # Check if all chipsetsdk module depends on chipsetsdk or chipsetsdk_indirect modules only
+ passed = self.__check_chipsetsdk_indirect()
+ if not passed:
+ return passed
+
+ # Check if all ChipsetSDK modules are correctly tagged by innerapi_tags
+ passed = self.__check_if_tagged_correctly()
+ if not passed:
+ return passed
+
+ self.__write_innerkits_header_files()
+
+ return True
diff --git a/tools/deps_guard/rules_checker/hdi_rule.py b/tools/deps_guard/rules_checker/hdi_rule.py
index 170170931c6053d5aac39a0bd2b1c5c208d40e40..a547064fd45bde23ed7afe34c29021e2a442973e 100755
--- a/tools/deps_guard/rules_checker/hdi_rule.py
+++ b/tools/deps_guard/rules_checker/hdi_rule.py
@@ -20,62 +20,73 @@ import json
from .base_rule import BaseRule
-class HdiRule(BaseRule):
- RULE_NAME = "NO-Depends-On-HDI"
-
- def __check_depends_on_hdi(self):
- lists = self.get_white_lists()
-
- passed = True
-
- hdi_without_shlib_type = []
- non_hdi_with_hdi_shlib_type = []
-
- # Check if any napi modules has dependedBy
- for mod in self.get_mgr().get_all():
- is_hdi = False
- if "hdiType" in mod and mod["hdiType"] == "hdi_service":
- is_hdi = True
- # Collect non HDI modules with shlib_type of value "hdi"
- if not is_hdi and ("shlib_type" in mod and mod["shlib_type"] == "hdi"):
- non_hdi_with_hdi_shlib_type.append(mod)
-
- # Collect HDI modules without shlib_type with value of "hdi"
- if is_hdi and ("shlib_type" not in mod or mod["shlib_type"] != "hdi"):
- if mod["name"] not in lists:
- hdi_without_shlib_type.append(mod)
-
- if not is_hdi:
- continue
-
- if len(mod["dependedBy"]) == 0:
- continue
- if mod["name"] in lists:
- continue
-
- # If hdi module has version_script to specify exported symbols, it can be depended by others
- if "version_script" in mod:
- continue
-
- # Check if HDI modules is depended by other modules
- self.error("hdi module %s depended by:" % mod["name"])
- for dep in mod["dependedBy"]:
- caller = dep["caller"]
- self.log(" module [%s] defined in [%s]" % (caller["name"], caller["labelPath"]))
- passed = False
-
- if len(hdi_without_shlib_type) > 0:
- for mod in hdi_without_shlib_type:
- if mod["name"] not in lists:
- passed = False
- self.error('hdi module %s has no shlib_type="hdi", add it in %s' % (mod["name"], mod["labelPath"]))
-
- if len(non_hdi_with_hdi_shlib_type) > 0:
- for mod in non_hdi_with_hdi_shlib_type:
- self.warn('non hdi module %s with shlib_type="hdi", %s' % (mod["name"], mod["labelPath"]))
-
- return passed
-
- def check(self):
- return self.__check_depends_on_hdi()
+class HdiRule(BaseRule):
+ RULE_NAME = "NO-Depends-On-HDI"
+
+ def __check_depends_on_hdi(self):
+ lists = self.get_white_lists()
+
+ passed = True
+
+ hdi_without_shlib_type = []
+ non_hdi_with_hdi_shlib_type = []
+
+ # Check if any napi modules has dependedBy
+ for mod in self.get_mgr().get_all():
+ is_hdi = False
+ if "hdiType" in mod and mod["hdiType"] == "hdi_service":
+ is_hdi = True
+ # Collect non HDI modules with shlib_type of value "hdi"
+ if not is_hdi and ("shlib_type" in mod and mod["shlib_type"] == "hdi"):
+ non_hdi_with_hdi_shlib_type.append(mod)
+
+ # Collect HDI modules without shlib_type with value of "hdi"
+ if is_hdi and ("shlib_type" not in mod or mod["shlib_type"] != "hdi"):
+ if mod["name"] not in lists:
+ hdi_without_shlib_type.append(mod)
+
+ if self.__ignore_mod(mod, is_hdi, lists):
+ continue
+
+ # Check if HDI modules is depended by other modules
+ self.error("hdi module %s depended by:" % mod["name"])
+ for dep in mod["dependedBy"]:
+ caller = dep["caller"]
+ self.log(" module [%s] defined in [%s]" % (caller["name"], caller["labelPath"]))
+ passed = False
+
+ if len(hdi_without_shlib_type) > 0:
+ for mod in hdi_without_shlib_type:
+ if mod["name"] not in lists:
+ passed = False
+ self.error('hdi module %s has no shlib_type="hdi", add it in %s' % (mod["name"], mod["labelPath"]))
+
+ if len(non_hdi_with_hdi_shlib_type) > 0:
+ for mod in non_hdi_with_hdi_shlib_type:
+ self.warn('non hdi module %s with shlib_type="hdi", %s' % (mod["name"], mod["labelPath"]))
+
+ return passed
+
+ def check(self):
+ return self.__check_depends_on_hdi()
+
+ def __ignore_mod(self, mod, is_hdi, lists):
+ ignore_flag = False
+ if not is_hdi:
+ ignore_flag = True
+ return ignore_flag
+
+ if len(mod["dependedBy"]) == 0:
+ ignore_flag = True
+ return ignore_flag
+
+ if mod["name"] in lists:
+ ignore_flag = True
+ return ignore_flag
+
+ # If hdi module has version_script to specify exported symbols, it can be depended by others
+ if "version_script" in mod:
+ ignore_flag = True
+
+ return ignore_flag
diff --git a/tools/fotff/.gitignore b/tools/fotff/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..c9ab9e9416a2460775b9776c0df04af157fba9cf
--- /dev/null
+++ b/tools/fotff/.gitignore
@@ -0,0 +1,28 @@
+# Binaries, caches, configs and outputs for programs and plugins
+*.exe
+*.exe~
+*.dll
+*.so
+*.dylib
+logs
+fotff
+fotff.ini
+.fotff
+
+# xdevice default directories
+config
+testcases
+reports
+resource
+
+# Test binary, built with `go test -c`
+*.test
+
+# Output of the go coverage tool, specifically when used with LiteIDE
+*.out
+
+# Dependency directories (remove the comment below to include it)
+# vendor/
+
+# JetBrains IDE
+.idea
diff --git a/tools/fotff/LICENSE b/tools/fotff/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..29f81d812f3e768fa89638d1f72920dbfd1413a8
--- /dev/null
+++ b/tools/fotff/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/tools/fotff/README.md b/tools/fotff/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..fb295d499b153fab314776a5fc40fa2f26e03b07
--- /dev/null
+++ b/tools/fotff/README.md
@@ -0,0 +1,75 @@
+# fotff
+
+#### 介绍
+
+fotff(find out the first fault)是为OpenHarmony持续集成设计的自动化问题分析工具。
+
+为了平衡开销与收益,考虑到开发效率、资源占用等因素影响,OpenHarmony代码合入门禁(冒烟测试)只拦截部分严重基础问题(例如开机失败、关键进程崩溃、UX布局严重错乱、电话/相机基础功能不可用等)。因此,一些会影响到更细节功能、影响兼容性、系统稳定性等的问题代码将可能被合入。
+
+fotff提供了一个框架,不断地对最新持续集成版本运行测试套,然后对其中失败用例进行分析:找到或生成在该用例上次通过的持续集成版本和本次失败的持续集成版本之间的所有中间版本,然后运用二分法的思想,找到出现该问题的第一个中间版本,从而给出引入该问题的代码提交。
+
+#### 软件架构
+
+```
+fotff
+├── .fotff # 缓存等程序运行时产生的文件的存放目录
+├── logs # 日志存放目录
+├── pkg # 版本包管理的接口定义和特定开发板形态的具体实现
+├── rec # 测试结果记录和分析
+├── tester # 测试套的接口定义和调用测试框架的具体实现
+├── utils # 一些通用的类库
+├── vcs # 版本控制相关的包,比如manifest的处理,通过OpenAPI访问gitee查询信息的函数等
+├── fotff.ini # 运行需要的必要参数配置,比如指定测试套、配置构建服务器、HTTP代理等
+└── main.go # 框架入口
+```
+
+#### 安装教程
+
+1. 获取[GoSDK](https://golang.google.cn/dl/)并按照指引安装。
+2. 在代码工程根目录执行```go build```编译。如下载依赖库出现网络问题,必要时配置GOPROXY代理。
+3. 更改fotff.ini,按功能需要,选择版本包和测试套的具体实现,完成对应参数配置,并将可能涉及到的测试用例集、脚本、刷机工具等放置到对应位置。
+
+#### 使用说明
+
+###### 普通模式
+
+example: ```fotff```
+
+1. 配置好fotff.ini文件后,不指定任何命令行参数直接执行二进制,即进入普通模式。此模式下,框架会自动不断地获取最新持续集成版本,并对其运行测试套,然后对其中失败用例进行分析。
+2. 分析结果在.fotff/records.json文件中记录;如果配置了邮箱信息,会发送结果到指定邮箱。
+
+###### 对单个用例在指定区间内查找
+
+example: ```fotff run -s pkgDir1 -f pkgDir2 -t TEST_CASE_001```
+
+1. 配置好fotff.ini文件后,通过-s/-f/-t参数在命令行中分别指定成功版本/失败版本/测试用例名,即可对单个用例在指定区间内查找。此模式下,仅在指定的两个版本间进行二分查找,运行指定的测试用例。
+2. 分析结果在控制台中打印,不会发送邮件。
+
+###### 烧写指定版本包
+
+example: ```fotff flash -p pkgDir -d 7001005458323933328a01fce1dc3800```
+
+配置好fotff.ini文件后,可以指定版本包目录烧写对应版本。
+
+###### tips
+
+1. 刷机、测试具体实现可能涉及到[hdc_std](https://gitee.com/openharmony/developtools_hdc)、[xdevice](https://gitee.com/openharmony/testfwk_xdevice),安装和配置请参考对应工具的相关页面。
+2. xdevice运行需要Python运行环境,请提前安装。
+3. 刷机、测试过程需要对应开发板的驱动程序,请提前安装。
+
+#### 参与贡献
+
+1. Fork 本仓库
+2. 新建 Feat_xxx 分支
+3. 提交代码
+4. 新建 Pull Request
+
+#### 相关链接
+
+[OpenHarmony CI](http://ci.openharmony.cn/dailys/dailybuilds)
+
+[developtools_hdc](https://gitee.com/openharmony/developtools_hdc)
+
+[dayu200_tools](https://gitee.com/hihope_iot/docs/tree/master/HiHope_DAYU200/烧写工具及指南)
+
+[testfwk_xdevice](https://gitee.com/openharmony/testfwk_xdevice)
diff --git a/tools/fotff/fotff.ini b/tools/fotff/fotff.ini
new file mode 100644
index 0000000000000000000000000000000000000000..08e6ba9131b9477ec56fca4d637d5d4e9459485f
--- /dev/null
+++ b/tools/fotff/fotff.ini
@@ -0,0 +1,58 @@
+pkg_manager = mock
+tester = mock
+
+[pprof]
+enable = true
+port = 8080
+
+[mail]
+host = smtp.example.com
+port = 465
+user = admin
+password = password
+from = admin@example.com
+to = alice@example.com,bob@example.com
+
+[proxy]
+server_list =
+user =
+password =
+
+[resources]
+device_sn_list = 7001005458323933328a01fce1dc3800
+build_server_addr_list = 127.0.0.1:22
+build_server_user = root
+build_server_password = root
+build_server_workspace = /root/fotff/build_workspace
+
+[dayu200]
+archive_dir = C:\dayu200
+watch_ci = false
+workspace = C:\dayu200_workspace
+location_id_list = 110
+branch = master
+manifest_branch = master
+flash_tool = ./pkg/dayu200/upgrade_tool.exe
+
+[gitee_build]
+archive_dir = C:\dayu200
+workspace = C:\dayu200_workspace
+branch = master
+manifest_branch = master
+component = dayu200_asan
+pre_compile_cmd = rm -rf out*; ./build/prebuilts_download.sh
+compile_cmd = ./build/common/asan/build_mixed_asan.sh --product-name rk3568 --ccache -g5:foundation,graphic,distributed_data -g2:appspawn --build-variant root -g0:access_token,accessibility,accountmgr,audio_policy,avsession_service,bgtaskmgr_service,bluetooth_service,bytrace,camera_service,cert_manager_service,console,dcamera,device_manager,device_usage_statistics_service,deviceauth_service,deviceinfoservice,dhardware,distributed_data,distributedbms,distributedfile,distributedsched,downloadservice,dscreen,dslm_service,edm,faultloggerd,hidumper_service,hilogd,hitrace,hiview,huks_service,inputmethodservice,installs,locationsa,media_service,memmgrservice,msdp_musl,multimodalinput,netmanager_base,netsysnative,nwebspawn,param_watcher,pinauth_sa_profile,privacy,pulseaudio,quick_fix,samgr_standard,sensors_musl,storage_daemon,sys_installer_sa,telephony,thermal_protector,timeservice,ui_service,wifi_hal_service,wifi_standard
+image_list = out/rk3568/packages/phone/images/uboot.img
+
+[xdevice]
+task = acts
+config = ./config/user_config.xml
+test_cases_path = ./testcases
+resource_path = ./resource
+
+[smoke]
+py = ./smoke/resource/capturescreentest.py
+config = ./smoke/resource/app_capture_screen_test_config.json
+answer_path = ./smoke/resource
+save_path = ./smoke/save
+tools_path = ./smoke
diff --git a/tools/fotff/go.mod b/tools/fotff/go.mod
new file mode 100644
index 0000000000000000000000000000000000000000..ab14fc53d891dd8f518773d74569802ce97fc411
--- /dev/null
+++ b/tools/fotff/go.mod
@@ -0,0 +1,28 @@
+module fotff
+
+go 1.19
+
+require (
+ code.cloudfoundry.org/archiver v0.0.0-20221114120234-625eff81a7ef
+ github.com/Unknwon/goconfig v1.0.0
+ github.com/huandu/go-clone v1.4.1
+ github.com/jedib0t/go-pretty/v6 v6.4.3
+ github.com/patrickmn/go-cache v2.1.0+incompatible
+ github.com/pkg/sftp v1.13.5
+ github.com/sirupsen/logrus v1.9.0
+ github.com/spf13/cobra v1.6.1
+ golang.org/x/crypto v0.3.0
+ gopkg.in/gomail.v2 v2.0.0-20160411212932-81ebce5c23df
+)
+
+require (
+ github.com/cyphar/filepath-securejoin v0.2.3 // indirect
+ github.com/inconshreveable/mousetrap v1.0.1 // indirect
+ github.com/kr/fs v0.1.0 // indirect
+ github.com/mattn/go-runewidth v0.0.13 // indirect
+ github.com/rivo/uniseg v0.2.0 // indirect
+ github.com/smartystreets/goconvey v1.7.2 // indirect
+ github.com/spf13/pflag v1.0.5 // indirect
+ golang.org/x/sys v0.2.0 // indirect
+ gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc // indirect
+)
diff --git a/tools/fotff/go.sum b/tools/fotff/go.sum
new file mode 100644
index 0000000000000000000000000000000000000000..9649c260cc6bb63a77b905ee0414c29cab54841f
--- /dev/null
+++ b/tools/fotff/go.sum
@@ -0,0 +1,91 @@
+code.cloudfoundry.org/archiver v0.0.0-20221114120234-625eff81a7ef h1:YMr8OebAw8ufxTyTLPFbMmiChH4M+1RaIpsdLKojZ48=
+code.cloudfoundry.org/archiver v0.0.0-20221114120234-625eff81a7ef/go.mod h1:WK8AWnIZ1W1EpPoVLzsSshXKKqP1Nzk6SoVRxD9cx54=
+github.com/Unknwon/goconfig v1.0.0 h1:9IAu/BYbSLQi8puFjUQApZTxIHqSwrj5d8vpP8vTq4A=
+github.com/Unknwon/goconfig v1.0.0/go.mod h1:wngxua9XCNjvHjDiTiV26DaKDT+0c63QR6H5hjVUUxw=
+github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
+github.com/cyphar/filepath-securejoin v0.2.3 h1:YX6ebbZCZP7VkM3scTTokDgBL2TY741X51MTk3ycuNI=
+github.com/cyphar/filepath-securejoin v0.2.3/go.mod h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4=
+github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
+github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4=
+github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
+github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8=
+github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
+github.com/huandu/go-assert v1.1.5 h1:fjemmA7sSfYHJD7CUqs9qTwwfdNAx7/j2/ZlHXzNB3c=
+github.com/huandu/go-assert v1.1.5/go.mod h1:yOLvuqZwmcHIC5rIzrBhT7D3Q9c3GFnd0JrPVhn/06U=
+github.com/huandu/go-clone v1.4.1 h1:QQYjiLadyxOvdwgZoH8f1xGkvvf4+Cm8be7fo9W2QQA=
+github.com/huandu/go-clone v1.4.1/go.mod h1:ReGivhG6op3GYr+UY3lS6mxjKp7MIGTknuU5TbTVaXE=
+github.com/inconshreveable/mousetrap v1.0.1 h1:U3uMjPSQEBMNp1lFxmllqCPM6P5u/Xq7Pgzkat/bFNc=
+github.com/inconshreveable/mousetrap v1.0.1/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
+github.com/jedib0t/go-pretty/v6 v6.4.3 h1:2n9BZ0YQiXGESUSR+6FLg0WWWE80u+mIz35f0uHWcIE=
+github.com/jedib0t/go-pretty/v6 v6.4.3/go.mod h1:MgmISkTWDSFu0xOqiZ0mKNntMQ2mDgOcwOkwBEkMDJI=
+github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo=
+github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
+github.com/kr/fs v0.1.0 h1:Jskdu9ieNAYnjxsi0LbQp1ulIKZV1LAFgK1tWhpZgl8=
+github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg=
+github.com/mattn/go-runewidth v0.0.13 h1:lTGmDsbAYt5DmK6OnoV7EuIF1wEIFAcxld6ypU4OSgU=
+github.com/mattn/go-runewidth v0.0.13/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
+github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE=
+github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE=
+github.com/onsi/gomega v1.24.1 h1:KORJXNNTzJXzu4ScJWssJfJMnJ+2QJqhoQSRwNlze9E=
+github.com/patrickmn/go-cache v2.1.0+incompatible h1:HRMgzkcYKYpi3C8ajMPV8OFXaaRUnok+kx1WdO15EQc=
+github.com/patrickmn/go-cache v2.1.0+incompatible/go.mod h1:3Qf8kWWT7OJRJbdiICTKqZju1ZixQ/KpMGzzAfe6+WQ=
+github.com/pkg/profile v1.6.0/go.mod h1:qBsxPvzyUincmltOk6iyRVxHYg4adc0OFOv72ZdLa18=
+github.com/pkg/sftp v1.13.5 h1:a3RLUqkyjYRtBTZJZ1VRrKbN3zhuPLlUc3sphVz81go=
+github.com/pkg/sftp v1.13.5/go.mod h1:wHDZ0IZX6JcBYRK1TH9bcVq8G7TLpVHYIGJRFnmPfxg=
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY=
+github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
+github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
+github.com/sirupsen/logrus v1.9.0 h1:trlNQbNUG3OdDrDil03MCb1H2o9nJ1x4/5LYw7byDE0=
+github.com/sirupsen/logrus v1.9.0/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
+github.com/smartystreets/assertions v1.2.0 h1:42S6lae5dvLc7BrLu/0ugRtcFVjoJNMC/N3yZFZkDFs=
+github.com/smartystreets/assertions v1.2.0/go.mod h1:tcbTF8ujkAEcZ8TElKY+i30BzYlVhC/LOxJk7iOWnoo=
+github.com/smartystreets/goconvey v1.7.2 h1:9RBaZCeXEQ3UselpuwUQHltGVXvdwm6cv1hgR6gDIPg=
+github.com/smartystreets/goconvey v1.7.2/go.mod h1:Vw0tHAZW6lzCRk3xgdin6fKYcG+G3Pg9vgXWeJpQFMM=
+github.com/spf13/cobra v1.6.1 h1:o94oiPyS4KD1mPy2fmcYYHHfCxLqYjJOhGsCHFZtEzA=
+github.com/spf13/cobra v1.6.1/go.mod h1:IOw/AERYS7UzyrGinqmz6HLUo219MORXGxhbaJUqzrY=
+github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
+github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
+github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
+github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
+github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/stretchr/testify v1.7.4 h1:wZRexSlwd7ZXfKINDLsO4r7WBt3gTKONc6K/VesHvHM=
+github.com/stretchr/testify v1.7.4/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
+golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
+golang.org/x/crypto v0.0.0-20211215153901-e495a2d5b3d3/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
+golang.org/x/crypto v0.3.0 h1:a06MkbcxBrEFc0w0QIZWXrH/9cCX6KJyWbBOIwAn+7A=
+golang.org/x/crypto v0.3.0/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4=
+golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
+golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
+golang.org/x/net v0.2.0 h1:sZfSu1wtKLGlWI4ZZayP0ck9Y73K1ynO6gqzTdBVdPU=
+golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.2.0 h1:ljd4t30dBnAvMZaQCevtY0xLLD0A+bRZXbgLMLU1F/A=
+golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
+golang.org/x/term v0.2.0 h1:z85xZCsEl7bi/KwbNADeBYoOP0++7W1ipu+aGnpwzRM=
+golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/text v0.4.0 h1:BrVqGRd7+k1DiOgtnFvAkoQEWQvBc25ouMJM6429SFg=
+golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
+gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc h1:2gGKlE2+asNV9m7xrywl36YYNnBG5ZQ0r/BOOxqPpmk=
+gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc/go.mod h1:m7x9LTH6d71AHyAX77c9yqWCCa3UKHcVEj9y7hAtKDk=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/gomail.v2 v2.0.0-20160411212932-81ebce5c23df h1:n7WqCuqOuCbNr617RXOY0AWRXxgwEyPp2z+p0+hgMuE=
+gopkg.in/gomail.v2 v2.0.0-20160411212932-81ebce5c23df/go.mod h1:LRQQ+SO6ZHR7tOkpBDuZnXENFzX8qRjMDMyPD6BRkCw=
+gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=
+gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
+gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
diff --git a/tools/fotff/main.go b/tools/fotff/main.go
new file mode 100644
index 0000000000000000000000000000000000000000..25f3d2fcc1aa5b8eb972302d4d00a44b8bc6e4be
--- /dev/null
+++ b/tools/fotff/main.go
@@ -0,0 +1,208 @@
+/*
+ * Copyright (c) 2022 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package main
+
+import (
+ "context"
+ "fotff/pkg"
+ "fotff/pkg/dayu200"
+ "fotff/pkg/gitee_build"
+ "fotff/pkg/mock"
+ "fotff/rec"
+ "fotff/res"
+ "fotff/tester"
+ "fotff/tester/common"
+ "fotff/tester/manual"
+ testermock "fotff/tester/mock"
+ "fotff/tester/pkg_available"
+ "fotff/tester/smoke"
+ "fotff/tester/xdevice"
+ "fotff/utils"
+ "os"
+ "path/filepath"
+
+ "github.com/sirupsen/logrus"
+ "github.com/spf13/cobra"
+)
+
+var newPkgMgrFuncs = map[string]pkg.NewFunc{
+ "mock": mock.NewManager,
+ "dayu200": dayu200.NewManager,
+ "gitee_build": gitee_build.NewManager,
+}
+
+var newTesterFuncs = map[string]tester.NewFunc{
+ "mock": testermock.NewTester,
+ "manual": manual.NewTester,
+ "common": common.NewTester,
+ "xdevice": xdevice.NewTester,
+ "smoke": smoke.NewTester,
+ "pkg_available": pkg_available.NewTester,
+}
+
+var rootCmd *cobra.Command
+
+func init() {
+ m, t := initExecutor()
+ rootCmd = &cobra.Command{
+ Run: func(cmd *cobra.Command, args []string) {
+ loop(m, t)
+ },
+ }
+ runCmd := initRunCmd(m, t)
+ flashCmd := initFlashCmd(m)
+ testCmd := initTestCmd(m, t)
+ rootCmd.AddCommand(runCmd, flashCmd, testCmd)
+}
+
+func initRunCmd(m pkg.Manager, t tester.Tester) *cobra.Command {
+ var success, fail, testcase string
+ runCmd := &cobra.Command{
+ Use: "run",
+ Short: "bin-search in (success, fail] by running the given testcase to find out the first fail, and print the corresponding issue",
+ RunE: func(cmd *cobra.Command, args []string) error {
+ return fotff(m, t, success, fail, testcase)
+ },
+ }
+ runCmd.PersistentFlags().StringVarP(&success, "success", "s", "", "success package directory")
+ runCmd.PersistentFlags().StringVarP(&fail, "fail", "f", "", "fail package directory")
+ runCmd.PersistentFlags().StringVarP(&testcase, "testcase", "t", "", "testcase name")
+ runCmd.MarkPersistentFlagRequired("success")
+ runCmd.MarkPersistentFlagRequired("fail")
+ runCmd.MarkPersistentFlagRequired("testcase")
+ return runCmd
+}
+
+func initFlashCmd(m pkg.Manager) *cobra.Command {
+ var flashPkg, device string
+ flashCmd := &cobra.Command{
+ Use: "flash",
+ Short: "flash the given package",
+ RunE: func(cmd *cobra.Command, args []string) error {
+ return m.Flash(device, flashPkg, context.TODO())
+ },
+ }
+ flashCmd.PersistentFlags().StringVarP(&flashPkg, "package", "p", "", "package directory")
+ flashCmd.PersistentFlags().StringVarP(&device, "device", "d", "", "device sn")
+ flashCmd.MarkPersistentFlagRequired("package")
+ return flashCmd
+}
+
+func initTestCmd(m pkg.Manager, t tester.Tester) *cobra.Command {
+ var targetPkg, device, testCase string
+ testCmd := &cobra.Command{
+ Use: "test",
+ Short: "build, flash, and test the given package on the specified device",
+ RunE: func(cmd *cobra.Command, args []string) error {
+ opt := &rec.FlashAndTestOptions{
+ M: m,
+ T: t,
+ Version: targetPkg,
+ Device: device,
+ TestCase: testCase,
+ }
+ return rec.FlashAndTest(context.TODO(), opt)
+ },
+ }
+ testCmd.PersistentFlags().StringVarP(&targetPkg, "package", "p", "", "package directory")
+ testCmd.PersistentFlags().StringVarP(&device, "device", "d", "", "target device sn")
+ testCmd.PersistentFlags().StringVarP(&testCase, "testcase", "t", "", "test case to run")
+ testCmd.MarkPersistentFlagRequired("package")
+ testCmd.MarkPersistentFlagRequired("device")
+
+ return testCmd
+}
+
+func main() {
+ utils.EnablePprof()
+ if err := rootCmd.Execute(); err != nil {
+ logrus.Errorf("failed to execute: %v", err)
+ os.Exit(1)
+ }
+}
+
+func loop(m pkg.Manager, t tester.Tester) {
+ data, _ := utils.ReadRuntimeData("last_handled.rec")
+ var curPkg = string(data)
+ for {
+ utils.ResetLogOutput()
+ if err := utils.WriteRuntimeData("last_handled.rec", []byte(curPkg)); err != nil {
+ logrus.Errorf("failed to write last_handled.rec: %v", err)
+ }
+ logrus.Info("waiting for a newer package...")
+ var err error
+ curPkg, err = m.GetNewer(curPkg)
+ if err != nil {
+ logrus.Infof("get newer package err: %v", err)
+ continue
+ }
+ utils.SetLogOutput(filepath.Base(curPkg))
+ logrus.Infof("now flash %s...", curPkg)
+ device := res.GetDevice()
+ if err := m.Flash(device, curPkg, context.TODO()); err != nil {
+ logrus.Errorf("flash package dir %s err: %v", curPkg, err)
+ res.ReleaseDevice(device)
+ continue
+ }
+ if err := t.Prepare(m.PkgDir(curPkg), device, context.TODO()); err != nil {
+ logrus.Errorf("do test preparation for package %s err: %v", curPkg, err)
+ continue
+ }
+ logrus.Info("now do test suite...")
+ results, err := t.DoTestTask(device, context.TODO())
+ if err != nil {
+ logrus.Errorf("do test suite for package %s err: %v", curPkg, err)
+ continue
+ }
+ for _, r := range results {
+ logrus.Infof("do test case %s at %s done, result is %v", r.TestCaseName, device, r.Status)
+ }
+ logrus.Infof("now analysis test results...")
+ toFotff := rec.HandleResults(t, device, curPkg, results)
+ res.ReleaseDevice(device)
+ rec.Analysis(m, t, curPkg, toFotff)
+ rec.Save()
+ rec.Report(curPkg, t.TaskName())
+ }
+}
+
+func fotff(m pkg.Manager, t tester.Tester, success, fail, testcase string) error {
+ issueURL, err := rec.FindOutTheFirstFail(m, t, testcase, success, fail)
+ if err != nil {
+ logrus.Errorf("failed to find out the first fail: %v", err)
+ return err
+ }
+ logrus.Infof("the first fail found: %v", issueURL)
+ return nil
+}
+
+func initExecutor() (pkg.Manager, tester.Tester) {
+ //TODO load from config file
+ var conf = struct {
+ PkgManager string `key:"pkg_manager" default:"mock"`
+ Tester string `key:"tester" default:"mock"`
+ }{}
+ utils.ParseFromConfigFile("", &conf)
+ newPkgMgrFunc, ok := newPkgMgrFuncs[conf.PkgManager]
+ if !ok {
+ logrus.Panicf("no package manager found for %s", conf.PkgManager)
+ }
+ newTesterFunc, ok := newTesterFuncs[conf.Tester]
+ if !ok {
+ logrus.Panicf("no tester found for %s", conf.Tester)
+ }
+ return newPkgMgrFunc(), newTesterFunc()
+}
diff --git a/tools/fotff/pkg/dayu200/dayu200.go b/tools/fotff/pkg/dayu200/dayu200.go
new file mode 100644
index 0000000000000000000000000000000000000000..986c8a14d88d2d2f05ea898c24afbf988e114abd
--- /dev/null
+++ b/tools/fotff/pkg/dayu200/dayu200.go
@@ -0,0 +1,104 @@
+/*
+ * Copyright (c) 2022-2023 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package dayu200
+
+import (
+ "context"
+ "fotff/pkg"
+ "fotff/pkg/gitee_common"
+ "fotff/res"
+ "fotff/utils"
+ "github.com/sirupsen/logrus"
+ "strconv"
+ "strings"
+)
+
+type Manager struct {
+ ArchiveDir string `key:"archive_dir" default:"archive"`
+ WatchCI string `key:"watch_ci" default:"false"`
+ Workspace string `key:"workspace" default:"workspace"`
+ Branch string `key:"branch" default:"master"`
+ ManifestBranch string `key:"manifest_branch" default:"master"`
+ FlashTool string `key:"flash_tool" default:"python"`
+ LocationIDList string `key:"location_id_list"`
+
+ *gitee_common.Manager
+ locations map[string]string
+}
+
+// These commands are copied from ci project.
+const (
+ preCompileCMD = `rm -rf prebuilts/clang/ohos/darwin-x86_64/clang-480513;rm -rf prebuilts/clang/ohos/windows-x86_64/clang-480513;rm -rf prebuilts/clang/ohos/linux-x86_64/clang-480513;bash build/prebuilts_download.sh`
+ // compileCMD is copied from ci project and trim useless build-target 'make_test' to enhance build efficiency.
+ compileCMD = `echo 'start' && export NO_DEVTOOL=1 && export CCACHE_LOG_SUFFIX="dayu200-arm32" && export CCACHE_NOHASHDIR="true" && export CCACHE_SLOPPINESS="include_file_ctime" && ./build.sh --product-name rk3568 --ccache --build-target make_all --gn-args enable_notice_collection=false`
+)
+
+// This list is copied from ci project. Some of them are not available and have been commented out.
+var imgList = []string{
+ "out/rk3568/packages/phone/images/MiniLoaderAll.bin",
+ "out/rk3568/packages/phone/images/boot_linux.img",
+ "out/rk3568/packages/phone/images/parameter.txt",
+ "out/rk3568/packages/phone/images/system.img",
+ "out/rk3568/packages/phone/images/uboot.img",
+ "out/rk3568/packages/phone/images/userdata.img",
+ "out/rk3568/packages/phone/images/vendor.img",
+ "out/rk3568/packages/phone/images/resource.img",
+ "out/rk3568/packages/phone/images/config.cfg",
+ "out/rk3568/packages/phone/images/ramdisk.img",
+ // "out/rk3568/packages/phone/images/chipset.img",
+ "out/rk3568/packages/phone/images/sys_prod.img",
+ "out/rk3568/packages/phone/images/chip_prod.img",
+ "out/rk3568/packages/phone/images/updater.img",
+ // "out/rk3568/packages/phone/updater/bin/updater_binary",
+}
+
+func NewManager() pkg.Manager {
+ var ret Manager
+ utils.ParseFromConfigFile("dayu200", &ret)
+ watchCI, err := strconv.ParseBool(ret.WatchCI)
+ if err != nil {
+ logrus.Panicf("can not parse 'watch_ci', please check")
+ }
+ ret.Manager = gitee_common.NewManager("dayu200", ret.Branch, ret.ManifestBranch, ret.ArchiveDir, ret.Workspace, watchCI)
+ devs := res.DeviceList()
+ locs := strings.Split(ret.LocationIDList, ",")
+ if len(devs) != len(locs) {
+ logrus.Panicf("location_id_list and devices mismatch")
+ }
+ ret.locations = map[string]string{}
+ for i, loc := range locs {
+ ret.locations[devs[i]] = loc
+ }
+ return &ret
+}
+
+// Flash function implements pkg.Manager. Flash images in the 'pkg' directory to the given device.
+// If not all necessary images are available in the 'pkg' directory, will build them.
+func (m *Manager) Flash(device string, pkg string, ctx context.Context) error {
+ logrus.Infof("now flash %s", pkg)
+ buildConfig := gitee_common.BuildConfig{
+ Pkg: pkg,
+ PreCompileCMD: preCompileCMD,
+ CompileCMD: compileCMD,
+ ImgList: imgList,
+ }
+ if err := m.Build(buildConfig, ctx); err != nil {
+ logrus.Errorf("build %s fail, err: %v", pkg, err)
+ return err
+ }
+ logrus.Infof("%s is available now, start to flash it", pkg)
+ return m.flashDevice(device, pkg, ctx)
+}
diff --git a/tools/fotff/pkg/dayu200/flash.go b/tools/fotff/pkg/dayu200/flash.go
new file mode 100644
index 0000000000000000000000000000000000000000..6ad078e6198979d6022ceeca1a42f1b70f757535
--- /dev/null
+++ b/tools/fotff/pkg/dayu200/flash.go
@@ -0,0 +1,128 @@
+/*
+ * Copyright (c) 2022 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package dayu200
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "fotff/utils"
+ "github.com/sirupsen/logrus"
+ "os"
+ "path/filepath"
+ "regexp"
+ "strings"
+ "time"
+)
+
+var partList = []string{"boot_linux", "system", "vendor", "userdata", "resource", "ramdisk", "chipset", "sys-prod", "chip-prod", "updater"}
+
+// All timeouts are calculated for normal cases; we are not certain that the timeouts are sufficient if some sleeps are canceled.
+// So simply we do not cancel any Sleep(). TODO: use utils.SleepContext() instead.
+func (m *Manager) flashDevice(device string, pkg string, ctx context.Context) error {
+ if err := utils.TryRebootToLoader(device, ctx); err != nil {
+ return err
+ }
+ if err := m.flashImages(device, pkg, ctx); err != nil {
+ return err
+ }
+ time.Sleep(20 * time.Second) // usually, it takes about 20s to reboot into OpenHarmony
+ if connected := utils.WaitHDC(device, ctx); !connected {
+ logrus.Errorf("flash device %s done, but boots abnormally, hdc connection fail", device)
+ return fmt.Errorf("flash device %s done, but boots abnormally, hdc connection fail", device)
+ }
+ time.Sleep(10 * time.Second) // wait 10s more to ensure system has been started completely
+ logrus.Infof("flash device %s successfully", device)
+ return nil
+}
+
+func (m *Manager) flashImages(device string, pkg string, ctx context.Context) error {
+ logrus.Infof("calling flash tool to flash %s into %s...", pkg, device)
+ locationID := m.locations[device]
+ if locationID == "" {
+ data, _ := utils.ExecCombinedOutputContext(ctx, m.FlashTool, "LD")
+ locationID = strings.TrimPrefix(regexp.MustCompile(`LocationID=\d+`).FindString(string(data)), "LocationID=")
+ if locationID == "" {
+ time.Sleep(5 * time.Second)
+ data, _ := utils.ExecCombinedOutputContext(ctx, m.FlashTool, "LD")
+ locationID = strings.TrimPrefix(regexp.MustCompile(`LocationID=\d+`).FindString(string(data)), "LocationID=")
+ }
+ }
+ logrus.Infof("locationID of %s is [%s]", device, locationID)
+ if err := utils.ExecContext(ctx, m.FlashTool, "-s", locationID, "UL", filepath.Join(m.Workspace, pkg, "MiniLoaderAll.bin"), "-noreset"); err != nil {
+ if errors.Is(err, context.Canceled) {
+ return err
+ }
+ logrus.Errorf("flash MiniLoaderAll.bin fail: %v", err)
+ time.Sleep(5 * time.Second)
+ if err := utils.ExecContext(ctx, m.FlashTool, "-s", locationID, "UL", filepath.Join(m.Workspace, pkg, "MiniLoaderAll.bin"), "-noreset"); err != nil {
+ if errors.Is(err, context.Canceled) {
+ return err
+ }
+ logrus.Errorf("flash MiniLoaderAll.bin fail: %v", err)
+ return err
+ }
+ }
+ time.Sleep(3 * time.Second)
+ if err := utils.ExecContext(ctx, m.FlashTool, "-s", locationID, "DI", "-p", filepath.Join(m.Workspace, pkg, "parameter.txt")); err != nil {
+ if errors.Is(err, context.Canceled) {
+ return err
+ }
+ logrus.Errorf("flash parameter.txt fail: %v", err)
+ return err
+ }
+ time.Sleep(5 * time.Second)
+ if err := utils.ExecContext(ctx, m.FlashTool, "-s", locationID, "DI", "-uboot", filepath.Join(m.Workspace, pkg, "uboot.img"), filepath.Join(m.Workspace, pkg, "parameter.txt")); err != nil {
+ if errors.Is(err, context.Canceled) {
+ return err
+ }
+ logrus.Errorf("flash device fail: %v", err)
+ return err
+ }
+ time.Sleep(5 * time.Second)
+ for _, part := range partList {
+ if _, err := os.Stat(filepath.Join(m.Workspace, pkg, part+".img")); err != nil {
+ if os.IsNotExist(err) {
+ logrus.Infof("part %s.img not exist, ignored", part)
+ continue
+ }
+ return err
+ }
+ if err := utils.ExecContext(ctx, m.FlashTool, "-s", locationID, "DI", "-"+part, filepath.Join(m.Workspace, pkg, part+".img"), filepath.Join(m.Workspace, pkg, "parameter.txt")); err != nil {
+ if errors.Is(err, context.Canceled) {
+ return err
+ }
+ logrus.Errorf("flash device fail: %v", err)
+ logrus.Warnf("try again...")
+ if err := utils.ExecContext(ctx, m.FlashTool, "-s", locationID, "DI", "-"+part, filepath.Join(m.Workspace, pkg, part+".img"), filepath.Join(m.Workspace, pkg, "parameter.txt")); err != nil {
+ if errors.Is(err, context.Canceled) {
+ return err
+ }
+ logrus.Errorf("flash device fail: %v", err)
+ return err
+ }
+ }
+ time.Sleep(3 * time.Second)
+ }
+ time.Sleep(5 * time.Second) // sleep a while for writing
+ if err := utils.ExecContext(ctx, m.FlashTool, "-s", locationID, "RD"); err != nil {
+ if errors.Is(err, context.Canceled) {
+ return err
+ }
+ return fmt.Errorf("reboot device fail: %v", err)
+ }
+ return nil
+}
diff --git a/tools/fotff/pkg/dayu200/upgrade_tool.exe b/tools/fotff/pkg/dayu200/upgrade_tool.exe
new file mode 100644
index 0000000000000000000000000000000000000000..61030691d5beff7b436a93cac5db7ff4a4f0360a
Binary files /dev/null and b/tools/fotff/pkg/dayu200/upgrade_tool.exe differ
diff --git a/tools/fotff/pkg/gitee_build/gitee_build.go b/tools/fotff/pkg/gitee_build/gitee_build.go
new file mode 100644
index 0000000000000000000000000000000000000000..11bbfd4c779d1bed7236116d1a320d2b2a4a8c86
--- /dev/null
+++ b/tools/fotff/pkg/gitee_build/gitee_build.go
@@ -0,0 +1,75 @@
+/*
+ * Copyright (c) 2023 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package gitee_build
+
+import (
+ "context"
+ "fotff/pkg"
+ "fotff/pkg/gitee_common"
+ "fotff/utils"
+ "github.com/sirupsen/logrus"
+ "strings"
+)
+
+type Manager struct {
+ ArchiveDir string `key:"archive_dir" default:"archive"`
+ Workspace string `key:"workspace" default:"workspace"`
+ Branch string `key:"branch" default:"master"`
+ ManifestBranch string `key:"manifest_branch" default:"master"`
+ Component string `key:"component"`
+ PreCompileCMD string `key:"pre_compile_cmd"`
+ CompileCMD string `key:"compile_cmd"`
+ ImageList []string `key:"image_list"`
+
+ *gitee_common.Manager
+}
+
+func NewManager() pkg.Manager {
+ var ret Manager
+ utils.ParseFromConfigFile("gitee_build", &ret)
+ ret.Manager = gitee_common.NewManager(ret.Component, ret.Branch, ret.ManifestBranch, ret.ArchiveDir, ret.Workspace, true)
+ return &ret
+}
+
+func (m *Manager) GetNewer(cur string) (string, error) {
+ return m.GetNewerOrFail(cur)
+}
+
+// Flash function implements pkg.Manager. Since this gitee_build just tests package buildings,
+// there is no need to flash images actually, just build it and return nil unconditionally.
+func (m *Manager) Flash(device string, pkg string, ctx context.Context) error {
+ logrus.Infof("now flash %s", pkg)
+ buildConfig := gitee_common.BuildConfig{
+ Pkg: pkg,
+ PreCompileCMD: m.PreCompileCMD,
+ CompileCMD: m.CompileCMD,
+ ImgList: m.ImageList,
+ }
+ if m.PkgAvailable(buildConfig) {
+ return nil
+ }
+ if strings.Contains(buildConfig.Pkg, "build_fail") {
+ logrus.Warnf("here is a known build_fail token package")
+ } else {
+ if err := m.BuildNoRetry(buildConfig, true, ctx); err != nil {
+ logrus.Warnf("build %s fail, err: %v", pkg, err)
+ } else {
+ logrus.Infof("%s is available now", pkg)
+ }
+ }
+ logrus.Infof("since fotff just tests package buildings, there is no need to flash images actually, mark flash as a success unconditionally")
+ return nil
+}
diff --git a/tools/fotff/pkg/gitee_common/build.go b/tools/fotff/pkg/gitee_common/build.go
new file mode 100644
index 0000000000000000000000000000000000000000..d2efd04b6551eb34d3d0a03ad17f43d42665056a
--- /dev/null
+++ b/tools/fotff/pkg/gitee_common/build.go
@@ -0,0 +1,107 @@
+/*
+ * Copyright (c) 2022 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package gitee_common
+
+import (
+ "context"
+ "fmt"
+ "fotff/res"
+ "fotff/utils"
+ "github.com/sirupsen/logrus"
+ "os"
+ "path/filepath"
+)
+
+type BuildConfig struct { // inputs for one remote package build
+	Pkg           string   // package (directory) name under the manager workspace
+	PreCompileCMD string   // command executed on the build server before compiling
+	CompileCMD    string   // compile command executed on the build server
+	ImgList       []string // remote image paths (relative to the server workspace) to download back
+}
+
+func (m *Manager) Build(config BuildConfig, ctx context.Context) error { // build with one retry; the retry removes the remote ./out dir first
+	if m.PkgAvailable(config) { // images already on disk, skip building
+		return nil
+	}
+	logrus.Infof("%s is not available", config.Pkg)
+	err := m.BuildNoRetry(config, false, ctx) // first attempt: incremental (keep remote ./out)
+	if err == nil {
+		return nil
+	}
+	logrus.Errorf("build pkg %s err: %v", config.Pkg, err)
+	logrus.Infof("rm out and build pkg %s again...", config.Pkg)
+	err = m.BuildNoRetry(config, true, ctx) // second attempt: clean build
+	if err == nil {
+		return nil
+	}
+	logrus.Errorf("build pkg %s err: %v", config.Pkg, err)
+	return err
+}
+
+// PkgAvailable returns true if all necessary images are all available to flash.
+func (m *Manager) PkgAvailable(config BuildConfig) bool {
+	for _, img := range config.ImgList {
+		imgName := filepath.Base(img)
+		if _, err := os.Stat(filepath.Join(m.Workspace, config.Pkg, imgName)); err != nil { // any missing image means not available
+			return false
+		}
+	}
+	return true
+}
+
+// BuildNoRetry obtain an available server, download corresponding codes, and run compile commands
+// to build the corresponding package images, then transfer these images to the 'pkg' directory.
+func (m *Manager) BuildNoRetry(config BuildConfig, rm bool, ctx context.Context) error {
+	logrus.Infof("now Build %s", config.Pkg)
+	server := res.GetBuildServer() // blocks until a build server is free; released on all return paths
+	defer res.ReleaseBuildServer(server)
+	cmd := fmt.Sprintf("mkdir -p %s && cd %s && repo init -u https://gitee.com/openharmony/manifest.git", server.WorkSpace, server.WorkSpace)
+	if err := utils.RunCmdViaSSHContext(ctx, server.Addr, server.User, server.Passwd, cmd); err != nil {
+		return fmt.Errorf("remote: mkdir error: %w", err)
+	}
+	if err := utils.TransFileViaSSH(utils.Upload, server.Addr, server.User, server.Passwd,
+		fmt.Sprintf("%s/.repo/manifest.xml", server.WorkSpace), filepath.Join(m.Workspace, config.Pkg, "manifest_tag.xml")); err != nil { // pin the remote checkout to this package's manifest; args are presumably (remote, local) to mirror the Download call below — verify against utils.TransFileViaSSH
+		return fmt.Errorf("upload and replace manifest error: %w", err)
+	}
+	// 'git lfs install' may fail due to some git hooks. Call 'git lfs update --force' before install to avoid this situation.
+	cmd = fmt.Sprintf("cd %s && repo sync -c --no-tags --force-remove-dirty && repo forall -c 'git reset --hard && git clean -dfx && git lfs update --force && git lfs install && git lfs pull'", server.WorkSpace)
+	if err := utils.RunCmdViaSSHContext(ctx, server.Addr, server.User, server.Passwd, cmd); err != nil {
+		return fmt.Errorf("remote: repo sync error: %w", err)
+	}
+	cmd = fmt.Sprintf("cd %s && %s", server.WorkSpace, config.PreCompileCMD)
+	if err := utils.RunCmdViaSSHContextNoRetry(ctx, server.Addr, server.User, server.Passwd, cmd); err != nil { // compile steps are not retried: they are expensive and not idempotent-safe
+		return fmt.Errorf("remote: pre-compile command error: %w", err)
+	}
+	if rm { // clean build requested by caller
+		cmd = fmt.Sprintf("cd %s && rm -rf out", server.WorkSpace)
+		if err := utils.RunCmdViaSSHContext(ctx, server.Addr, server.User, server.Passwd, cmd); err != nil {
+			return fmt.Errorf("remote: rm ./out command error: %w", err)
+		}
+	}
+	cmd = fmt.Sprintf("cd %s && %s", server.WorkSpace, config.CompileCMD)
+	if err := utils.RunCmdViaSSHContextNoRetry(ctx, server.Addr, server.User, server.Passwd, cmd); err != nil {
+		return fmt.Errorf("remote: compile command error: %w", err)
+	}
+	// has been built already, pitiful if canceled, so continue copying
+	for _, f := range config.ImgList {
+		imgName := filepath.Base(f)
+		if err := utils.TransFileViaSSH(utils.Download, server.Addr, server.User, server.Passwd,
+			fmt.Sprintf("%s/%s", server.WorkSpace, f), filepath.Join(m.Workspace, config.Pkg, imgName)); err != nil {
+			return fmt.Errorf("download file %s error: %w", f, err)
+		}
+	}
+	return nil
+}
diff --git a/tools/fotff/pkg/gitee_common/common.go b/tools/fotff/pkg/gitee_common/common.go
new file mode 100644
index 0000000000000000000000000000000000000000..18330d4f520ac53ad491c49ffd751125e2d9d0ca
--- /dev/null
+++ b/tools/fotff/pkg/gitee_common/common.go
@@ -0,0 +1,153 @@
+/*
+ * Copyright (c) 2022 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package gitee_common
+
+import (
+ "code.cloudfoundry.org/archiver/extractor"
+ "context"
+ "fmt"
+ "fotff/utils"
+ "github.com/sirupsen/logrus"
+ "os"
+ "path/filepath"
+ "strings"
+ "time"
+)
+
+type Manager struct { // shared base for gitee-backed package managers
+	Component      string // component name used in CI queries
+	Branch         string // code branch used when querying merge requests
+	ManifestBranch string // manifest repo branch used for CI queries
+	ArchiveDir     string // dir holding archived .tar.gz packages
+	Workspace      string // dir where packages are extracted and generated
+	WatchCI        bool   // true: poll CI daily builds for new packages; false: watch ArchiveDir
+}
+
+func NewManager(component string, branch string, manifestBranch string, archiveDir string, workspace string, watchCI bool) *Manager { // also starts the background workspace janitor
+	var ret = Manager{
+		Component:      component,
+		Branch:         branch,
+		ManifestBranch: manifestBranch,
+		ArchiveDir:     archiveDir,
+		Workspace:      workspace,
+		WatchCI:        watchCI,
+	}
+	go ret.cleanupOutdated() // runs for the life of the process
+	return &ret
+}
+
+func (m *Manager) cleanupOutdated() { // daily janitor: purge contents of package dirs untouched for >7 days
+	t := time.NewTicker(24 * time.Hour)
+	for {
+		<-t.C
+		es, err := os.ReadDir(m.Workspace)
+		if err != nil {
+			logrus.Errorf("can not read %s: %v", m.Workspace, err)
+			continue
+		}
+		for _, e := range es {
+			if !e.IsDir() { // only package directories are subject to cleanup
+				continue
+			}
+			path := filepath.Join(m.Workspace, e.Name())
+			info, err := e.Info()
+			if err != nil {
+				logrus.Errorf("can not read %s info: %v", path, err)
+				continue
+			}
+			if time.Since(info.ModTime()) > 7*24*time.Hour { // idiomatic form of time.Now().Sub(...)
+				logrus.Warnf("%s outdated, cleaning up its contents...", path) // typo fix: "cleanning" -> "cleaning"
+				m.cleanupPkgFiles(path)
+			}
+		}
+	}
+}
+
+func (m *Manager) cleanupPkgFiles(path string) { // removes everything in a package dir except its reproducibility metadata
+	es, err := os.ReadDir(path)
+	if err != nil {
+		logrus.Errorf("can not read %s: %v", path, err)
+		return
+	}
+	for _, e := range es {
+		if e.Name() == "manifest_tag.xml" || e.Name() == "__last_issue__" { // keep: needed to regenerate/inspect the step later
+			continue
+		}
+		if err := os.RemoveAll(filepath.Join(path, e.Name())); err != nil { // best effort: log and continue with the rest
+			logrus.Errorf("remove %s fail: %v", filepath.Join(path, e.Name()), err)
+		}
+	}
+}
+
+// Flash function implements pkg.Manager. Flash images in the 'pkg' directory to the given device.
+func (m *Manager) Flash(device string, pkg string, ctx context.Context) error { // placeholder: concrete managers are expected to override this
+	logrus.Warnf("not implemented yet")
+	return nil
+}
+
+func (m *Manager) Steps(from, to string) (pkgs []string, err error) { // returns the ordered intermediate packages between two package names
+	if from == to {
+		return nil, fmt.Errorf("steps err: 'from' %s and 'to' %s are the same", from, to)
+	}
+	if c, found := utils.CacheGet(fmt.Sprintf("%s_steps", m.Component), from+"__to__"+to); found { // cache hit: reuse previously generated steps
+		logrus.Infof("steps from %s to %s are cached", from, to)
+		logrus.Infof("steps: %v", c.([]string))
+		return c.([]string), nil
+	}
+	if pkgs, err = m.stepsFromGitee(from, to); err != nil {
+		logrus.Errorf("failed to gen steps from gitee, err: %v", err)
+		logrus.Warnf("fallback to getting steps from CI...")
+		if pkgs, err = m.stepsFromCI(from, to); err != nil {
+			return pkgs, err
+		}
+		return pkgs, nil // NOTE(review): CI-fallback results are returned without being cached — confirm intentional
+	}
+	utils.CacheSet(fmt.Sprintf("%s_steps", m.Component), from+"__to__"+to, pkgs) // only the gitee path populates the cache
+	return pkgs, nil
+}
+
+func (m *Manager) LastIssue(pkg string) (string, error) { // reads the issue URL(s) recorded when the package dir was generated
+	data, err := os.ReadFile(filepath.Join(m.Workspace, pkg, "__last_issue__"))
+	return string(data), err
+}
+
+func (m *Manager) GetNewer(cur string) (string, error) { // blocks until a package newer than cur appears, then extracts it into the workspace
+	var newFile string
+	if m.WatchCI { // poll CI daily builds
+		newFile = m.getNewerFromCI(cur + ".tar.gz")
+	} else { // watch the archive dir, ordered by the timestamp embedded in file names
+		newFile = m.getNewerFileFromDir(cur+".tar.gz", func(files []os.DirEntry, i, j int) bool {
+			ti, _ := parseTime(files[i].Name()) // parse errors yield zero times and sort first
+			tj, _ := parseTime(files[j].Name())
+			return ti.Before(tj)
+		})
+	}
+	ex := extractor.NewTgz()
+	dirName := strings.TrimSuffix(newFile, ".tar.gz")
+	dir := filepath.Join(m.Workspace, dirName)
+	if _, err := os.Stat(dir); err == nil { // already extracted earlier
+		return dirName, nil
+	}
+	logrus.Infof("extracting %s to %s...", filepath.Join(m.ArchiveDir, newFile), dir)
+	if err := ex.Extract(filepath.Join(m.ArchiveDir, newFile), dir); err != nil {
+		return dirName, err
+	}
+	return dirName, nil
+}
+
+func (m *Manager) PkgDir(pkg string) string { // absolute-ish path of a package dir inside the workspace
+	return filepath.Join(m.Workspace, pkg)
+}
diff --git a/tools/fotff/pkg/gitee_common/get_newer_ci.go b/tools/fotff/pkg/gitee_common/get_newer_ci.go
new file mode 100644
index 0000000000000000000000000000000000000000..33fa9881a8c9c652625f4c77a79f1fadd7ce415b
--- /dev/null
+++ b/tools/fotff/pkg/gitee_common/get_newer_ci.go
@@ -0,0 +1,142 @@
+/*
+ * Copyright (c) 2022 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package gitee_common
+
+import (
+ "encoding/json"
+ "fmt"
+ "fotff/utils"
+ "github.com/sirupsen/logrus"
+ "io"
+ "net/http"
+ "os"
+ "path/filepath"
+ "time"
+)
+
+type DailyBuildsQueryParam struct { // request body for the CI daily-builds query endpoint
+	ProjectName string `json:"projectName"`
+	Branch      string `json:"branch"`
+	Component   string `json:"component"`
+	BuildStatus string `json:"buildStatus"` // e.g. "success"; empty means all statuses
+	StartTime   string `json:"startTime"`
+	EndTime     string `json:"endTime"`
+	PageNum     int    `json:"pageNum"`
+	PageSize    int    `json:"pageSize"`
+}
+
+type DailyBuildsResp struct { // response envelope of the daily-builds query
+	Result struct {
+		DailyBuildVos []*DailyBuild `json:"dailyBuildVos"`
+		Total         int           `json:"total"`
+	} `json:"result"`
+}
+
+type DailyBuild struct { // one daily-build record returned by CI
+	CurrentStatus   string `json:"currentStatus"`   // "end" once the build pipeline has finished
+	BuildStartTime  string `json:"buildStartTime"`
+	BuildFailReason string `json:"buildFailReason"` // empty on success; e.g. "compile_failed"
+	Id              string `json:"id"`
+	ObsPath         string `json:"obsPath"`
+	ImgObsPath      string `json:"imgObsPath"` // preferred download URL when present
+}
+
+func (m *Manager) loopCI(param DailyBuildsQueryParam, cur string, getFn func(cur string, resp *DailyBuild) string) string { // polls CI every 10 minutes until getFn yields a non-empty file name
+	for {
+		file := func() string { // one poll attempt; "" means retry later
+			data, err := json.Marshal(param)
+			if err != nil {
+				logrus.Errorf("can not marshal query param: %v", err)
+				return ""
+			}
+			resp, err := utils.DoSimpleHttpReq(http.MethodPost, "http://ci.openharmony.cn/api/ci-backend/ci-portal/v1/dailybuilds", data, map[string]string{"Content-Type": "application/json;charset=UTF-8"})
+			if err != nil {
+				logrus.Errorf("can not query builds: %v", err)
+				return ""
+			}
+			var dailyBuildsResp DailyBuildsResp
+			if err := json.Unmarshal(resp, &dailyBuildsResp); err != nil {
+				logrus.Errorf("can not unmarshal resp [%s]: %v", string(resp), err)
+				return ""
+			}
+			if len(dailyBuildsResp.Result.DailyBuildVos) == 0 {
+				return ""
+			}
+			if dailyBuildsResp.Result.DailyBuildVos[0].CurrentStatus != "end" { // build still in progress
+				return ""
+			}
+			return getFn(cur, dailyBuildsResp.Result.DailyBuildVos[0]) // only the newest record is considered
+		}()
+		if file != "" {
+			return file
+		}
+		time.Sleep(10 * time.Minute)
+	}
+}
+
+func (m *Manager) getNewerFromCI(cur string) string { // blocks until a successful daily build newer than cur is downloaded
+	return m.loopCI(DailyBuildsQueryParam{
+		ProjectName: "openharmony",
+		Branch:      m.ManifestBranch,
+		Component:   m.Component,
+		BuildStatus: "success", // successful builds only; failures are handled by GetNewerOrFail
+		PageNum:     1,
+		PageSize:    1,
+	}, cur, m.getNewerDailyBuild)
+}
+
+func (m *Manager) getNewerDailyBuild(cur string, db *DailyBuild) string { // downloads db's package unless it is the one we already have
+	p := db.ImgObsPath
+	if p == "" { // fall back to the generic OBS path when no image path is provided
+		p = db.ObsPath
+	}
+	if filepath.Base(p) == cur { // nothing newer yet
+		return ""
+	}
+	logrus.Infof("new package found, name: %s", filepath.Base(p))
+	file, err := m.downloadToWorkspace(p)
+	if err != nil {
+		logrus.Errorf("can not download package %s: %v", p, err)
+		return "" // caller (loopCI) will retry on the next tick
+	}
+	return file
+}
+
+func (m *Manager) downloadToWorkspace(url string) (string, error) { // streams url into ArchiveDir; returns the archived file name
+	if _, err := parseTime(filepath.Base(url)); err != nil { // reject names with no embedded timestamp: ordering depends on it
+		logrus.Errorf("can not get package time from %s, skipping", filepath.Base(url))
+		return "", fmt.Errorf("can not get package time from %s, skipping", filepath.Base(url))
+	}
+	logrus.Infof("downloading %s", url)
+	resp, err := utils.DoSimpleHttpReqRaw(http.MethodGet, url, nil, nil)
+	if err != nil {
+		return "", err
+	}
+	defer resp.Body.Close()
+	if err := os.MkdirAll(m.ArchiveDir, 0750); err != nil {
+		return "", err
+	}
+	f, err := os.Create(filepath.Join(m.ArchiveDir, filepath.Base(url)))
+	if err != nil {
+		return "", err
+	}
+	defer f.Close() // NOTE(review): Close error on a written file is ignored — verify acceptable
+	if _, err := io.CopyBuffer(f, resp.Body, make([]byte, 16*1024*1024)); err != nil { // 16MB buffer for large images
+		return "", err
+	}
+	logrus.Infof("%s downloaded successfully", url)
+	return filepath.Base(url), nil
+}
diff --git a/tools/fotff/pkg/gitee_common/get_newer_dir.go b/tools/fotff/pkg/gitee_common/get_newer_dir.go
new file mode 100644
index 0000000000000000000000000000000000000000..03ca9a5862f2319e06568d8a54d6d2ea29422a4a
--- /dev/null
+++ b/tools/fotff/pkg/gitee_common/get_newer_dir.go
@@ -0,0 +1,45 @@
+/*
+ * Copyright (c) 2023 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package gitee_common
+
+import (
+ "github.com/sirupsen/logrus"
+ "os"
+ "sort"
+ "time"
+)
+
+func (m *Manager) getNewerFileFromDir(cur string, less func(files []os.DirEntry, i, j int) bool) string { // polls ArchiveDir every 10s until its newest entry differs from cur
+	for {
+		files, err := os.ReadDir(m.ArchiveDir)
+		if err != nil {
+			logrus.Errorf("read dir %s err: %s", m.ArchiveDir, err)
+			time.Sleep(10 * time.Second)
+			continue
+		}
+		sort.Slice(files, func(i, j int) bool {
+			return less(files, i, j) // caller-provided ordering, typically by embedded timestamp
+		})
+		if len(files) != 0 {
+			f := files[len(files)-1] // newest entry per the ordering
+			if f.Name() != cur {
+				logrus.Infof("new package found, name: %s", f.Name())
+				return f.Name()
+			}
+		}
+		time.Sleep(10 * time.Second)
+	}
+}
diff --git a/tools/fotff/pkg/gitee_common/get_newer_or_fail.go b/tools/fotff/pkg/gitee_common/get_newer_or_fail.go
new file mode 100644
index 0000000000000000000000000000000000000000..d6b8ef230d5ed88c62e6fd4e5ae9f5457a6ede2a
--- /dev/null
+++ b/tools/fotff/pkg/gitee_common/get_newer_or_fail.go
@@ -0,0 +1,111 @@
+/*
+ * Copyright (c) 2023 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package gitee_common
+
+import (
+ "code.cloudfoundry.org/archiver/compressor"
+ "code.cloudfoundry.org/archiver/extractor"
+ "fmt"
+ "fotff/utils"
+ "github.com/sirupsen/logrus"
+ "net/http"
+ "os"
+ "path/filepath"
+ "strings"
+ "time"
+)
+
+func (m *Manager) GetNewerOrFail(cur string) (string, error) { // like GetNewer, but also surfaces CI compile failures as synthetic packages
+	newFile := m.getNewerOrFailFromCI(cur + ".tar.gz")
+	ex := extractor.NewTgz()
+	dirName := strings.TrimSuffix(newFile, ".tar.gz")
+	dir := filepath.Join(m.Workspace, dirName)
+	if _, err := os.Stat(dir); err == nil { // already extracted earlier
+		return dirName, nil
+	}
+	logrus.Infof("extracting %s to %s...", filepath.Join(m.ArchiveDir, newFile), dir)
+	if err := ex.Extract(filepath.Join(m.ArchiveDir, newFile), dir); err != nil {
+		return dirName, err
+	}
+	return dirName, nil
+}
+
+func (m *Manager) getNewerOrFailFromCI(cur string) string { // like getNewerFromCI, but no BuildStatus filter: failed builds are returned too
+	return m.loopCI(DailyBuildsQueryParam{
+		ProjectName: "openharmony",
+		Branch:      m.ManifestBranch,
+		Component:   m.Component,
+		PageNum:     1,
+		PageSize:    1,
+	}, cur, m.getNewerDailyBuildOrFail)
+}
+
+func (m *Manager) getNewerDailyBuildOrFail(cur string, db *DailyBuild) string { // success -> download; compile failure -> synthesize a "build_fail" package
+	switch db.BuildFailReason {
+	case "": // build succeeded
+		return m.getNewerDailyBuild(cur, db)
+	case "compile_failed":
+		lastSuccessTime, err := parseTime(cur)
+		if err != nil {
+			logrus.Errorf("can not get package time from %s, skipping", cur)
+			return ""
+		}
+		nowFailTime, err := parseTime(db.BuildStartTime)
+		if err != nil {
+			logrus.Errorf("can not get time from %s, skipping", db.BuildStartTime) // bugfix: log the value that failed to parse, not cur
+			return ""
+		}
+		if lastSuccessTime == nowFailTime { // this failure is already the one we reported
+			return ""
+		}
+		return m.genFailedPackage(lastSuccessTime, nowFailTime)
+	default: // other failure reasons are ignored; keep polling
+		return ""
+	}
+}
+
+func (m *Manager) genFailedPackage(lastSuccessTime, failedBuildStartTime time.Time) string { // builds a manifest-only "build_fail" package for the failed CI build
+	pkg := fmt.Sprintf("%s_%s_build_fail", m.Component, failedBuildStartTime.Format("20060102_150405"))
+	logrus.Infof("getting failed package manifest for %s(%s) at %s", m.Component, m.ManifestBranch, failedBuildStartTime)
+	tags, err := m.getAllTags(lastSuccessTime, failedBuildStartTime)
+	if err != nil {
+		logrus.Errorf("can not get latest tag from ci, err: %v", err)
+		return ""
+	}
+	if len(tags) == 0 {
+		logrus.Error("can not get latest tag from ci, tag list is empty")
+		return ""
+	}
+	if err := os.MkdirAll(filepath.Join(m.Workspace, pkg), 0750); err != nil {
+		return ""
+	}
+	resp, err := utils.DoSimpleHttpReq(http.MethodGet, tags[len(tags)-1].TagFileURL, nil, nil) // newest tag in range represents the failed build's code state
+	if err != nil {
+		logrus.Errorf("can not get latest tag file from ci, err: %v", err)
+		return ""
+	}
+	err = os.WriteFile(filepath.Join(m.Workspace, pkg, "manifest_tag.xml"), resp, 0640)
+	if err != nil {
+		logrus.Errorf("can not write latest tag file, err: %v", err)
+		return ""
+	}
+	if err := compressor.NewTgz().Compress(filepath.Join(m.Workspace, pkg), filepath.Join(m.ArchiveDir, pkg+".tar.gz")); err != nil {
+		logrus.Errorf("can not compress package, err: %v", err) // bugfix: message was copy-pasted from the WriteFile branch
+		return ""
+	}
+	return pkg + ".tar.gz"
+}
diff --git a/tools/fotff/pkg/gitee_common/steps_ci.go b/tools/fotff/pkg/gitee_common/steps_ci.go
new file mode 100644
index 0000000000000000000000000000000000000000..b45682d941af8b16f48c152618774799a9cf5357
--- /dev/null
+++ b/tools/fotff/pkg/gitee_common/steps_ci.go
@@ -0,0 +1,156 @@
+/*
+ * Copyright (c) 2022 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package gitee_common
+
+import (
+ "encoding/json"
+ "fmt"
+ "fotff/utils"
+ "github.com/sirupsen/logrus"
+ "net/http"
+ "os"
+ "path/filepath"
+ "sort"
+ "time"
+)
+
+type TagQueryParam struct { // request body for the CI build-tag query endpoint
+	ProjectName  string `json:"projectName"`
+	Branch       string `json:"branch"`
+	ManifestFile string `json:"manifestFile"`
+	StartTime    string `json:"startTime"` // date only, format 2006-01-02
+	EndTime      string `json:"endTime"`
+	PageNum      int    `json:"pageNum"`
+	PageSize     int    `json:"pageSize"`
+}
+
+type TagResp struct { // response envelope of the build-tag query
+	Result struct {
+		TagList []*Tag `json:"tagList"`
+		Total   int    `json:"total"`
+	} `json:"result"`
+}
+
+type Tag struct { // one CI build tag (a code snapshot with its triggering issue/MRs)
+	Id         string   `json:"id"`
+	Issue      string   `json:"issue"`
+	PrList     []string `json:"prList"`
+	TagFileURL string   `json:"tagFileUrl"`
+	Timestamp  string   `json:"timestamp"` // format 2006-01-02 15:04:05, local time
+}
+
+func (m *Manager) stepsFromCI(from, to string) (pkgs []string, err error) { // fallback step source: one package per CI tag between the two package times
+	startTime, err := parseTime(from)
+	if err != nil {
+		return nil, err
+	}
+	endTime, err := parseTime(to)
+	if err != nil {
+		return nil, err
+	}
+	return m.getAllStepsFromTags(startTime, endTime)
+}
+
+func (m *Manager) getAllStepsFromTags(from, to time.Time) (pkgs []string, err error) { // materializes a package dir for every tag in (from, to)
+	tags, err := m.getAllTags(from, to)
+	if err != nil {
+		return nil, err
+	}
+	for _, tag := range tags {
+		pkg, err := m.genTagPackage(tag)
+		if err != nil {
+			return nil, err
+		}
+		pkgs = append(pkgs, pkg) // tags arrive sorted by timestamp, so pkgs is chronological
+	}
+	return pkgs, nil
+}
+
+func (m *Manager) getAllTags(from, to time.Time) (ret []*Tag, err error) { // pages through CI tags, dedups by Id, keeps those strictly inside (from, to), sorted by timestamp
+	var deDup = make(map[string]*Tag)
+	var pageNum = 1
+	for {
+		var q = TagQueryParam{
+			ProjectName:  "openharmony",
+			Branch:       m.ManifestBranch,
+			ManifestFile: "default.xml",
+			StartTime:    from.Local().Format("2006-01-02"), // endpoint filters by date only; exact times filtered below
+			EndTime:      to.Local().Format("2006-01-02"),
+			PageNum:      pageNum,
+			PageSize:     10000,
+		}
+		data, err := json.Marshal(q)
+		if err != nil {
+			return nil, err
+		}
+		resp, err := utils.DoSimpleHttpReq(http.MethodPost, "http://ci.openharmony.cn/api/ci-backend/ci-portal/v1/build/tag", data, map[string]string{"Content-Type": "application/json;charset=UTF-8"})
+		if err != nil {
+			return nil, err
+		}
+		var tagResp TagResp
+		if err := json.Unmarshal(resp, &tagResp); err != nil {
+			return nil, err
+		}
+		for _, tag := range tagResp.Result.TagList {
+			if _, ok := deDup[tag.Id]; ok {
+				continue
+			}
+			deDup[tag.Id] = tag
+			date, err := time.ParseInLocation("2006-01-02 15:04:05", tag.Timestamp, time.Local)
+			if err != nil {
+				return nil, err
+			}
+			if date.After(from) && date.Before(to) { // exclusive bounds: endpoints themselves are not steps
+				ret = append(ret, tag)
+			}
+		}
+		if len(deDup) == tagResp.Result.Total { // NOTE(review): loops forever if Total never matches the deduped count — verify endpoint contract
+			break
+		}
+		pageNum++
+	}
+	sort.Slice(ret, func(i, j int) bool {
+		return ret[i].Timestamp < ret[j].Timestamp // lexicographic compare is chronological for this fixed format
+	})
+	return ret, nil
+}
+
+func (m *Manager) genTagPackage(tag *Tag) (pkg string, err error) { // writes a package dir (manifest + issue record) for one CI tag; pkg name is the tag Id
+	defer func() {
+		logrus.Infof("package dir %s for tag %v generated", pkg, tag.TagFileURL) // NOTE(review): also logs on error paths with empty pkg — confirm intended
+	}()
+	if err := os.MkdirAll(filepath.Join(m.Workspace, tag.Id), 0750); err != nil {
+		return "", err
+	}
+	var issues []string
+	if len(tag.Issue) == 0 { // no issue recorded: fall back to the PR list
+		issues = tag.PrList
+	} else {
+		issues = []string{tag.Issue}
+	}
+	if err := os.WriteFile(filepath.Join(m.Workspace, tag.Id, "__last_issue__"), []byte(fmt.Sprintf("%v", issues)), 0640); err != nil {
+		return "", err
+	}
+	resp, err := utils.DoSimpleHttpReq(http.MethodGet, tag.TagFileURL, nil, nil)
+	if err != nil {
+		return "", err
+	}
+	err = os.WriteFile(filepath.Join(m.Workspace, tag.Id, "manifest_tag.xml"), resp, 0640)
+	if err != nil {
+		return "", err
+	}
+	return tag.Id, nil
+}
diff --git a/tools/fotff/pkg/gitee_common/steps_gitee.go b/tools/fotff/pkg/gitee_common/steps_gitee.go
new file mode 100644
index 0000000000000000000000000000000000000000..27540353ce845baf6795693b8168f9d484bca368
--- /dev/null
+++ b/tools/fotff/pkg/gitee_common/steps_gitee.go
@@ -0,0 +1,367 @@
+/*
+ * Copyright (c) 2022 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package gitee_common
+
+import (
+ "bufio"
+ "bytes"
+ "encoding/xml"
+ "fmt"
+ "fotff/vcs"
+ "fotff/vcs/gitee"
+ "github.com/huandu/go-clone"
+ "github.com/sirupsen/logrus"
+ "os"
+ "path/filepath"
+ "regexp"
+ "sort"
+ "strconv"
+ "strings"
+ "sync"
+ "time"
+)
+
+type IssueInfo struct { // accumulated data about one issue while merging MRs
+	visited          bool                 // marks traversal progress in combineOtherRelatedIssue
+	RelatedIssues    []string             // other issue URLs referenced by this issue's MRs
+	MRs              []*gitee.Commit      // merge-request commits attached to this issue
+	StructCTime      string               // commit time of the manifest change, if any
+	StructureUpdates []*vcs.ProjectUpdate // repo add/remove/path changes implied by manifest MRs
+}
+
+type Step struct { // one fotff bisection step: a set of issues and their MRs applied together
+	IssueURLs        []string
+	MRs              []*gitee.Commit
+	StructCTime      string
+	StructureUpdates []*vcs.ProjectUpdate
+}
+
+func (m *Manager) stepsFromGitee(from, to string) (pkgs []string, err error) { // derives per-issue steps by diffing the two packages' manifests and querying gitee
+	updates, err := m.getRepoUpdates(from, to)
+	if err != nil {
+		return nil, err
+	}
+	startTime, err := parseTime(from)
+	if err != nil {
+		return nil, err
+	}
+	endTime, err := parseTime(to)
+	if err != nil {
+		return nil, err
+	}
+	logrus.Infof("find %d repo updates from %s to %s", len(updates), from, to)
+	steps, err := getAllStepsFromGitee(startTime, endTime, m.Branch, m.ManifestBranch, updates)
+	if err != nil {
+		return nil, err
+	}
+	logrus.Infof("find total %d steps from %s to %s", len(steps), from, to)
+	baseManifest, err := vcs.ParseManifestFile(filepath.Join(m.Workspace, from, "manifest_tag.xml"))
+	if err != nil {
+		return nil, err
+	}
+	for _, step := range steps {
+		var newPkg string
+		if newPkg, baseManifest, err = m.genStepPackage(baseManifest, step); err != nil { // each step's manifest builds on the previous one
+			return nil, err
+		}
+		pkgs = append(pkgs, newPkg)
+	}
+	return pkgs, nil
+}
+
+func (m *Manager) getRepoUpdates(from, to string) (updates []vcs.ProjectUpdate, err error) { // diff of the two packages' manifest_tag.xml files
+	m1, err := vcs.ParseManifestFile(filepath.Join(m.Workspace, from, "manifest_tag.xml"))
+	if err != nil {
+		return nil, err
+	}
+	m2, err := vcs.ParseManifestFile(filepath.Join(m.Workspace, to, "manifest_tag.xml"))
+	if err != nil {
+		return nil, err
+	}
+	return vcs.GetRepoUpdates(m1, m2)
+}
+
+func getAllStepsFromGitee(startTime, endTime time.Time, branch string, manifestBranch string, updates []vcs.ProjectUpdate) (ret []Step, err error) { // MRs -> issues -> time-ordered steps
+	allMRs, err := getAllMRs(startTime, endTime, branch, manifestBranch, updates)
+	if err != nil {
+		return nil, err
+	}
+	issueInfos, err := combineMRsToIssue(allMRs, branch)
+	if err != nil {
+		return nil, err
+	}
+	return combineIssuesToStep(issueInfos)
+}
+
+func getAllMRs(startTime, endTime time.Time, branch string, manifestBranch string, updates []vcs.ProjectUpdate) (allMRs []*gitee.Commit, err error) { // collect every MR commit behind the given repo updates
+	var once sync.Once
+	for _, update := range updates {
+		var prs []*gitee.Commit
+		if update.P1.StructureDiff(update.P2) { // repo added/removed/moved: fall back to time-window queries
+			once.Do(func() { // manifest repo is queried at most once for the whole batch
+				prs, err = gitee.GetBetweenTimeMRs("openharmony", "manifest", manifestBranch, startTime, endTime)
+			})
+			if update.P1 != nil {
+				var p1 []*gitee.Commit
+				p1, err = gitee.GetBetweenTimeMRs("openharmony", update.P1.Name, branch, startTime, endTime) // NOTE(review): err from once.Do/previous fetch may be overwritten here before the check below — verify
+				prs = append(prs, p1...)
+			}
+			if update.P2 != nil {
+				var p2 []*gitee.Commit
+				p2, err = gitee.GetBetweenTimeMRs("openharmony", update.P2.Name, branch, startTime, endTime)
+				prs = append(prs, p2...)
+			}
+		} else { // same repo, revision moved: query the exact revision range
+			prs, err = gitee.GetBetweenMRs(gitee.CompareParam{
+				Head:  update.P2.Revision,
+				Base:  update.P1.Revision,
+				Owner: "openharmony",
+				Repo:  update.P2.Name,
+			})
+		}
+		if err != nil {
+			return nil, err
+		}
+		allMRs = append(allMRs, prs...)
+	}
+	logrus.Infof("find total %d merge request commits of all repo updates", len(allMRs))
+	return
+}
+
+func combineMRsToIssue(allMRs []*gitee.Commit, branch string) (map[string]*IssueInfo, error) { // groups MR commits by the issue URL(s) they resolve
+	ret := make(map[string]*IssueInfo)
+	for _, mr := range allMRs {
+		num, err := strconv.Atoi(strings.Trim(regexp.MustCompile(`!\d+ `).FindString(mr.Commit.Message), "! ")) // MR number is embedded as "!123 " in the merge commit message
+		if err != nil {
+			return nil, fmt.Errorf("parse MR message for %s fail: %s", mr.URL, err)
+		}
+		issues, err := gitee.GetMRIssueURL(mr.Owner, mr.Repo, num)
+		if err != nil {
+			return nil, err
+		}
+		if len(issues) == 0 { // no linked issue: treat the MR itself as the issue key
+			issues = []string{mr.URL}
+		}
+		var scs []*vcs.ProjectUpdate
+		var scTime string
+		if mr.Owner == "openharmony" && mr.Repo == "manifest" { // manifest MRs carry repo structure changes
+			if scTime, scs, err = parseStructureUpdates(mr, branch); err != nil {
+				return nil, err
+			}
+		}
+		for i, issue := range issues {
+			if _, ok := ret[issue]; !ok {
+				ret[issue] = &IssueInfo{
+					MRs: []*gitee.Commit{mr},
+					RelatedIssues: append(append([]string{}, issues[:i]...), issues[i+1:]...), // bugfix: copy before appending — append(issues[:i], ...) aliases and clobbers the issues slice being iterated
+					StructCTime: scTime,
+					StructureUpdates: scs,
+				}
+			} else {
+				ret[issue] = &IssueInfo{
+					MRs: append(ret[issue].MRs, mr),
+					RelatedIssues: append(ret[issue].RelatedIssues, append(append([]string{}, issues[:i]...), issues[i+1:]...)...), // bugfix: same aliasing hazard as above
+					StructCTime: scTime,
+					StructureUpdates: append(ret[issue].StructureUpdates, scs...),
+				}
+			}
+		}
+	}
+	logrus.Infof("find total %d issues of all repo updates", len(ret))
+	return ret, nil
+}
+
+func combineOtherRelatedIssue(parent, self *IssueInfo, all map[string]*IssueInfo) { // DFS-merge transitively related issues into parent, deduping as it goes
+	if self.visited { // already folded into some parent; avoid cycles
+		return
+	}
+	self.visited = true
+	for _, other := range self.RelatedIssues {
+		if son, ok := all[other]; ok {
+			combineOtherRelatedIssue(self, son, all)
+			delete(all, other) // merged issues are removed so each appears in exactly one step
+		}
+	}
+	parent.RelatedIssues = deDupIssues(append(parent.RelatedIssues, self.RelatedIssues...))
+	parent.MRs = deDupMRs(append(parent.MRs, self.MRs...))
+	parent.StructureUpdates = deDupProjectUpdates(append(parent.StructureUpdates, self.StructureUpdates...))
+	if len(parent.StructCTime) != 0 && parent.StructCTime < self.StructCTime { // NOTE(review): when parent.StructCTime is empty, self's time is never adopted — confirm intended
+		parent.StructCTime = self.StructCTime
+	}
+}
+
+func deDupProjectUpdates(us []*vcs.ProjectUpdate) (retMRs []*vcs.ProjectUpdate) { // O(n^2) dedup preserving first occurrence; compares P1/P2 by pointer identity, not value
+	dupIndexes := make([]bool, len(us))
+	for i := range us {
+		for j := i + 1; j < len(us); j++ {
+			if us[j].P1 == us[i].P1 && us[j].P2 == us[i].P2 {
+				dupIndexes[j] = true
+			}
+		}
+	}
+	for i, dup := range dupIndexes {
+		if dup {
+			continue
+		}
+		retMRs = append(retMRs, us[i])
+	}
+	return
+}
+
+func deDupMRs(mrs []*gitee.Commit) (retMRs []*gitee.Commit) { // dedup by commit SHA; output order is map-iteration order (nondeterministic)
+	tmp := make(map[string]*gitee.Commit)
+	for _, m := range mrs {
+		tmp[m.SHA] = m
+	}
+	for _, m := range tmp {
+		retMRs = append(retMRs, m)
+	}
+	return
+}
+
+func deDupIssues(issues []string) (retIssues []string) { // dedup via map; output order is map-iteration order (nondeterministic)
+	tmp := make(map[string]string)
+	for _, i := range issues {
+		tmp[i] = i
+	}
+	for _, i := range tmp {
+		retIssues = append(retIssues, i)
+	}
+	return
+}
+
+// parseStructureUpdates get changed XMLs and parse it to recognize repo structure changes.
+// Since we do not care which revision a repo was, P1 is not welly handled, just assign it not nil for performance.
+func parseStructureUpdates(commit *gitee.Commit, branch string) (string, []*vcs.ProjectUpdate, error) {
+	tmp := make(map[string]vcs.ProjectUpdate)
+	if len(commit.Files) == 0 {
+		// commit that queried from MR req does not contain file details, should fetch again
+		var err error
+		if commit, err = gitee.GetCommit(commit.Owner, commit.Repo, commit.SHA); err != nil {
+			return "", nil, err
+		}
+	}
+	for _, f := range commit.Files {
+		if filepath.Ext(f.Filename) != ".xml" { // only manifest XMLs describe repo structure
+			continue
+		}
+		if err := parseFilePatch(f.Patch, tmp); err != nil {
+			return "", nil, err
+		}
+	}
+	var ret []*vcs.ProjectUpdate
+	for _, pu := range tmp {
+		projectUpdateCopy := pu // copy: taking the address of the range variable would alias one value
+		ret = append(ret, &projectUpdateCopy)
+	}
+	for _, pu := range ret {
+		if pu.P1 == nil && pu.P2 != nil { // newly added project: pin its revision to the last MR before this manifest change
+			lastCommit, err := gitee.GetLatestMRBefore("openharmony", pu.P2.Name, branch, commit.Commit.Committer.Date)
+			if err != nil {
+				return "", nil, err
+			}
+			pu.P2.Revision = lastCommit.SHA
+		}
+	}
+	return commit.Commit.Committer.Date, ret, nil
+}
+
+func parseFilePatch(str string, m map[string]vcs.ProjectUpdate) error { // scans a unified-diff patch body for removed (-) / added (+) <project .../> lines
+	sc := bufio.NewScanner(bytes.NewBuffer([]byte(str)))
+	for sc.Scan() {
+		line := sc.Text()
+		var p vcs.Project
+		if strings.HasPrefix(line, "-") { // removed project -> P1 (old state)
+			if err := xml.Unmarshal([]byte(line[1:]), &p); err == nil { // non-project diff lines simply fail to unmarshal and are skipped
+				m[p.Name] = vcs.ProjectUpdate{P1: &p, P2: m[p.Name].P2}
+			}
+		} else if strings.HasPrefix(line, "+") { // added project -> P2 (new state)
+			if err := xml.Unmarshal([]byte(line[1:]), &p); err == nil {
+				m[p.Name] = vcs.ProjectUpdate{P1: m[p.Name].P1, P2: &p}
+			}
+		}
+	}
+	return nil
+}
+
+func combineIssuesToStep(issueInfos map[string]*IssueInfo) (ret []Step, err error) { // merge related issues, then emit steps sorted by their effective update time
+	for _, info := range issueInfos {
+		combineOtherRelatedIssue(info, info, issueInfos) // fold transitively related issues together
+	}
+	for issue, infos := range issueInfos {
+		sort.Slice(infos.MRs, func(i, j int) bool {
+			// move the latest MR to the first place, use its merged_time to represent the update time of the issue
+			return infos.MRs[i].Commit.Committer.Date > infos.MRs[j].Commit.Committer.Date
+		})
+		ret = append(ret, Step{
+			IssueURLs:        append(infos.RelatedIssues, issue),
+			MRs:              infos.MRs,
+			StructCTime:      infos.StructCTime,
+			StructureUpdates: infos.StructureUpdates})
+	}
+	sort.Slice(ret, func(i, j int) bool { // a step's time is its latest MR time, overridden by the manifest-change time when present
+		ti, tj := ret[i].MRs[0].Commit.Committer.Date, ret[j].MRs[0].Commit.Committer.Date
+		if len(ret[i].StructCTime) != 0 {
+			ti = ret[i].StructCTime
+		}
+		if len(ret[j].StructCTime) != 0 {
+			tj = ret[j].StructCTime // bugfix: was "ti = ..." — overwrote the left operand, corrupting step ordering
+		}
+		return ti < tj
+	})
+	logrus.Infof("find total %d steps of all issues", len(ret))
+	return
+}
+
+func parseTime(pkg string) (time.Time, error) { // extracts the embedded timestamp from a package name: "YYYYMMDD_hhmmss", falling back to "YYYYMMDDhhmmss"
+	t, err := time.ParseInLocation(`20060102_150405`, regexp.MustCompile(`\d{8}_\d{6}`).FindString(pkg), time.Local)
+	if err != nil {
+		return time.ParseInLocation(`20060102150405`, regexp.MustCompile(`\d{14}`).FindString(pkg), time.Local)
+	}
+	return t, nil
+}
+
+func (m *Manager) genStepPackage(base *vcs.Manifest, step Step) (newPkg string, newManifest *vcs.Manifest, err error) { // applies one step to base and writes the resulting package dir; dir name is the manifest md5
+	defer func() {
+		logrus.Infof("package dir %s for step %v generated", newPkg, step.IssueURLs) // NOTE(review): also logs on error paths with empty newPkg — confirm intended
+	}()
+	newManifest = clone.Clone(base).(*vcs.Manifest) // deep copy: base must stay untouched for the caller's chaining
+	for _, u := range step.StructureUpdates {
+		if u.P2 != nil { // project added or changed
+			newManifest.UpdateManifestProject(u.P2.Name, u.P2.Path, u.P2.Remote, u.P2.Revision, true)
+		} else if u.P1 != nil { // project removed
+			newManifest.RemoveManifestProject(u.P1.Name)
+		}
+	}
+	for _, mr := range step.MRs {
+		newManifest.UpdateManifestProject(mr.Repo, "", "", mr.SHA, false) // only bump the revision; path/remote unchanged
+	}
+	md5sum, err := newManifest.Standardize()
+	if err != nil {
+		return "", nil, err
+	}
+	if err := os.MkdirAll(filepath.Join(m.Workspace, md5sum), 0750); err != nil {
+		return "", nil, err
+	}
+	if err := os.WriteFile(filepath.Join(m.Workspace, md5sum, "__last_issue__"), []byte(fmt.Sprintf("%v", step.IssueURLs)), 0640); err != nil {
+		return "", nil, err
+	}
+	err = newManifest.WriteFile(filepath.Join(m.Workspace, md5sum, "manifest_tag.xml"))
+	if err != nil {
+		return "", nil, err
+	}
+	return md5sum, newManifest, nil
+}
diff --git a/tools/fotff/pkg/gitee_common/steps_gitee_test.go b/tools/fotff/pkg/gitee_common/steps_gitee_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..38f8dcbf60a23863de36f2a62e736507861f2c48
--- /dev/null
+++ b/tools/fotff/pkg/gitee_common/steps_gitee_test.go
@@ -0,0 +1,100 @@
+/*
+ * Copyright (c) 2022 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package gitee_common
+
+import (
+ "fotff/vcs"
+ "os"
+ "path/filepath"
+ "strings"
+ "testing"
+)
+
// TestMain runs the package's tests and removes runtime artifacts
// (cache dir and logs) afterwards.
func TestMain(m *testing.M) {
	defer func() {
		os.RemoveAll("logs")
		os.RemoveAll(".fotff")
	}()
	m.Run()
}
+
+func TestManager_Steps(t *testing.T) {
+ m := &Manager{Workspace: "./testdata", Branch: "master"}
+ defer func() {
+ entries, _ := os.ReadDir(m.Workspace)
+ for _, e := range entries {
+ if strings.HasPrefix(e.Name(), "version") {
+ continue
+ }
+ os.RemoveAll(filepath.Join(m.Workspace, e.Name()))
+ }
+ }()
+ tests := []struct {
+ name string
+ from, to string
+ stepsNum int
+ }{
+ {
+ name: "15 MR of 15 steps in 12 repo, with 1 path change",
+ from: "version-Daily_Version-dayu200-20221201_080109-dayu200",
+ to: "version-Daily_Version-dayu200-20221201_100141-dayu200",
+ stepsNum: 15,
+ },
+ {
+ name: "27 MR of 25 steps in 21 repo, with 1 repo add",
+ from: "version-Daily_Version-dayu200-20221213_110027-dayu200",
+ to: "version-Daily_Version-dayu200-20221213_140150-dayu200",
+ stepsNum: 25,
+ },
+ {
+ name: "15 MR of 14 steps in 14 repo, no structure change",
+ from: "version-Daily_Version-dayu200-20221214_100124-dayu200",
+ to: "version-Daily_Version-dayu200-20221214_110125-dayu200",
+ stepsNum: 14,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ ret, err := m.Steps(tt.from, tt.to)
+ if err != nil {
+ t.Fatalf("err: expcect: , actual: %v", err)
+ }
+ if len(ret) != tt.stepsNum {
+ t.Fatalf("steps num: expcect: %d, actual: %v", tt.stepsNum, len(ret))
+ }
+ if tt.stepsNum == 0 {
+ return
+ }
+ mLast, err := vcs.ParseManifestFile(filepath.Join(m.Workspace, ret[len(ret)-1], "manifest_tag.xml"))
+ if err != nil {
+ t.Fatalf("err: expcect: , actual: %v", err)
+ }
+ mLastMD5, err := mLast.Standardize()
+ if err != nil {
+ t.Fatalf("err: expcect: , actual: %v", err)
+ }
+ expected, err := vcs.ParseManifestFile(filepath.Join(m.Workspace, tt.to, "manifest_tag.xml"))
+ if err != nil {
+ t.Fatalf("err: expcect: , actual: %v", err)
+ }
+ expectedMD5, err := expected.Standardize()
+ if err != nil {
+ t.Fatalf("err: expcect: , actual: %v", err)
+ }
+ if mLastMD5 != expectedMD5 {
+ t.Errorf("steps result: expect: %s, actual: %s", expectedMD5, mLastMD5)
+ }
+ })
+ }
+}
diff --git a/tools/fotff/pkg/gitee_common/testdata/version-Daily_Version-dayu200-20221201_080109-dayu200/manifest_tag.xml b/tools/fotff/pkg/gitee_common/testdata/version-Daily_Version-dayu200-20221201_080109-dayu200/manifest_tag.xml
new file mode 100644
index 0000000000000000000000000000000000000000..d6cddc4f84e1c97f3281172e58b8b0e2667b0774
--- /dev/null
+++ b/tools/fotff/pkg/gitee_common/testdata/version-Daily_Version-dayu200-20221201_080109-dayu200/manifest_tag.xml
@@ -0,0 +1,411 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tools/fotff/pkg/gitee_common/testdata/version-Daily_Version-dayu200-20221201_100141-dayu200/manifest_tag.xml b/tools/fotff/pkg/gitee_common/testdata/version-Daily_Version-dayu200-20221201_100141-dayu200/manifest_tag.xml
new file mode 100644
index 0000000000000000000000000000000000000000..700fe6bff5631e2844556d8538f25f7a7e8cf82a
--- /dev/null
+++ b/tools/fotff/pkg/gitee_common/testdata/version-Daily_Version-dayu200-20221201_100141-dayu200/manifest_tag.xml
@@ -0,0 +1,411 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tools/fotff/pkg/gitee_common/testdata/version-Daily_Version-dayu200-20221213_110027-dayu200/manifest_tag.xml b/tools/fotff/pkg/gitee_common/testdata/version-Daily_Version-dayu200-20221213_110027-dayu200/manifest_tag.xml
new file mode 100644
index 0000000000000000000000000000000000000000..51b92b7533cd97cf832bd6b324678c44fecf4462
--- /dev/null
+++ b/tools/fotff/pkg/gitee_common/testdata/version-Daily_Version-dayu200-20221213_110027-dayu200/manifest_tag.xml
@@ -0,0 +1,411 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tools/fotff/pkg/gitee_common/testdata/version-Daily_Version-dayu200-20221213_140150-dayu200/manifest_tag.xml b/tools/fotff/pkg/gitee_common/testdata/version-Daily_Version-dayu200-20221213_140150-dayu200/manifest_tag.xml
new file mode 100644
index 0000000000000000000000000000000000000000..c1b47deb12363c282b03258f1da25c5c15ba806b
--- /dev/null
+++ b/tools/fotff/pkg/gitee_common/testdata/version-Daily_Version-dayu200-20221213_140150-dayu200/manifest_tag.xml
@@ -0,0 +1,412 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tools/fotff/pkg/gitee_common/testdata/version-Daily_Version-dayu200-20221214_100124-dayu200/manifest_tag.xml b/tools/fotff/pkg/gitee_common/testdata/version-Daily_Version-dayu200-20221214_100124-dayu200/manifest_tag.xml
new file mode 100644
index 0000000000000000000000000000000000000000..b1f452ec78dd5f9eeed10604e06153de8916964e
--- /dev/null
+++ b/tools/fotff/pkg/gitee_common/testdata/version-Daily_Version-dayu200-20221214_100124-dayu200/manifest_tag.xml
@@ -0,0 +1,412 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tools/fotff/pkg/gitee_common/testdata/version-Daily_Version-dayu200-20221214_110125-dayu200/manifest_tag.xml b/tools/fotff/pkg/gitee_common/testdata/version-Daily_Version-dayu200-20221214_110125-dayu200/manifest_tag.xml
new file mode 100644
index 0000000000000000000000000000000000000000..580e14f1e3524bec2adecb05842c409773f6cb0a
--- /dev/null
+++ b/tools/fotff/pkg/gitee_common/testdata/version-Daily_Version-dayu200-20221214_110125-dayu200/manifest_tag.xml
@@ -0,0 +1,412 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tools/fotff/pkg/mock/mock.go b/tools/fotff/pkg/mock/mock.go
new file mode 100644
index 0000000000000000000000000000000000000000..be38bf5850b127ea601fd66e2afd099ef929f405
--- /dev/null
+++ b/tools/fotff/pkg/mock/mock.go
@@ -0,0 +1,65 @@
+/*
+ * Copyright (c) 2022 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package mock
+
+import (
+ "context"
+ "fmt"
+ "fotff/pkg"
+ "github.com/sirupsen/logrus"
+ "time"
+)
+
// Manager is a trivial in-memory pkg.Manager mock.
type Manager struct {
	// pkgCount counts GetNewer calls; it names the next fake package.
	pkgCount int
}
+
+func NewManager() pkg.Manager {
+ return &Manager{}
+}
+
+func (m *Manager) LastIssue(pkg string) (string, error) {
+ ret := fmt.Sprintf("https://testserver.com/issues/%s", pkg)
+ logrus.Infof("LastIssue: mock implementation returns %s", ret)
+ return ret, nil
+}
+
+func (m *Manager) Steps(from, to string) ([]string, error) {
+ var ret = []string{"step1", "step2", "step3"}
+ for i := range ret {
+ ret[i] = fmt.Sprintf("%s-%s-%s", from, to, ret[i])
+ }
+ logrus.Infof("Steps: mock implementation returns %v", ret)
+ return ret, nil
+}
+
+func (m *Manager) GetNewer(cur string) (string, error) {
+ ret := fmt.Sprintf("pkg%d", m.pkgCount)
+ time.Sleep(time.Duration(m.pkgCount) * time.Second)
+ m.pkgCount++
+ logrus.Infof("GetNewer: mock implementation returns %s", ret)
+ return ret, nil
+}
+
// Flash pretends to flash pkg onto device: it sleeps one second, logs, and
// always reports success. ctx is accepted for interface compatibility but
// is not observed here.
func (m *Manager) Flash(device string, pkg string, ctx context.Context) error {
	time.Sleep(time.Second)
	logrus.Infof("Flash: flashing %s to %s, mock implementation returns OK unconditionally", pkg, device)
	return nil
}
+
// PkgDir returns the package name itself as its directory path.
func (m *Manager) PkgDir(pkg string) string {
	return pkg
}
diff --git a/tools/fotff/pkg/pkg.go b/tools/fotff/pkg/pkg.go
new file mode 100644
index 0000000000000000000000000000000000000000..0e434b4e147711792c3e7c3674e6437dda5b171b
--- /dev/null
+++ b/tools/fotff/pkg/pkg.go
@@ -0,0 +1,35 @@
+/*
+ * Copyright (c) 2022 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package pkg
+
+import (
+ "context"
+)
+
// NewFunc constructs a concrete Manager implementation.
type NewFunc func() Manager

// Manager abstracts a source of build packages: discovering them, relating
// them to issues, and flashing them onto devices.
type Manager interface {
	// Flash download given package to the device.
	Flash(device string, pkg string, ctx context.Context) error
	// LastIssue returns the last issue URL related to the package.
	LastIssue(pkg string) (string, error)
	// Steps generates every intermediate package and returns the list sequentially.
	Steps(from, to string) ([]string, error)
	// GetNewer blocks the process until a newer package is found, then returns the newest one.
	GetNewer(cur string) (string, error)
	// PkgDir returns where pkg exists in the filesystem.
	PkgDir(pkg string) string
}
diff --git a/tools/fotff/rec/flashandtest.go b/tools/fotff/rec/flashandtest.go
new file mode 100644
index 0000000000000000000000000000000000000000..523270dd6831b198d67625b12fc4a7133c8de4b8
--- /dev/null
+++ b/tools/fotff/rec/flashandtest.go
@@ -0,0 +1,64 @@
+package rec
+
+import (
+ "context"
+ "fotff/pkg"
+ "fotff/tester"
+
+ "github.com/sirupsen/logrus"
+)
+
// FlashAndTestOptions specify which pkg.Manager and which tester to use to flash and test the specified version
type FlashAndTestOptions struct {
	// M flashes and locates packages.
	M pkg.Manager
	// T prepares and runs tests.
	T tester.Tester
	// Version is the package version to flash.
	Version string
	// Device identifies the target device.
	Device string
	// TestCase optionally names a single test case; empty means run the whole task.
	TestCase string
}
+
+// FlashAndTest build and flash the given version to the specified device, then run the specified test cases
+func FlashAndTest(ctx context.Context, opt *FlashAndTestOptions) error {
+ // flash the specified version to the specified device
+ if err := opt.M.Flash(opt.Device, opt.Version, ctx); err != nil {
+ logrus.Errorf("Failed to flash version %s to device %s, error: %s", opt.Version, opt.Device, err.Error())
+ return err
+ }
+
+ // prepare and run the specified test
+ if err := opt.T.Prepare(opt.M.PkgDir(opt.Version), opt.Device, ctx); err != nil {
+ logrus.Errorf("Failed to prepare test, error: %s", err.Error())
+ return err
+ }
+
+ if opt.TestCase == "" {
+ // run all test cases if the --testcase argument was not present
+ results, err := opt.T.DoTestTask(opt.Device, ctx)
+ if err != nil {
+ logrus.Errorf("Failed to run all test cases on device %s, error: %s", opt.Device, err.Error())
+ return err
+ }
+
+ // only mark test result as pass when all test cases passed
+ var result tester.ResultStatus = tester.ResultPass
+ for _, r := range results {
+ logrus.Infof("Result for test case %s is %s", r.TestCaseName, r.Status)
+ if r.Status == tester.ResultFail {
+ result = tester.ResultFail
+ }
+ }
+
+ logrus.Infof("Flash and test for all test cases done, result: %s", result)
+ return nil
+ } else {
+ // otherwise run the specified test case
+ result, err := opt.T.DoTestCase(opt.Device, opt.TestCase, ctx)
+ if err != nil {
+ logrus.Errorf("Failed to run test case %s on device %s, error: %s", opt.TestCase, opt.Device, err.Error())
+ return err
+ }
+
+ logrus.Infof("Flash and test for test case %s done, result: %s", opt.TestCase, result)
+ return nil
+ }
+}
diff --git a/tools/fotff/rec/fotff.go b/tools/fotff/rec/fotff.go
new file mode 100644
index 0000000000000000000000000000000000000000..64cccd89c4101bea7f1ff185548fe030e3109b93
--- /dev/null
+++ b/tools/fotff/rec/fotff.go
@@ -0,0 +1,173 @@
+/*
+ * Copyright (c) 2022 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package rec
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "fotff/pkg"
+ "fotff/res"
+ "fotff/tester"
+ "fotff/utils"
+ "github.com/sirupsen/logrus"
+ "math"
+ "sync"
+)
+
// cancelCtx pairs a context with its cancel function so an in-flight test
// can be aborted once its result can no longer affect the search range.
type cancelCtx struct {
	ctx context.Context
	fn  context.CancelFunc
}
+
+// FindOutTheFirstFail returns the first issue URL that introduce the failure.
+// 'fellows' are optional, these testcases may be tested with target testcase together.
+func FindOutTheFirstFail(m pkg.Manager, t tester.Tester, testCase string, successPkg string, failPkg string, fellows ...string) (string, error) {
+ if successPkg == "" {
+ return "", fmt.Errorf("can not get a success package for %s", testCase)
+ }
+ steps, err := m.Steps(successPkg, failPkg)
+ if err != nil {
+ return "", err
+ }
+ return findOutTheFirstFail(m, t, testCase, steps, fellows...)
+}
+
// findOutTheFirstFail is the recursive implementation to find out the first issue URL that introduce the failure.
// Arg steps' length must be greater than 1. The last step is a pre-known failure, while the rests are not tested.
// 'fellows' are optional. In the last recursive term, they have the same result as what the target testcases has.
// These fellows can be tested with target testcase together in this term to accelerate testing.
func findOutTheFirstFail(m pkg.Manager, t tester.Tester, testcase string, steps []string, fellows ...string) (string, error) {
	if len(steps) == 0 {
		return "", errors.New("steps are no between (success, failure], perhaps the failure is occasional")
	}
	logrus.Infof("now use %d-section search to find out the first fault, the length of range is %d, between [%s, %s]", res.Num()+1, len(steps), steps[0], steps[len(steps)-1])
	if len(steps) == 1 {
		// Only one candidate left: it must be the culprit step.
		return m.LastIssue(steps[0])
	}
	// calculate gaps between every check point of N-section search. At least 1, or will cause duplicated tests.
	gapLen := float64(len(steps)-1) / float64(res.Num()+1)
	if gapLen < 1 {
		gapLen = 1
	}
	// 'success' and 'fail' record the left/right steps indexes of the next term recursive call.
	// Here defines functions and surrounding helpers to update success/fail indexes and cancel un-needed tests.
	success, fail := -1, len(steps)-1
	var lock sync.Mutex
	var contexts []cancelCtx
	updateRange := func(pass bool, index int) {
		lock.Lock()
		defer lock.Unlock()
		// A pass at 'index' makes any test left of it irrelevant; cancel those.
		if pass && index > success {
			success = index
			for _, ctx := range contexts {
				if ctx.ctx.Value("index").(int) < success {
					ctx.fn()
				}
			}
		}
		// A failure at 'index' makes any test right of it irrelevant; cancel those.
		if !pass && index < fail {
			fail = index
			for _, ctx := range contexts {
				if ctx.ctx.Value("index").(int) > fail {
					ctx.fn()
				}
			}
		}
	}
	// Now, start all tests concurrently.
	var wg sync.WaitGroup
	start := make(chan struct{})
	for i := 1; i <= res.Num(); i++ {
		// Since the last step is a pre-known failure, we start index from the tail to avoid testing the last one.
		// Otherwise, if the last step is the only one we test this term, we can not narrow ranges to continue.
		index := len(steps) - 1 - int(math.Round(float64(i)*gapLen))
		if index < 0 {
			break
		}
		// NOTE(review): a plain string key for context.WithValue triggers a
		// go vet warning; a private key type would silence it — confirm
		// before changing, as updateRange relies on this exact key.
		ctx, fn := context.WithCancel(context.WithValue(context.TODO(), "index", index))
		contexts = append(contexts, cancelCtx{ctx: ctx, fn: fn})
		wg.Add(1)
		go func(index int, ctx context.Context) {
			defer wg.Done()
			// Start after all test goroutine's contexts are registered.
			// Otherwise, contexts that not registered yet may out of controlling.
			<-start
			var pass bool
			var err error
			pass, fellows, err = flashAndTest(m, t, steps[index], testcase, ctx, fellows...)
			if err != nil {
				if errors.Is(err, context.Canceled) {
					logrus.Warnf("abort to flash %s and test %s: %v", steps[index], testcase, err)
				} else {
					logrus.Errorf("flash %s and test %s fail: %v", steps[index], testcase, err)
				}
				return
			}
			updateRange(pass, index)
		}(index, ctx)
	}
	close(start)
	wg.Wait()
	// No goroutine managed to narrow the range at all: give up this term.
	if fail-success == len(steps) {
		return "", errors.New("all judgements failed, can not narrow ranges to continue")
	}
	// Recurse into the narrowed (success, fail] range.
	return findOutTheFirstFail(m, t, testcase, steps[success+1:fail+1], fellows...)
}
+
// flashAndTest flashes pkg onto an acquired device and runs testcase together
// with its fellows. It returns whether testcase passed, the subset of fellows
// whose result matched testcase's this round, and any error. Results are
// cached per (testcase, pkg) pair, so repeated judgements skip the device.
func flashAndTest(m pkg.Manager, t tester.Tester, pkg string, testcase string, ctx context.Context, fellows ...string) (bool, []string, error) {
	var newFellows []string
	// Cache hit: reuse the recorded result, and keep only fellows whose
	// cached status matches the target testcase's.
	if result, found := utils.CacheGet("testcase_result", testcase+"__at__"+pkg); found {
		logrus.Infof("get testcase result %s from cache done, result is %s", result.(tester.Result).TestCaseName, result.(tester.Result).Status)
		for _, fellow := range fellows {
			if fellowResult, fellowFound := utils.CacheGet("testcase_result", fellow+"__at__"+pkg); fellowFound {
				logrus.Infof("get testcase result %s from cache done, result is %s", fellowResult.(tester.Result).TestCaseName, fellowResult.(tester.Result).Status)
				if fellowResult.(tester.Result).Status == result.(tester.Result).Status {
					newFellows = append(newFellows, fellow)
				}
			}
		}
		return result.(tester.Result).Status == tester.ResultPass, newFellows, nil
	}
	var results []tester.Result
	// Hold a device for the whole flash+test cycle.
	device := res.GetDevice()
	defer res.ReleaseDevice(device)
	// A canceled flash is tolerated here; cancellation surfaces later.
	if err := m.Flash(device, pkg, ctx); err != nil && !errors.Is(err, context.Canceled) {
		return false, newFellows, err
	} else {
		if err = t.Prepare(m.PkgDir(pkg), device, ctx); err != nil {
			return false, newFellows, err
		}
		results, err = t.DoTestCases(device, append(fellows, testcase), ctx)
		if err != nil {
			return false, newFellows, err
		}
	}
	var testcaseStatus tester.ResultStatus
	// Record every result in the cache and pick out the target's status.
	for _, result := range results {
		logrus.Infof("do testcase %s at %s done, result is %s", result.TestCaseName, device, result.Status)
		if result.TestCaseName == testcase {
			testcaseStatus = result.Status
		}
		utils.CacheSet("testcase_result", result.TestCaseName+"__at__"+pkg, result)
	}
	// Fellows sharing the target's status remain useful next round.
	for _, result := range results {
		if result.TestCaseName != testcase && result.Status == testcaseStatus {
			newFellows = append(newFellows, result.TestCaseName)
		}
	}
	return testcaseStatus == tester.ResultPass, newFellows, nil
}
diff --git a/tools/fotff/rec/fotff_test.go b/tools/fotff/rec/fotff_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..1033a0c03c6df750aa3b927d039e76bd8b3dbb8e
--- /dev/null
+++ b/tools/fotff/rec/fotff_test.go
@@ -0,0 +1,300 @@
+/*
+ * Copyright (c) 2022 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package rec
+
+import (
+ "context"
+ "crypto/md5"
+ "fmt"
+ "fotff/res"
+ "fotff/tester"
+ "github.com/sirupsen/logrus"
+ "math/rand"
+ "os"
+ "strconv"
+ "strings"
+ "sync"
+ "testing"
+ "time"
+)
+
// FotffMocker serves as an in-memory package manager and tester for the
// search tests: steps numbered FirstFail and later fail the mock test case.
type FotffMocker struct {
	// FirstFail is the 1-based number of the first failing step.
	FirstFail int
	// steps holds the generated step/package names in order.
	steps []string
	// lock guards runningPkg.
	lock sync.Mutex
	// runningPkg maps a device name to the package currently "flashed" on it.
	runningPkg map[string]string
}
+
// Seed the global RNG so each run generates different mock step prefixes.
// NOTE(review): rand.Seed is deprecated as of Go 1.20 — confirm the target
// toolchain before modernizing.
func init() {
	rand.Seed(time.Now().UnixNano())
}
+
// TestMain runs the package's tests and removes runtime artifacts
// (cache dir and logs) afterwards.
func TestMain(m *testing.M) {
	defer func() {
		os.RemoveAll("logs")
		os.RemoveAll(".fotff")
	}()
	m.Run()
}
+
+func NewFotffMocker(stepsNum int, firstFail int) *FotffMocker {
+ randomPrefix := fmt.Sprintf("%x", md5.Sum([]byte(fmt.Sprintf("%d", rand.Int()))))[:4]
+ steps := make([]string, stepsNum)
+ for i := 1; i <= stepsNum; i++ {
+ steps[i-1] = fmt.Sprintf("%s_%s", randomPrefix, strconv.Itoa(i))
+ }
+ return &FotffMocker{
+ FirstFail: firstFail,
+ steps: steps,
+ runningPkg: map[string]string{},
+ }
+}
+
// TaskName identifies this tester implementation.
func (f *FotffMocker) TaskName() string {
	return "mocker"
}
+
// Prepare is a no-op: the mock tester needs no setup on the device.
func (f *FotffMocker) Prepare(pkgDir string, device string, ctx context.Context) error {
	return nil
}
+
// DoTestTask simulates a full test run that always reports the mock test
// case as failed, honoring ctx cancellation.
func (f *FotffMocker) DoTestTask(device string, ctx context.Context) ([]tester.Result, error) {
	// rand.Intn(1) is always 0, so this is effectively no delay.
	time.Sleep(time.Duration(rand.Intn(1)) * time.Millisecond)
	select {
	case <-ctx.Done():
		return nil, context.Canceled
	default:
	}
	return []tester.Result{{TestCaseName: f.TestCaseName(), Status: tester.ResultFail}}, nil
}
+
// DoTestCase reports failure iff the package currently "flashed" on device
// is step number FirstFail or later; earlier steps pass. It honors ctx
// cancellation before judging.
func (f *FotffMocker) DoTestCase(device string, testcase string, ctx context.Context) (tester.Result, error) {
	// rand.Intn(1) is always 0, so this is effectively no delay.
	time.Sleep(time.Duration(rand.Intn(1)) * time.Millisecond)
	select {
	case <-ctx.Done():
		return tester.Result{}, context.Canceled
	default:
	}
	f.lock.Lock()
	// Step names look like "<prefix>_<n>"; despite its name, pkgPrefix
	// holds the numeric part after the underscore.
	_, pkgPrefix, _ := strings.Cut(f.runningPkg[device], "_")
	pkgOrder, _ := strconv.Atoi(pkgPrefix)
	f.lock.Unlock()
	if pkgOrder >= f.FirstFail {
		logrus.Infof("mock: test %s at %s done, result is %s", testcase, device, tester.ResultFail)
		return tester.Result{TestCaseName: testcase, Status: tester.ResultFail}, nil
	}
	logrus.Infof("mock: test %s at %s done, result is %s", testcase, device, tester.ResultPass)
	return tester.Result{TestCaseName: testcase, Status: tester.ResultPass}, nil
}
+
+func (f *FotffMocker) DoTestCases(device string, testcases []string, ctx context.Context) ([]tester.Result, error) {
+ var ret []tester.Result
+ for _, testcase := range testcases {
+ r, err := f.DoTestCase(device, testcase, ctx)
+ if err != nil {
+ return nil, err
+ }
+ ret = append(ret, r)
+ }
+ return ret, nil
+}
+
// Flash records pkg as the package currently running on device, honoring
// ctx cancellation. It always succeeds otherwise.
func (f *FotffMocker) Flash(device string, pkg string, ctx context.Context) error {
	// rand.Intn(1) is always 0, so this is effectively no delay.
	time.Sleep(time.Duration(rand.Intn(1)) * time.Millisecond)
	select {
	case <-ctx.Done():
		return context.Canceled
	default:
	}
	f.lock.Lock()
	f.runningPkg[device] = pkg
	logrus.Infof("mock: flash %s to %s done", pkg, device)
	f.lock.Unlock()
	return nil
}
+
// LastIssue derives a deterministic fake issue URL from the package name.
func (f *FotffMocker) LastIssue(pkg string) (string, error) {
	return "issue" + pkg, nil
}
+
+func (f *FotffMocker) Steps(from, to string) (ret []string, err error) {
+ return f.steps, nil
+}
+
// GetNewer is a stub; the search under test never asks for a newer package.
func (f *FotffMocker) GetNewer(cur string) (string, error) {
	return "", nil
}
+
// PkgDir returns the package name itself as its directory path.
func (f *FotffMocker) PkgDir(pkg string) string {
	return pkg
}
+
// TestCaseName is the name of the single mock test case this mocker judges.
func (f *FotffMocker) TestCaseName() string {
	return "MOCK_FAILED_TEST_CASE"
}
+
// Last returns the final (pre-known failing) step name.
func (f *FotffMocker) Last() string {
	return f.steps[len(f.steps)-1]
}
+
+func TestFindOutTheFirstFail(t *testing.T) {
+ tests := []struct {
+ name string
+ mocker *FotffMocker
+ }{
+ {
+ name: "0-1(X)",
+ mocker: NewFotffMocker(1, 1),
+ },
+ {
+ name: "0-1(X)-2",
+ mocker: NewFotffMocker(2, 1),
+ },
+ {
+ name: "0-1-2(X)",
+ mocker: NewFotffMocker(2, 2),
+ },
+ {
+ name: "0-1(X)-2-3",
+ mocker: NewFotffMocker(3, 1),
+ },
+ {
+ name: "0-1-2(X)-3",
+ mocker: NewFotffMocker(3, 2),
+ },
+ {
+ name: "0-1-2-3(X)",
+ mocker: NewFotffMocker(3, 3),
+ },
+ {
+ name: "0-1(X)-2-3-4",
+ mocker: NewFotffMocker(4, 1),
+ },
+ {
+ name: "0-1-2(X)-3-4",
+ mocker: NewFotffMocker(4, 2),
+ },
+ {
+ name: "0-1-2-3(X)-4",
+ mocker: NewFotffMocker(4, 3),
+ },
+ {
+ name: "0-1-2-3-4(X)",
+ mocker: NewFotffMocker(4, 4),
+ },
+ {
+ name: "0-1(X)-2-3-4-5",
+ mocker: NewFotffMocker(5, 1),
+ },
+ {
+ name: "0-1-2(X)-3-4-5",
+ mocker: NewFotffMocker(5, 2),
+ },
+ {
+ name: "0-1-2-3(X)-4-5",
+ mocker: NewFotffMocker(5, 3),
+ },
+ {
+ name: "0-1-2-3-4(X)-5",
+ mocker: NewFotffMocker(5, 4),
+ },
+ {
+ name: "0-1-2-3-4-5(X)",
+ mocker: NewFotffMocker(5, 5),
+ },
+ {
+ name: "0-1-2...262143(X)...1048575",
+ mocker: NewFotffMocker(1048575, 262143),
+ },
+ {
+ name: "0-1-2...262144(X)...1048575",
+ mocker: NewFotffMocker(1048575, 262144),
+ },
+ {
+ name: "0-1-2...262145(X)...1048575",
+ mocker: NewFotffMocker(1048575, 262145),
+ },
+ {
+ name: "0-1-2...262143(X)...1048576",
+ mocker: NewFotffMocker(1048576, 262143),
+ },
+ {
+ name: "0-1-2...262144(X)...1048576",
+ mocker: NewFotffMocker(1048576, 262144),
+ },
+ {
+ name: "0-1-2...262145(X)...1048576",
+ mocker: NewFotffMocker(1048576, 262145),
+ },
+ {
+ name: "0-1-2...262143(X)...1048577",
+ mocker: NewFotffMocker(1048577, 262143),
+ },
+ {
+ name: "0-1-2...262144(X)...1048577",
+ mocker: NewFotffMocker(1048577, 262144),
+ },
+ {
+ name: "0-1-2...262145(X)...1048577",
+ mocker: NewFotffMocker(1048577, 262145),
+ },
+ {
+ name: "0-1-2...1234567(X)...10000000",
+ mocker: NewFotffMocker(10000000, 1234567),
+ },
+ {
+ name: "0-1-2...1234567(X)...100000001",
+ mocker: NewFotffMocker(10000001, 1234567),
+ },
+ {
+ name: "0-1-2...7654321(X)...10000000",
+ mocker: NewFotffMocker(10000000, 7654321),
+ },
+ {
+ name: "0-1-2...7654321(X)...10000001",
+ mocker: NewFotffMocker(10000001, 7654321),
+ },
+ {
+ name: "0-1(X)-2...10000000",
+ mocker: NewFotffMocker(10000000, 1),
+ },
+ {
+ name: "0-1(X)-2...10000001",
+ mocker: NewFotffMocker(10000001, 1),
+ },
+ {
+ name: "0-1-2...10000000(X)",
+ mocker: NewFotffMocker(10000000, 10000000),
+ },
+ {
+ name: "0-1-2...10000001(X)",
+ mocker: NewFotffMocker(10000001, 10000001),
+ },
+ }
+ for i := 1; i <= 5; i++ {
+ res.Fake(i)
+ for _, tt := range tests {
+ t.Run(fmt.Sprintf("RES%d:%s", i, tt.name), func(t *testing.T) {
+ ret, err := FindOutTheFirstFail(tt.mocker, tt.mocker, tt.mocker.TestCaseName(), "0", tt.mocker.Last())
+ if err != nil {
+ t.Errorf("err: expcect: , actual: %v", err)
+ }
+ expectIssue, _ := tt.mocker.LastIssue(tt.mocker.steps[tt.mocker.FirstFail-1])
+ if ret != expectIssue {
+ t.Errorf("fotff result: expect: %s, actual: %s", expectIssue, ret)
+ }
+ })
+ }
+ }
+}
diff --git a/tools/fotff/rec/record.go b/tools/fotff/rec/record.go
new file mode 100644
index 0000000000000000000000000000000000000000..52894c68556b84dd25dd4aa78815c5eeaab26863
--- /dev/null
+++ b/tools/fotff/rec/record.go
@@ -0,0 +1,133 @@
+/*
+ * Copyright (c) 2022 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package rec
+
+import (
+ "context"
+ "encoding/json"
+ "fotff/pkg"
+ "fotff/tester"
+ "fotff/utils"
+ "github.com/sirupsen/logrus"
+ "time"
+)
+
+var Records = make(map[string]Record)
+
+func init() {
+ data, err := utils.ReadRuntimeData("records.json")
+ if err != nil {
+ return
+ }
+ if err := json.Unmarshal(data, &Records); err != nil {
+ logrus.Errorf("unmarshal records err: %v", err)
+ }
+}
+
+func Save() {
+ data, err := json.MarshalIndent(Records, "", "\t")
+ if err != nil {
+ logrus.Errorf("marshal records err: %v", err)
+ return
+ }
+ if err := utils.WriteRuntimeData("records.json", data); err != nil {
+ logrus.Errorf("save records err: %v", err)
+ return
+ }
+ logrus.Infof("save records successfully")
+}
+
+func HandleResults(t tester.Tester, dev string, pkgName string, results []tester.Result) []string {
+ var passes, fails []tester.Result
+ for _, result := range results {
+ switch result.Status {
+ case tester.ResultPass:
+ passes = append(passes, result)
+ case tester.ResultFail:
+ fails = append(fails, result)
+ }
+ }
+ handlePassResults(pkgName, passes)
+ return handleFailResults(t, dev, pkgName, fails)
+}
+
+func handlePassResults(pkgName string, results []tester.Result) {
+ for _, result := range results {
+ logrus.Infof("recording [%s] as a success, the lastest success package is [%s]", result.TestCaseName, pkgName)
+ Records[result.TestCaseName] = Record{
+ UpdateTime: time.Now().Format("2006-01-02 15:04:05"),
+ Status: tester.ResultPass,
+ LatestSuccessPkg: pkgName,
+ EarliestFailPkg: "",
+ FailIssueURL: "",
+ }
+ }
+}
+
// handleFailResults re-runs each failed case up to 3 times to filter out
// occasional failures, records the final status, and returns the testcases
// that deserve a fotff bisection: steadily failing cases that passed on
// some earlier package and have no recorded first-fail package yet.
func handleFailResults(t tester.Tester, dev string, pkgName string, results []tester.Result) []string {
	var fotffTestCases []string
	for _, result := range results {
		// Cases already failing before this package were analyzed earlier;
		// skip them to avoid duplicate bisections.
		if record, ok := Records[result.TestCaseName]; ok && record.Status != tester.ResultPass {
			logrus.Warnf("test case %s had failed before, skip handle it", result.TestCaseName)
			continue
		}
		status := tester.ResultFail
		// Retry up to 3 times; a single pass reclassifies the case as an
		// occasional failure and stops retrying.
		for i := 0; i < 3; i++ {
			r, err := t.DoTestCase(dev, result.TestCaseName, context.TODO())
			if err != nil {
				logrus.Errorf("failed to do test case %s: %v", result.TestCaseName, err)
				continue
			}
			logrus.Infof("do testcase %s at %s done, result is %s", r.TestCaseName, dev, r.Status)
			if r.Status == tester.ResultPass {
				logrus.Warnf("testcase %s result is %s", r.TestCaseName, tester.ResultOccasionalFail)
				status = tester.ResultOccasionalFail
				break
			}
		}
		// Only steady failures with a known-good package and no recorded
		// first-fail are fotff candidates.
		if status == tester.ResultFail && Records[result.TestCaseName].LatestSuccessPkg != "" && Records[result.TestCaseName].EarliestFailPkg == "" {
			fotffTestCases = append(fotffTestCases, result.TestCaseName)
		}
		// NOTE(review): EarliestFailPkg is set to pkgName even for
		// occasional failures — confirm that is intended.
		Records[result.TestCaseName] = Record{
			UpdateTime:       time.Now().Format("2006-01-02 15:04:05"),
			Status:           status,
			LatestSuccessPkg: Records[result.TestCaseName].LatestSuccessPkg,
			EarliestFailPkg:  pkgName,
			FailIssueURL:     "",
		}
	}
	return fotffTestCases
}
+
+func Analysis(m pkg.Manager, t tester.Tester, pkgName string, testcases []string) {
+ for i, testcase := range testcases {
+ record := Records[testcase]
+ logrus.Infof("%s failed, the lastest success package is [%s], earliest fail package is [%s], now finding out the first fail...", testcase, record.LatestSuccessPkg, pkgName)
+ issueURL, err := FindOutTheFirstFail(m, t, testcase, record.LatestSuccessPkg, pkgName, testcases[i+1:]...)
+ if err != nil {
+ logrus.Errorf("failed to find out the first fail issue, err: %v", err)
+ issueURL = err.Error()
+ }
+ logrus.Infof("recording %s as a failure, the lastest success package is [%s], the earliest fail package is [%s], fail issue URL is [%s]", testcase, record.LatestSuccessPkg, pkgName, issueURL)
+ Records[testcase] = Record{
+ UpdateTime: time.Now().Format("2006-01-02 15:04:05"),
+ Status: tester.ResultFail,
+ LatestSuccessPkg: record.LatestSuccessPkg,
+ EarliestFailPkg: pkgName,
+ FailIssueURL: issueURL,
+ }
+ }
+}
diff --git a/tools/fotff/rec/report.go b/tools/fotff/rec/report.go
new file mode 100644
index 0000000000000000000000000000000000000000..f8413f15441b179191c53522c26cce9a5ad05811
--- /dev/null
+++ b/tools/fotff/rec/report.go
@@ -0,0 +1,119 @@
+/*
+ * Copyright (c) 2022 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package rec
+
+import (
+ "code.cloudfoundry.org/archiver/compressor"
+ "fmt"
+ "fotff/tester"
+ "fotff/utils"
+ "github.com/jedib0t/go-pretty/v6/table"
+ "github.com/jedib0t/go-pretty/v6/text"
+ "github.com/sirupsen/logrus"
+ "reflect"
+ "sort"
+)
+
+const css = `
+
+
+`
+
// Report renders all Records as a colored HTML table, attaches the
// compressed log/stdout files when present, and mails the report.
func Report(curPkg string, taskName string) {
	subject := fmt.Sprintf("[%s] %s test report", curPkg, taskName)
	rt := reflect.TypeOf(Record{})
	tb := table.NewWriter()
	tb.SetIndexColumn(rt.NumField() + 1)
	// Header: "test case" followed by the `col` tag of every exported
	// Record field, in declaration order.
	var row = table.Row{"test case"}
	for i := 0; i < rt.NumField(); i++ {
		f := rt.Field(i)
		if f.IsExported() {
			row = append(row, f.Tag.Get("col"))
		}
	}
	tb.AppendHeader(row)
	// Color whole rows by status: red for steady failures, yellow for
	// occasional ones.
	tb.SetRowPainter(func(row table.Row) text.Colors {
		for _, col := range row {
			if str, ok := col.(string); ok {
				if str == tester.ResultFail {
					return text.Colors{text.BgRed}
				} else if str == tester.ResultOccasionalFail {
					return text.Colors{text.BgYellow}
				}
			}
		}
		return nil
	})
	// One row per record: testcase name then its exported field values.
	var rows []table.Row
	for k, rec := range Records {
		var row = table.Row{k}
		rv := reflect.ValueOf(rec)
		for i := 0; i < rv.NumField(); i++ {
			if rv.Field(i).CanInterface() {
				row = append(row, rv.Field(i).Interface())
			}
		}
		rows = append(rows, row)
	}
	// Sort by testcase name so the report order is stable across runs.
	sort.Slice(rows, func(i, j int) bool {
		return rows[i][0].(string) < rows[j][0].(string)
	})
	tb.AppendRows(rows)
	// Compress and attach the runtime log files when available.
	c := compressor.NewTgz()
	var attrs []string
	if utils.LogFile != nil {
		if err := c.Compress(utils.LogFile.Name(), utils.LogFile.Name()+".tgz"); err != nil {
			logrus.Errorf("failed to compress %s: %v", utils.LogFile.Name(), err)
		} else {
			attrs = append(attrs, utils.LogFile.Name()+".tgz")
		}
	}
	if utils.StdoutFile != nil {
		if err := c.Compress(utils.StdoutFile.Name(), utils.StdoutFile.Name()+".tgz"); err != nil {
			logrus.Errorf("failed to compress %s: %v", utils.StdoutFile.Name(), err)
		} else {
			attrs = append(attrs, utils.StdoutFile.Name()+".tgz")
		}
	}
	if err := utils.SendMail(subject, css+tb.RenderHTML(), attrs...); err != nil {
		logrus.Errorf("failed to send report mail: %v", err)
		return
	}
	logrus.Infof("send mail successfully")
}
diff --git a/tools/fotff/rec/types.go b/tools/fotff/rec/types.go
new file mode 100644
index 0000000000000000000000000000000000000000..4061172e4ff91f31965dc3bac4a9b17c7e253437
--- /dev/null
+++ b/tools/fotff/rec/types.go
@@ -0,0 +1,24 @@
+/*
+ * Copyright (c) 2022 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package rec
+
// Record is the per-testcase state fotff persists between runs; the `col`
// tags are the column headers used when rendering the report table.
type Record struct {
	UpdateTime       string `col:"update time"` // last time this record changed
	Status           string `col:"status"`      // pass / occasional_fail / fail
	LatestSuccessPkg string `col:"last success package"`
	EarliestFailPkg  string `col:"earliest fail package"`
	FailIssueURL     string `col:"fail issue url"`
}
diff --git a/tools/fotff/res/res.go b/tools/fotff/res/res.go
new file mode 100644
index 0000000000000000000000000000000000000000..74a4ef4366c6c1530a8a79ed6c7c981bdfdb3d2c
--- /dev/null
+++ b/tools/fotff/res/res.go
@@ -0,0 +1,107 @@
+/*
+ * Copyright (c) 2022 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package res
+
+import (
+ "fmt"
+ "fotff/utils"
+ "strings"
+)
+
// Resources is the pool of test devices and build servers that fotff jobs
// draw from; exported fields come from the "resources" config section.
type Resources struct {
	DeviceSnList string `key:"device_sn_list"`
	AddrList     string `key:"build_server_addr_list" default:"127.0.0.1:22"`
	User         string `key:"build_server_user" default:"root"`
	Passwd       string `key:"build_server_password" default:"root"`
	// BuildWorkSpace must be absolute
	BuildWorkSpace string `key:"build_server_workspace" default:"/root/fotff/build_workspace"`
	devicePool     chan string // buffered channel used as a device semaphore
	serverPool     chan string // buffered channel used as a server semaphore
}
+
// BuildServerInfo is the connection info of one acquired build server.
type BuildServerInfo struct {
	Addr      string // host:port as listed in build_server_addr_list
	User      string
	Passwd    string
	WorkSpace string // absolute workspace path on the server
}
+
+var res Resources
+
+func init() {
+ utils.ParseFromConfigFile("resources", &res)
+ snList := strings.Split(res.DeviceSnList, ",")
+ addrList := strings.Split(res.AddrList, ",")
+ res.devicePool = make(chan string, len(snList))
+ for _, sn := range snList {
+ res.devicePool <- sn
+ }
+ res.serverPool = make(chan string, len(addrList))
+ for _, addr := range addrList {
+ res.serverPool <- addr
+ }
+}
+
+// Fake set 'n' fake packages and build servers.
+// Just for test only.
+func Fake(n int) {
+ var snList, addrList []string
+ for i := 0; i < n; i++ {
+ snList = append(snList, fmt.Sprintf("device%d", i))
+ addrList = append(addrList, fmt.Sprintf("server%d", i))
+ }
+ res.devicePool = make(chan string, len(snList))
+ for _, sn := range snList {
+ res.devicePool <- sn
+ }
+ res.serverPool = make(chan string, len(addrList))
+ for _, sn := range snList {
+ res.serverPool <- sn
+ }
+}
+
+func Num() int {
+ if cap(res.devicePool) < cap(res.serverPool) {
+ return cap(res.devicePool)
+ }
+ return cap(res.serverPool)
+}
+
// DeviceList returns all configured device serial numbers, regardless of
// whether they are currently checked out of the pool.
func DeviceList() []string {
	return strings.Split(res.DeviceSnList, ",")
}
+
// GetDevice blocks until a device is free, then returns its serial number.
// Callers must return it via ReleaseDevice.
func GetDevice() string {
	return <-res.devicePool
}
+
// ReleaseDevice returns a device obtained via GetDevice to the pool.
func ReleaseDevice(device string) {
	res.devicePool <- device
}
+
+func GetBuildServer() BuildServerInfo {
+ addr := <-res.serverPool
+ return BuildServerInfo{
+ Addr: addr,
+ User: res.User,
+ Passwd: res.Passwd,
+ WorkSpace: res.BuildWorkSpace,
+ }
+}
+
// ReleaseBuildServer returns a build server obtained via GetBuildServer.
func ReleaseBuildServer(info BuildServerInfo) {
	res.serverPool <- info.Addr
}
diff --git a/tools/fotff/tester/common/common.go b/tools/fotff/tester/common/common.go
new file mode 100644
index 0000000000000000000000000000000000000000..24a71fd3580180d6ba6d9231bb313a9b3679aefb
--- /dev/null
+++ b/tools/fotff/tester/common/common.go
@@ -0,0 +1,150 @@
+package common
+
+import (
+ "context"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "fotff/tester"
+ "fotff/utils"
+ "os"
+ "path/filepath"
+ "strings"
+
+ "github.com/sirupsen/logrus"
+)
+
const (
	// testResultPass is the result string the external tool emits for a
	// passing case; anything else is treated as a failure.
	testResultPass = "pass"
	// CLI flags understood by the external test tool.
	testCaseFlag   = "--test-case"
	deviceFlag     = "--device"
	resultPathFlag = "--result-path"
	resultFileFlag = "--result-file"
)
+
// Tester is the common tester for most kinds of tests: it shells out to an
// external tool and reads results back from a JSON file.
type Tester struct {
	Tool       string `key:"tool"`        // executable to invoke
	Param      string `key:"param"`       // space-separated extra arguments
	ResultPath string `key:"result_path"` // directory the tool writes results into
	ResultFile string `key:"result_file"` // result file name within ResultPath
}
+
// TestResult is the structure of one entry in the test result json file.
type TestResult struct {
	TestCase string `json:"test_case"`
	Result   string `json:"result"` // "pass" or anything else (treated as fail)
}
+
// NewTester builds a common Tester configured from the "common" section of
// the config file.
func NewTester() tester.Tester {
	t := &Tester{}
	utils.ParseFromConfigFile("common", t)
	return t
}
+
// TaskName identifies this tester's task in records and reports.
func (t *Tester) TaskName() string {
	return "common_tester"
}
+
// Prepare is a no-op: the common tester needs no per-package setup.
func (t *Tester) Prepare(version string, device string, ctx context.Context) error {
	return nil
}
+
+// DoTestTask run all test cases on the specified device
+func (t *Tester) DoTestTask(device string, ctx context.Context) ([]tester.Result, error) {
+ args := strings.Split(t.Param, " ")
+ if device != "" {
+ args = append(args, []string{deviceFlag, device}...)
+ }
+ args = append(args, []string{resultPathFlag, t.ResultPath}...)
+ args = append(args, []string{resultFileFlag, t.ResultFile}...)
+ if err := utils.ExecContext(ctx, t.Tool, args...); err != nil {
+ if errors.Is(err, context.Canceled) {
+ return nil, err
+ }
+ logrus.Errorf("Failed to do test task on device %s, error: %s", device, err.Error())
+ return nil, err
+ }
+
+ return t.processResult()
+}
+
// DoTestCase run the specified test case on the specified device via the
// external tool and returns its parsed result.
func (t *Tester) DoTestCase(device string, testCase string, ctx context.Context) (tester.Result, error) {
	args := strings.Split(t.Param, " ")
	args = append(args, []string{testCaseFlag, testCase}...)
	if device != "" {
		args = append(args, []string{deviceFlag, device}...)
	}
	args = append(args, []string{resultPathFlag, t.ResultPath}...)
	args = append(args, []string{resultFileFlag, t.ResultFile}...)
	defaultResult := tester.Result{}
	if err := utils.ExecContext(ctx, t.Tool, args...); err != nil {
		// Cancellation is propagated silently; other errors are logged.
		if errors.Is(err, context.Canceled) {
			return defaultResult, err
		}
		logrus.Errorf("Failed to do test case %s on device %s, error: %s", testCase, device, err.Error())
		return defaultResult, err
	}

	rs, err := t.processResult()
	if err != nil {
		return defaultResult, err
	}
	if len(rs) == 0 {
		return defaultResult, fmt.Errorf("failed to process test result: no test result found")
	}
	// Only the first entry is considered; it must match the requested case.
	if rs[0].TestCaseName != testCase {
		return defaultResult, fmt.Errorf("failed to process test result: no matched test result found")
	}

	logrus.Infof("test case %s on device %s finished, the result is %s", testCase, device, rs[0].Status)
	return rs[0], nil
}
+
+// DoTestCases run the specified test cases on the specified device
+func (t *Tester) DoTestCases(device string, testCases []string, ctx context.Context) ([]tester.Result, error) {
+ args := strings.Split(t.Param, " ")
+ args = append(args, testCaseFlag)
+ args = append(args, testCases...)
+ if device != "" {
+ args = append(args, []string{deviceFlag, device}...)
+ }
+ args = append(args, []string{resultPathFlag, t.ResultPath}...)
+ args = append(args, []string{resultFileFlag, t.ResultFile}...)
+ if err := utils.ExecContext(ctx, t.Tool, args...); err != nil {
+ if errors.Is(err, context.Canceled) {
+ return nil, err
+ }
+ logrus.Errorf("Failed to do test cases %v on device %s, error: %s", testCases, device, err.Error())
+ return nil, err
+ }
+
+ return t.processResult()
+}
+
+// processResult parse the test result file
+func (t *Tester) processResult() ([]tester.Result, error) {
+ resultFile := filepath.Join(t.ResultPath, t.ResultFile)
+ data, err := os.ReadFile(resultFile)
+ if err != nil {
+ logrus.Errorf("Failed to read from result file %s, error: %s", resultFile, err.Error())
+ return nil, err
+ }
+
+ var result []TestResult
+ if err := json.Unmarshal(data, &result); err != nil {
+ logrus.Errorf("Failed to unmarshal test result %s into json array, error: %s", string(data), err.Error())
+ return nil, err
+ }
+
+ var ret []tester.Result
+ for _, r := range result {
+ if r.Result == testResultPass {
+ ret = append(ret, tester.Result{TestCaseName: r.TestCase, Status: tester.ResultPass})
+ } else {
+ ret = append(ret, tester.Result{TestCaseName: r.TestCase, Status: tester.ResultFail})
+ }
+ }
+ return ret, nil
+}
diff --git a/tools/fotff/tester/manual/manual.go b/tools/fotff/tester/manual/manual.go
new file mode 100644
index 0000000000000000000000000000000000000000..e039cc1a642b3daa8b3299252d971b0bf1913274
--- /dev/null
+++ b/tools/fotff/tester/manual/manual.go
@@ -0,0 +1,88 @@
+/*
+ * Copyright (c) 2023 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package manual
+
+import (
+ "context"
+ "fmt"
+ "fotff/tester"
+ "fotff/utils"
+ "github.com/sirupsen/logrus"
+ "math/rand"
+ "strings"
+ "sync"
+ "time"
+)
+
+type Tester struct {
+ ResultLock sync.Mutex
+}
+
+func init() {
+ rand.Seed(time.Now().UnixNano())
+}
+
+func NewTester() tester.Tester {
+ ret := &Tester{}
+ utils.ParseFromConfigFile("manual", ret)
+ return ret
+}
+
+func (t *Tester) TaskName() string {
+ return "manual_test"
+}
+
+func (t *Tester) Prepare(pkgDir string, device string, ctx context.Context) error {
+ return nil
+}
+
+func (t *Tester) DoTestTask(deviceSN string, ctx context.Context) (ret []tester.Result, err error) {
+ return t.DoTestCases(deviceSN, []string{"default"}, ctx)
+}
+
// DoTestCase prompts the operator to execute the testcase by hand and
// loops until "pass" or "fail" is typed on stdin (case-insensitive).
// NOTE(review): ctx is not consulted, so this blocks indefinitely until
// valid input arrives — confirm that is acceptable for cancellation.
func (t *Tester) DoTestCase(deviceSN, testCase string, ctx context.Context) (ret tester.Result, err error) {
	if deviceSN == "" {
		deviceSN = "default"
	}
	// Serialize prompts so concurrent jobs don't interleave on stdin.
	t.ResultLock.Lock()
	defer t.ResultLock.Unlock()
	var answer string
	for {
		fmt.Printf("please do testcase %s on device %s manually and type the test result, 'pass' or 'fail':\n", testCase, deviceSN)
		if _, err := fmt.Scanln(&answer); err != nil {
			logrus.Errorf("failed to scan result: %v", err)
			continue
		}
		switch strings.ToUpper(strings.TrimSpace(answer)) {
		case "PASS":
			return tester.Result{TestCaseName: testCase, Status: tester.ResultPass}, nil
		case "FAIL":
			return tester.Result{TestCaseName: testCase, Status: tester.ResultFail}, nil
		default:
			// Anything else: re-prompt.
		}
	}
}
+
+func (t *Tester) DoTestCases(deviceSN string, testcases []string, ctx context.Context) (ret []tester.Result, err error) {
+ for _, testcase := range testcases {
+ r, err := t.DoTestCase(deviceSN, testcase, ctx)
+ if err != nil {
+ return nil, err
+ }
+ ret = append(ret, r)
+ }
+ return ret, nil
+}
diff --git a/tools/fotff/tester/mock/mock.go b/tools/fotff/tester/mock/mock.go
new file mode 100644
index 0000000000000000000000000000000000000000..93d9b89b94869a3116879c9bea675edb84ffe927
--- /dev/null
+++ b/tools/fotff/tester/mock/mock.go
@@ -0,0 +1,64 @@
+/*
+ * Copyright (c) 2022 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package mock
+
+import (
+ "context"
+ "fotff/tester"
+ "github.com/sirupsen/logrus"
+)
+
+type Tester struct{}
+
+func NewTester() tester.Tester {
+ return &Tester{}
+}
+
+func (t *Tester) TaskName() string {
+ return "mock"
+}
+
+func (t *Tester) Prepare(pkgDir string, device string, ctx context.Context) error {
+ return nil
+}
+
+func (t *Tester) DoTestTask(device string, ctx context.Context) ([]tester.Result, error) {
+ logrus.Infof("TEST_001 pass")
+ logrus.Warnf("TEST_002 pass")
+ logrus.Warnf("TEST_003 pass")
+ return []tester.Result{
+ {TestCaseName: "TEST_001", Status: tester.ResultPass},
+ {TestCaseName: "TEST_002", Status: tester.ResultPass},
+ {TestCaseName: "TEST_003", Status: tester.ResultPass},
+ }, nil
+}
+
// DoTestCase pretends to run a single testcase; it always passes.
func (t *Tester) DoTestCase(device string, testCase string, ctx context.Context) (tester.Result, error) {
	logrus.Warnf("%s pass", testCase)
	return tester.Result{TestCaseName: testCase, Status: tester.ResultPass}, nil
}
+
+func (t *Tester) DoTestCases(device string, testcases []string, ctx context.Context) ([]tester.Result, error) {
+ var ret []tester.Result
+ for _, testcase := range testcases {
+ r, err := t.DoTestCase(device, testcase, ctx)
+ if err != nil {
+ return nil, err
+ }
+ ret = append(ret, r)
+ }
+ return ret, nil
+}
diff --git a/tools/fotff/tester/pkg_available/pkg_available.go b/tools/fotff/tester/pkg_available/pkg_available.go
new file mode 100644
index 0000000000000000000000000000000000000000..602d550e7c79fba5f5a0a64e7d8fd4b8ad5361ad
--- /dev/null
+++ b/tools/fotff/tester/pkg_available/pkg_available.go
@@ -0,0 +1,81 @@
+/*
+ * Copyright (c) 2023 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package pkg_available
+
+import (
+ "context"
+ "fotff/tester"
+ "github.com/sirupsen/logrus"
+ "math/rand"
+ "os"
+ "strings"
+ "sync"
+ "time"
+)
+
+type Tester struct {
+ device2PkgDir sync.Map
+}
+
+func init() {
+ rand.Seed(time.Now().UnixNano())
+}
+
+func NewTester() tester.Tester {
+ ret := &Tester{}
+ return ret
+}
+
+func (t *Tester) TaskName() string {
+ return "pkg_available"
+}
+
+func (t *Tester) Prepare(pkgDir string, device string, ctx context.Context) error {
+ t.device2PkgDir.Store(device, pkgDir)
+ return nil
+}
+
+func (t *Tester) DoTestTask(deviceSN string, ctx context.Context) (ret []tester.Result, err error) {
+ return t.DoTestCases(deviceSN, []string{"pkg_available"}, ctx)
+}
+
+func (t *Tester) DoTestCase(deviceSN, testCase string, ctx context.Context) (ret tester.Result, err error) {
+ pkgDir, _ := t.device2PkgDir.Load(deviceSN)
+ es, err := os.ReadDir(pkgDir.(string))
+ if err != nil {
+ logrus.Errorf("can not read dir %s, testcase failed", pkgDir.(string))
+ return tester.Result{TestCaseName: testCase, Status: tester.ResultFail}, nil
+ }
+ for _, e := range es {
+ if strings.HasSuffix(e.Name(), ".img") {
+ logrus.Infof("find image in dir %s, package is avaliable, testcase pass", pkgDir.(string))
+ return tester.Result{TestCaseName: testCase, Status: tester.ResultPass}, nil
+ }
+ }
+ logrus.Infof("no images in dir %s, package is not avaliable, testcase failed", pkgDir.(string))
+ return tester.Result{TestCaseName: testCase, Status: tester.ResultFail}, nil
+}
+
+func (t *Tester) DoTestCases(deviceSN string, testcases []string, ctx context.Context) (ret []tester.Result, err error) {
+ for _, testcase := range testcases {
+ r, err := t.DoTestCase(deviceSN, testcase, ctx)
+ if err != nil {
+ return nil, err
+ }
+ ret = append(ret, r)
+ }
+ return ret, nil
+}
diff --git a/tools/fotff/tester/smoke/smoke.go b/tools/fotff/tester/smoke/smoke.go
new file mode 100644
index 0000000000000000000000000000000000000000..3bc5cb11b9cd8a4fe6d70b8f9a9246705a0e81f8
--- /dev/null
+++ b/tools/fotff/tester/smoke/smoke.go
@@ -0,0 +1,149 @@
+/*
+ * Copyright (c) 2022 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package smoke
+
+import (
+ "context"
+ "crypto/md5"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "fotff/tester"
+ "fotff/utils"
+ "github.com/sirupsen/logrus"
+ "math/rand"
+ "os"
+ "path/filepath"
+ "strconv"
+ "strings"
+ "time"
+)
+
+type Tester struct {
+ Py string `key:"py"`
+ Config string `key:"config"`
+ AnswerPath string `key:"answer_path"`
+ SavePath string `key:"save_path"`
+ ToolsPath string `key:"tools_path"`
+}
+
+func init() {
+ rand.Seed(time.Now().UnixNano())
+}
+
+func NewTester() tester.Tester {
+ ret := &Tester{}
+ utils.ParseFromConfigFile("smoke", ret)
+ return ret
+}
+
+func (t *Tester) TaskName() string {
+ return "smoke_test"
+}
+
+func (t *Tester) Prepare(pkgDir string, device string, ctx context.Context) error {
+ return nil
+}
+
// DoTestTask runs the whole smoke suite via the python runner, writing the
// report into a fresh random directory under SavePath, then parses it.
func (t *Tester) DoTestTask(deviceSN string, ctx context.Context) (ret []tester.Result, err error) {
	// Random report dir name so concurrent runs do not collide.
	reportDir := fmt.Sprintf("%X", md5.Sum([]byte(fmt.Sprintf("%d", rand.Int()))))
	if err := os.MkdirAll(filepath.Join(t.SavePath, reportDir), 0755); err != nil {
		return nil, err
	}
	args := []string{t.Py, "--config", t.Config, "--answer_path", t.AnswerPath, "--save_path", filepath.Join(t.SavePath, reportDir), "--tools_path", t.ToolsPath}
	if deviceSN != "" {
		args = append(args, "--device_num", deviceSN)
	}
	if err := utils.ExecContext(ctx, "python", args...); err != nil {
		// Cancellation is propagated as-is; real failures are logged.
		if errors.Is(err, context.Canceled) {
			return nil, err
		}
		logrus.Errorf("do test suite fail: %v", err)
		return nil, err
	}
	return t.readReport(reportDir)
}
+
+func (t *Tester) DoTestCase(deviceSN, testCase string, ctx context.Context) (ret tester.Result, err error) {
+ reportDir := fmt.Sprintf("%X", md5.Sum([]byte(fmt.Sprintf("%d", rand.Int()))))
+ if err := os.MkdirAll(filepath.Join(t.SavePath, reportDir), 0755); err != nil {
+ return ret, err
+ }
+ args := []string{t.Py, "--config", t.Config, "--answer_path", t.AnswerPath, "--save_path", filepath.Join(t.SavePath, reportDir), "--tools_path", t.ToolsPath, "--test_num", testCase}
+ if deviceSN != "" {
+ args = append(args, "--device_num", deviceSN)
+ }
+ if err := utils.ExecContext(ctx, "python", args...); err != nil {
+ if errors.Is(err, context.Canceled) {
+ return ret, err
+ }
+ logrus.Errorf("do test case %s fail: %v", testCase, err)
+ return ret, err
+ }
+ r, err := t.readReport(reportDir)
+ if len(r) == 0 {
+ return ret, fmt.Errorf("read latest report err, no result found")
+ }
+ if r[0].TestCaseName != testCase {
+ return ret, fmt.Errorf("read latest report err, no matched result found")
+ }
+ logrus.Infof("do testcase %s at %s done, result is %s", r[0].TestCaseName, deviceSN, r[0].Status)
+ return r[0], nil
+}
+
// DoTestCases runs the given smoke testcases in one python invocation and
// parses the shared report.
// NOTE(review): testcases are joined with spaces into a single --test_num
// argument — confirm the python runner splits that value itself.
func (t *Tester) DoTestCases(deviceSN string, testcases []string, ctx context.Context) (ret []tester.Result, err error) {
	// Random report dir name so concurrent runs do not collide.
	reportDir := fmt.Sprintf("%X", md5.Sum([]byte(fmt.Sprintf("%d", rand.Int()))))
	if err := os.MkdirAll(filepath.Join(t.SavePath, reportDir), 0755); err != nil {
		return nil, err
	}
	args := []string{t.Py, "--config", t.Config, "--answer_path", t.AnswerPath, "--save_path", filepath.Join(t.SavePath, reportDir), "--tools_path", t.ToolsPath, "--test_num", strings.Join(testcases, " ")}
	if deviceSN != "" {
		args = append(args, "--device_num", deviceSN)
	}
	if err := utils.ExecContext(ctx, "python", args...); err != nil {
		if errors.Is(err, context.Canceled) {
			return ret, err
		}
		logrus.Errorf("do test cases %v fail: %v", testcases, err)
		return ret, err
	}
	return t.readReport(reportDir)
}
+
+func (t *Tester) readReport(reportDir string) (ret []tester.Result, err error) {
+ data, err := os.ReadFile(filepath.Join(t.SavePath, reportDir, "result.json"))
+ if err != nil {
+ logrus.Errorf("read report json fail: %v", err)
+ return nil, err
+ }
+ var result []struct {
+ TestCaseName int `json:"test_case_name"`
+ Status string `json:"status"`
+ }
+ err = json.Unmarshal(data, &result)
+ if err != nil {
+ logrus.Errorf("unmarshal report xml fail: %v", err)
+ return nil, err
+ }
+ for _, r := range result {
+ if r.Status == "pass" {
+ ret = append(ret, tester.Result{TestCaseName: strconv.Itoa(r.TestCaseName), Status: tester.ResultPass})
+ } else {
+ ret = append(ret, tester.Result{TestCaseName: strconv.Itoa(r.TestCaseName), Status: tester.ResultFail})
+ }
+ }
+ return ret, err
+}
diff --git a/tools/fotff/tester/tester.go b/tools/fotff/tester/tester.go
new file mode 100644
index 0000000000000000000000000000000000000000..77eb783679d444ef58cbd26ec2c97d90663c9b9a
--- /dev/null
+++ b/tools/fotff/tester/tester.go
@@ -0,0 +1,46 @@
+/*
+ * Copyright (c) 2022 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package tester
+
+import "context"
+
// ResultStatus classifies the outcome of a single testcase run.
type ResultStatus string

// Possible testcase outcomes. They are untyped string constants so they
// compare directly against plain-string status fields elsewhere.
const (
	ResultPass           = `pass`
	ResultOccasionalFail = `occasional_fail`
	ResultFail           = `fail`
)

// Result is the outcome of running one testcase.
type Result struct {
	TestCaseName string       // testcase identifier
	Status       ResultStatus // pass / occasional_fail / fail
}

// Tester abstracts a test executor that fotff drives against a device.
type Tester interface {
	// TaskName returns the name of task which DoTestTask execute.
	TaskName() string
	// Prepare do some test preparations for one certain package
	Prepare(pkgDir string, device string, ctx context.Context) error
	// DoTestTask do a full test on given device.
	DoTestTask(device string, ctx context.Context) ([]Result, error)
	// DoTestCase do a single testcase on given device.
	DoTestCase(device string, testCase string, ctx context.Context) (Result, error)
	// DoTestCases do testcases on given device.
	DoTestCases(device string, testcases []string, ctx context.Context) ([]Result, error)
}

// NewFunc constructs a Tester implementation.
type NewFunc func() Tester
diff --git a/tools/fotff/tester/xdevice/xdevice.go b/tools/fotff/tester/xdevice/xdevice.go
new file mode 100644
index 0000000000000000000000000000000000000000..4cc7d8d7c50c829c4a2aa01a295dbf0334ee9610
--- /dev/null
+++ b/tools/fotff/tester/xdevice/xdevice.go
@@ -0,0 +1,162 @@
+/*
+ * Copyright (c) 2022 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package xdevice
+
+import (
+ "context"
+ "crypto/md5"
+ "encoding/xml"
+ "errors"
+ "fmt"
+ "fotff/tester"
+ "fotff/utils"
+ "github.com/sirupsen/logrus"
+ "math/rand"
+ "os"
+ "path/filepath"
+ "strings"
+ "time"
+)
+
// enableTestModeScript is executed via `hdc shell` to put the device into test
// mode: remount / read-write, enable ACE test mode and hilog debug, switch
// SELinux to permissive, then reboot so the settings take effect.
const enableTestModeScript = `mount -o rw,remount /; param set persist.ace.testmode.enabled 1; param set persist.sys.hilog.debug.on true; sed -i 's/enforcing/permissive/g' /system/etc/selinux/config; sync; reboot`

// Tester drives the xdevice (python -m xdevice) test framework. Fields are
// populated from the "xdevice" section of the ini file by
// utils.ParseFromConfigFile: `key` names the ini key, `default` the fallback.
type Tester struct {
	Task          string `key:"task" default:"acts"`
	Config        string `key:"config" default:"./config/user_config.xml"`
	TestCasesPath string `key:"test_cases_path" default:"./testcases"`
	ResourcePath  string `key:"resource_path" default:"./resource"`
}

// Report mirrors the structure of xdevice's summary_report.xml.
type Report struct {
	XMLName   xml.Name `xml:"testsuites"`
	TestSuite []struct {
		TestCase []struct {
			Name   string `xml:"name,attr"`
			Result string `xml:"result,attr"`
		} `xml:"testcase"`
	} `xml:"testsuite"`
}
+
// init seeds math/rand so the random report directory names differ between runs.
func init() {
	rand.Seed(time.Now().UnixNano())
}
+
+func NewTester() tester.Tester {
+ ret := &Tester{}
+ utils.ParseFromConfigFile("xdevice", ret)
+ return ret
+}
+
// TaskName returns the configured xdevice task name (e.g. "acts").
func (t *Tester) TaskName() string {
	return t.Task
}
+
+func (t *Tester) Prepare(pkgDir string, device string, ctx context.Context) (err error) {
+ logrus.Info("for xdevice test, try to enable test mode...")
+ if err := utils.HdcShell(enableTestModeScript, device, ctx); err != nil {
+ return err
+ }
+ time.Sleep(20 * time.Second) // usually, it takes about 20s to reboot into OpenHarmony
+ if connected := utils.WaitHDC(device, ctx); !connected {
+ logrus.Errorf("enable test mode at device %s done, but boot unnormally, hdc connection fail", device)
+ return fmt.Errorf("enable test mode at device %s done, but boot unnormally, hdc connection fail", device)
+ }
+ time.Sleep(10 * time.Second) // wait 10s more to ensure system has been started completely
+ logrus.Infof("enable test mode at device %s successfully", device)
+ return nil
+}
+
+func (t *Tester) DoTestTask(deviceSN string, ctx context.Context) (ret []tester.Result, err error) {
+ reportDir := fmt.Sprintf("%X", md5.Sum([]byte(fmt.Sprintf("%d", rand.Int()))))
+ args := []string{"-m", "xdevice", "run", t.Task, "-c", t.Config, "-tcpath", t.TestCasesPath, "-respath", t.ResourcePath, "-rp", reportDir}
+ if deviceSN != "" {
+ args = append(args, "-sn", deviceSN)
+ }
+ if err := utils.ExecContext(ctx, "python", args...); err != nil {
+ if errors.Is(err, context.Canceled) {
+ return nil, err
+ }
+ logrus.Errorf("do test suite fail: %v", err)
+ return nil, err
+ }
+ return t.readReport(reportDir)
+}
+
+func (t *Tester) DoTestCase(deviceSN, testCase string, ctx context.Context) (ret tester.Result, err error) {
+ reportDir := fmt.Sprintf("%X", md5.Sum([]byte(fmt.Sprintf("%d", rand.Int()))))
+ args := []string{"-m", "xdevice", "run", "-l", testCase, "-c", t.Config, "-tcpath", t.TestCasesPath, "-respath", t.ResourcePath, "-rp", reportDir}
+ if deviceSN != "" {
+ args = append(args, "-sn", deviceSN)
+ }
+ if err := utils.ExecContext(ctx, "python", args...); err != nil {
+ if errors.Is(err, context.Canceled) {
+ return ret, err
+ }
+ logrus.Errorf("do test case %s fail: %v", testCase, err)
+ return ret, err
+ }
+ r, err := t.readReport(reportDir)
+ if len(r) == 0 {
+ return ret, fmt.Errorf("read latest report err, no result found")
+ }
+ if r[0].TestCaseName != testCase {
+ return ret, fmt.Errorf("read latest report err, no matched result found")
+ }
+ logrus.Infof("do testcase %s at %s done, result is %s", r[0].TestCaseName, deviceSN, r[0].Status)
+ return r[0], nil
+}
+
+func (t *Tester) DoTestCases(deviceSN string, testcases []string, ctx context.Context) (ret []tester.Result, err error) {
+ reportDir := fmt.Sprintf("%X", md5.Sum([]byte(fmt.Sprintf("%d", rand.Int()))))
+ args := []string{"-m", "xdevice", "run", "-l", strings.Join(testcases, ";"), "-c", t.Config, "-tcpath", t.TestCasesPath, "-respath", t.ResourcePath, "-rp", reportDir}
+ if deviceSN != "" {
+ args = append(args, "-sn", deviceSN)
+ }
+ if err := utils.ExecContext(ctx, "python", args...); err != nil {
+ if errors.Is(err, context.Canceled) {
+ return ret, err
+ }
+ logrus.Errorf("do test cases %v fail: %v", testcases, err)
+ return ret, err
+ }
+ return t.readReport(reportDir)
+}
+
+func (t *Tester) readReport(reportDir string) (ret []tester.Result, err error) {
+ data, err := os.ReadFile(filepath.Join("reports", reportDir, "summary_report.xml"))
+ if err != nil {
+ logrus.Errorf("read report xml fail: %v", err)
+ return nil, err
+ }
+ var report Report
+ err = xml.Unmarshal(data, &report)
+ if err != nil {
+ logrus.Errorf("unmarshal report xml fail: %v", err)
+ return nil, err
+ }
+ for _, s := range report.TestSuite {
+ for _, c := range s.TestCase {
+ var status tester.ResultStatus
+ if c.Result == "true" {
+ status = tester.ResultPass
+ } else {
+ status = tester.ResultFail
+ }
+ ret = append(ret, tester.Result{TestCaseName: c.Name, Status: status})
+ }
+ }
+ return ret, err
+}
diff --git a/tools/fotff/utils/exec.go b/tools/fotff/utils/exec.go
new file mode 100644
index 0000000000000000000000000000000000000000..76857e28435cecfe747874fdce630b452c10020a
--- /dev/null
+++ b/tools/fotff/utils/exec.go
@@ -0,0 +1,90 @@
+/*
+ * Copyright (c) 2022 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package utils
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "github.com/sirupsen/logrus"
+ "io"
+ "os"
+ "os/exec"
+ "time"
+)
+
+func ExecContext(ctx context.Context, name string, args ...string) error {
+ ctx, fn := context.WithTimeout(ctx, 6*time.Hour)
+ defer fn()
+ if err := execContext(ctx, name, args...); err != nil {
+ if errors.Is(err, context.Canceled) {
+ return err
+ }
+ logrus.Errorf("exec failed: %v, try again...", err)
+ return execContext(ctx, name, args...)
+ }
+ return nil
+}
+
+func execContext(ctx context.Context, name string, args ...string) error {
+ cmdStr := append([]string{name}, args...)
+ logrus.Infof("cmd: %s", cmdStr)
+ cmd := exec.CommandContext(ctx, name, args...)
+ stdout, err := cmd.StdoutPipe()
+ if err != nil {
+ return err
+ }
+ stderr, err := cmd.StderrPipe()
+ if err != nil {
+ return err
+ }
+ if err := cmd.Start(); err != nil {
+ return err
+ }
+ fmt.Printf("[%s] exec %s :\n", time.Now(), cmdStr)
+ go io.Copy(os.Stdout, stdout)
+ go io.Copy(os.Stderr, stderr)
+ return cmd.Wait()
+}
+
+func ExecCombinedOutputContext(ctx context.Context, name string, args ...string) ([]byte, error) {
+ ctx, fn := context.WithTimeout(ctx, 6*time.Hour)
+ defer fn()
+ out, err := execCombinedOutputContext(ctx, name, args...)
+ if err != nil {
+ if errors.Is(err, context.Canceled) {
+ return out, err
+ }
+ logrus.Errorf("exec failed: %v, try again...", err)
+ return execCombinedOutputContext(ctx, name, args...)
+ }
+ return out, nil
+}
+
+func execCombinedOutputContext(ctx context.Context, name string, args ...string) ([]byte, error) {
+ cmdStr := append([]string{name}, args...)
+ logrus.Infof("cmd: %s", cmdStr)
+ out, err := exec.CommandContext(ctx, name, args...).CombinedOutput()
+ fmt.Printf("[%s] exec %s :\n", time.Now(), cmdStr)
+ return out, err
+}
+
+func SleepContext(duration time.Duration, ctx context.Context) {
+ select {
+ case <-time.NewTimer(duration).C:
+ case <-ctx.Done():
+ }
+}
diff --git a/tools/fotff/utils/hdc.go b/tools/fotff/utils/hdc.go
new file mode 100644
index 0000000000000000000000000000000000000000..89b9e6173097c6e9973e1dee599fc94cdee20f16
--- /dev/null
+++ b/tools/fotff/utils/hdc.go
@@ -0,0 +1,81 @@
+package utils
+
+import (
+ "context"
+ "errors"
+ "github.com/sirupsen/logrus"
+ "os/exec"
+ "strings"
+ "time"
+)
+
// hdc is the resolved filesystem path of the hdc (or hdc_std) executable.
var hdc string

// init locates the hdc binary on PATH ("hdc" preferred, "hdc_std" as
// fallback) and panics if neither is installed, since every helper in this
// file shells out to it.
func init() {
	if hdc, _ = exec.LookPath("hdc"); hdc == "" {
		hdc, _ = exec.LookPath("hdc_std")
	}
	if hdc == "" {
		logrus.Panicf("can not find 'hdc', please install")
	}
}
+
// WaitHDC repeatedly restarts the hdc server and polls `hdc list targets`
// until the given device appears (or, when device is "", until any target
// appears). It gives up after 20 seconds or when ctx is done, and reports
// whether a connection was seen.
func WaitHDC(device string, ctx context.Context) bool {
	ctx, cancelFn := context.WithTimeout(ctx, 20*time.Second)
	defer cancelFn()
	for {
		// Bail out once the 20s budget (or the caller's ctx) is exhausted.
		select {
		case <-ctx.Done():
			return false
		default:
		}
		// Restart the hdc server each round to recover from a wedged daemon.
		ExecContext(ctx, hdc, "kill")
		time.Sleep(time.Second)
		ExecContext(ctx, hdc, "start")
		time.Sleep(time.Second)
		out, err := ExecCombinedOutputContext(ctx, hdc, "list", "targets")
		if err != nil {
			if errors.Is(err, context.Canceled) {
				return false
			}
			logrus.Errorf("failed to list hdc targets: %s, %s", string(out), err)
			continue
		}
		lines := strings.Fields(string(out))
		for _, dev := range lines {
			if dev == "[Empty]" {
				// hdc prints "[Empty]" when no target is attached.
				logrus.Warn("can not find any hdc targets")
				break
			}
			if device == "" || dev == device {
				return true
			}
		}
		logrus.Infof("%s not found", device)
	}
}
+
+func TryRebootToLoader(device string, ctx context.Context) error {
+ logrus.Infof("try to reboot %s to loader...", device)
+ defer time.Sleep(5 * time.Second)
+ if connected := WaitHDC(device, ctx); connected {
+ if device == "" {
+ return ExecContext(ctx, hdc, "shell", "reboot", "loader")
+ } else {
+ return ExecContext(ctx, hdc, "-t", device, "shell", "reboot", "loader")
+ }
+ }
+ if err := ctx.Err(); err != nil {
+ return err
+ }
+ logrus.Warn("can not find target hdc device, assume it has been in loader mode")
+ return nil
+}
+
+func HdcShell(cmd, device string, ctx context.Context) error {
+ if device == "" {
+ return ExecContext(ctx, hdc, "shell", cmd)
+ } else {
+ return ExecContext(ctx, hdc, "-t", device, "shell", cmd)
+ }
+}
diff --git a/tools/fotff/utils/http.go b/tools/fotff/utils/http.go
new file mode 100644
index 0000000000000000000000000000000000000000..4b4bfcbe019967c9c40dd09233f9d04146d81c8b
--- /dev/null
+++ b/tools/fotff/utils/http.go
@@ -0,0 +1,81 @@
+/*
+ * Copyright (c) 2022 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package utils
+
+import (
+ "bytes"
+ "fmt"
+ "github.com/sirupsen/logrus"
+ "io"
+ "net/http"
+ "time"
+)
+
+func DoSimpleHttpReqRaw(method string, url string, body []byte, header map[string]string) (response *http.Response, err error) {
+ maxRetry := len(proxyList)
+ if maxRetry < 3 {
+ maxRetry = 3
+ }
+ for i := 0; i < maxRetry; i++ {
+ if response, err = doSimpleHttpReqImpl(method, url, body, header); err == nil {
+ return
+ }
+ time.Sleep(time.Second)
+ }
+ return
+}
+
+func DoSimpleHttpReq(method string, url string, body []byte, header map[string]string) (ret []byte, err error) {
+ var resp *http.Response
+ maxRetry := len(proxyList)
+ if maxRetry < 3 {
+ maxRetry = 3
+ }
+ for i := 0; i < maxRetry; i++ {
+ if resp, err = doSimpleHttpReqImpl(method, url, body, header); err == nil {
+ ret, err = io.ReadAll(resp.Body)
+ resp.Body.Close()
+ return
+ }
+ time.Sleep(time.Second)
+ }
+ return
+}
+
+func doSimpleHttpReqImpl(method string, url string, body []byte, header map[string]string) (response *http.Response, err error) {
+ logrus.Infof("%s %s", method, url)
+ req, err := http.NewRequest(method, url, bytes.NewReader(body))
+ if err != nil {
+ return nil, err
+ }
+ for k, v := range header {
+ req.Header.Set(k, v)
+ }
+ resp, err := proxyClient.Do(req)
+ if err != nil {
+ return nil, err
+ }
+ if resp.StatusCode >= 300 {
+ defer resp.Body.Close()
+ data, _ := io.ReadAll(resp.Body)
+ if resp.StatusCode == http.StatusProxyAuthRequired || resp.StatusCode == http.StatusForbidden {
+ SwitchProxy()
+ }
+ logrus.Errorf("%s %s: code: %d body: %s", method, url, resp.StatusCode, string(data))
+ return nil, fmt.Errorf("%s %s: code: %d body: %s", method, url, resp.StatusCode, string(data))
+ }
+ return resp, nil
+}
diff --git a/tools/fotff/utils/ini.go b/tools/fotff/utils/ini.go
new file mode 100644
index 0000000000000000000000000000000000000000..b0d5e67fdc2380a00e0c59d610a72a438c12be56
--- /dev/null
+++ b/tools/fotff/utils/ini.go
@@ -0,0 +1,70 @@
+/*
+ * Copyright (c) 2022 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package utils
+
+import (
+ "github.com/Unknwon/goconfig"
+ "github.com/sirupsen/logrus"
+ "reflect"
+ "strings"
+)
+
+// ParseFromConfigFile parse ini file and set values by the tag of fields.
+// 'p' must be a pointer to the given structure, otherwise will panic.
+// Only process its string fields and its sub structs.
+func ParseFromConfigFile(section string, p any) {
+ conf, err := goconfig.LoadConfigFile("fotff.ini")
+ if err != nil {
+ logrus.Warnf("load config file err: %v", err)
+ }
+ rv := reflect.ValueOf(p)
+ rt := reflect.TypeOf(p)
+ for i := 0; i < rv.Elem().NumField(); i++ {
+ switch rt.Elem().Field(i).Type.Kind() {
+ case reflect.String:
+ key := rt.Elem().Field(i).Tag.Get("key")
+ if key == "" {
+ continue
+ }
+ var v string
+ if conf != nil {
+ v, err = conf.GetValue(section, key)
+ }
+ if conf == nil || err != nil {
+ v = rt.Elem().Field(i).Tag.Get("default")
+ }
+ rv.Elem().Field(i).SetString(v)
+ case reflect.Slice:
+ if rt.Elem().Field(i).Type.Elem().Kind() != reflect.String {
+ break
+ }
+ key := rt.Elem().Field(i).Tag.Get("key")
+ if key == "" {
+ continue
+ }
+ var v string
+ if conf != nil {
+ v, err = conf.GetValue(section, key)
+ }
+ if conf == nil || err != nil {
+ v = rt.Elem().Field(i).Tag.Get("default")
+ }
+ rv.Elem().Field(i).Set(reflect.ValueOf(strings.Split(v, ",")))
+ case reflect.Struct:
+ ParseFromConfigFile(section, rv.Elem().Field(i).Addr().Interface())
+ }
+ }
+}
diff --git a/tools/fotff/utils/log.go b/tools/fotff/utils/log.go
new file mode 100644
index 0000000000000000000000000000000000000000..9c9b8d64cb44a88d1bf62b0763e96c424edfc0bd
--- /dev/null
+++ b/tools/fotff/utils/log.go
@@ -0,0 +1,97 @@
+/*
+ * Copyright (c) 2022 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package utils
+
+import (
+ "fmt"
+ "github.com/sirupsen/logrus"
+ "os"
+ "path/filepath"
+ "runtime"
+ "strings"
+)
+
// LogFile is the current logrus output file (nil when logging to stdout).
var LogFile *os.File
// StdoutFile is the file currently receiving redirected stdout/stderr.
var StdoutFile *os.File
// osStdout/osStderr keep the real process streams so they can be restored.
var osStdout, osStderr = os.Stdout, os.Stderr
+
// init creates the logs directory and configures logrus: colored output,
// full timestamps, and a caller formatter that reduces each report entry to
// "func()" and "file:line".
func init() {
	if err := os.MkdirAll("logs", 0750); err != nil {
		logrus.Errorf("can not make logs dir: %v", err)
		return
	}
	logrus.SetOutput(os.Stdout)
	logrus.SetReportCaller(true)
	logrus.SetFormatter(&logrus.TextFormatter{
		ForceColors:     true,
		FullTimestamp:   true,
		TimestampFormat: "2006-01-02 15:04:05",
		CallerPrettyfier: func(f *runtime.Frame) (function string, file string) {
			// Keep only the bare function name and the base file name.
			funcName := strings.Split(f.Function, ".")
			fn := funcName[len(funcName)-1]
			_, filename := filepath.Split(f.File)
			return fmt.Sprintf("%s()", fn), fmt.Sprintf("%s:%d", filename, f.Line)
		},
	})
}
+
+func ResetLogOutput() {
+ logrus.Info("now log to os stdout...")
+ logrus.SetOutput(osStdout)
+ if LogFile != nil {
+ LogFile.Close()
+ }
+ if StdoutFile != nil {
+ StdoutFile.Close()
+ }
+ LogFile, StdoutFile, os.Stdout, os.Stderr = nil, nil, osStdout, osStderr
+}
+
+func SetLogOutput(pkg string) {
+ file := filepath.Join("logs", pkg+".log")
+ var f *os.File
+ var err error
+ if _, err = os.Stat(file); err == nil {
+ f, err = os.OpenFile(file, os.O_RDWR|os.O_APPEND, 0666)
+ } else {
+ f, err = os.Create(file)
+ }
+ if err != nil {
+ logrus.Errorf("failed to open new log file %s: %v", file, err)
+ return
+ }
+ logrus.Infof("now log to %s", file)
+ logrus.SetOutput(f)
+ if LogFile != nil {
+ LogFile.Close()
+ }
+ LogFile = f
+ stdout := filepath.Join("logs", fmt.Sprintf("%s_stdout.log", pkg))
+ if _, err = os.Stat(stdout); err == nil {
+ f, err = os.OpenFile(stdout, os.O_RDWR|os.O_APPEND, 0666)
+ } else {
+ f, err = os.Create(stdout)
+ }
+ if err != nil {
+ logrus.Errorf("failed to open new stdout log file %s: %v", stdout, err)
+ return
+ }
+ if StdoutFile != nil {
+ StdoutFile.Close()
+ }
+ StdoutFile, os.Stdout, os.Stderr = f, f, f
+ logrus.Infof("re-directing stdout and stderr to %s...", stdout)
+}
diff --git a/tools/fotff/utils/mail.go b/tools/fotff/utils/mail.go
new file mode 100644
index 0000000000000000000000000000000000000000..5bf2485538501d76868342fa0e751974399033dd
--- /dev/null
+++ b/tools/fotff/utils/mail.go
@@ -0,0 +1,67 @@
+/*
+ * Copyright (c) 2022 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package utils
+
+import (
+ "crypto/tls"
+ "fmt"
+ "github.com/sirupsen/logrus"
+ "gopkg.in/gomail.v2"
+ "strconv"
+ "strings"
+)
+
// MailConfig holds SMTP settings loaded from the "mail" ini section.
type MailConfig struct {
	Host     string `key:"host" default:""`
	Port     string `key:"port" default:""`
	port     int // Port parsed to an int by init
	User     string `key:"user" default:""`
	Password string `key:"password" default:""`
	From     string `key:"from" default:""`
	To       string `key:"to" default:""`
	toList   []string // To split on ","
}

// mailConfig is the process-wide mail configuration, populated in init.
var mailConfig MailConfig
+
// init loads the "mail" section; when a host is configured it parses the
// port (panicking on a bad value, since mail would otherwise fail silently
// later) and splits the comma-separated recipient list.
func init() {
	ParseFromConfigFile("mail", &mailConfig)
	if mailConfig.Host != "" {
		var err error
		if mailConfig.port, err = strconv.Atoi(mailConfig.Port); err != nil {
			panic(fmt.Errorf("parse mail port err: %v", err))
		}
		mailConfig.toList = strings.Split(mailConfig.To, ",")
	}
}
+
+func SendMail(subject string, body string, attachments ...string) error {
+ if mailConfig.Host == "" {
+ logrus.Info("mail not configured, do nothing")
+ return nil
+ }
+ dail := gomail.NewDialer(mailConfig.Host, mailConfig.port, mailConfig.User, mailConfig.Password)
+ dail.TLSConfig = &tls.Config{InsecureSkipVerify: true, ServerName: mailConfig.Host}
+ msg := gomail.NewMessage()
+ msg.SetBody("text/html", body)
+ msg.SetHeader("From", mailConfig.From)
+ msg.SetHeader("To", mailConfig.toList...)
+ msg.SetHeader("Subject", subject)
+ for _, a := range attachments {
+ msg.Attach(a)
+ }
+ return dail.DialAndSend(msg)
+}
diff --git a/tools/fotff/utils/pprof.go b/tools/fotff/utils/pprof.go
new file mode 100644
index 0000000000000000000000000000000000000000..8c505c4112dbba3d416bb1e274eafa2870f1c406
--- /dev/null
+++ b/tools/fotff/utils/pprof.go
@@ -0,0 +1,26 @@
+package utils
+
+import (
+ "github.com/sirupsen/logrus"
+ "net"
+ "net/http"
+ _ "net/http/pprof"
+ "strconv"
+)
+
+func EnablePprof() {
+ var cfg struct {
+ Enable string `key:"enable" default:"true"`
+ Port string `key:"port" default:"80"`
+ }
+ ParseFromConfigFile("pprof", &cfg)
+ if enable, _ := strconv.ParseBool(cfg.Enable); !enable {
+ return
+ }
+ server := &http.Server{Addr: net.JoinHostPort("localhost", cfg.Port)}
+ go func() {
+ if err := server.ListenAndServe(); err != nil {
+ logrus.Errorf("server.ListenAndServe returns error: %v", err)
+ }
+ }()
+}
diff --git a/tools/fotff/utils/proxy.go b/tools/fotff/utils/proxy.go
new file mode 100644
index 0000000000000000000000000000000000000000..25f88c7607e714966d7af1a0aaef01b9b5ad6ab6
--- /dev/null
+++ b/tools/fotff/utils/proxy.go
@@ -0,0 +1,88 @@
+/*
+ * Copyright (c) 2022 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package utils
+
+import (
+ "fmt"
+ "github.com/sirupsen/logrus"
+ "net/http"
+ "net/url"
+ "strings"
+ "sync"
+ "time"
+)
+
// ProxyConfig holds HTTP proxy settings from the "proxy" ini section.
// ServerList is a comma-separated list of host:port entries.
type ProxyConfig struct {
	ServerList string `key:"server_list" default:""`
	User       string `key:"user" default:""`
	Password   string `key:"password" default:""`
}

// proxyClient is the HTTP client used for all requests; SwitchProxy replaces
// it whenever the active proxy changes.
var proxyClient = http.DefaultClient
var (
	proxyUser     string
	proxyPassword string
	proxyList     []string   // parsed ServerList entries
	proxyIndex    int        // index of the proxy currently in use
	proxyLock     sync.Mutex // guards proxyIndex rotation
)
+
+func init() {
+ var config ProxyConfig
+ ParseFromConfigFile("proxy", &config)
+ if len(config.ServerList) != 0 {
+ proxyList = strings.Split(config.ServerList, ",")
+ }
+ proxyUser = config.User
+ proxyPassword = config.Password
+ proxyIndex = len(proxyList)
+ SwitchProxy()
+ t := time.NewTicker(6 * time.Hour)
+ go func() {
+ <-t.C
+ proxyLock.Lock()
+ proxyIndex = len(proxyList)
+ proxyLock.Unlock()
+ }()
+}
+
+func SwitchProxy() {
+ if len(proxyList) == 0 {
+ return
+ }
+ proxyLock.Lock()
+ defer proxyLock.Unlock()
+ proxyIndex++
+ if proxyIndex >= len(proxyList) {
+ proxyIndex = 0
+ }
+ var proxyURL *url.URL
+ var err error
+ logrus.Infof("switching proxy to %s", proxyList[proxyIndex])
+ if proxyUser == "" {
+ proxyURL, err = url.Parse(fmt.Sprintf("http://%s", proxyList[proxyIndex]))
+ } else {
+ proxyURL, err = url.Parse(fmt.Sprintf("http://%s:%s@%s", proxyUser, url.QueryEscape(proxyPassword), proxyList[proxyIndex]))
+ }
+ if err != nil {
+ logrus.Errorf("failed to parse proxy url, err: %v", err)
+ }
+ proxyClient = &http.Client{
+ Transport: &http.Transport{
+ Proxy: http.ProxyURL(proxyURL),
+ },
+ }
+}
diff --git a/tools/fotff/utils/runtime.go b/tools/fotff/utils/runtime.go
new file mode 100644
index 0000000000000000000000000000000000000000..40e499d399b43722e7f3283b40874697bf9f781f
--- /dev/null
+++ b/tools/fotff/utils/runtime.go
@@ -0,0 +1,56 @@
+/*
+ * Copyright (c) 2022 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package utils
+
+import (
+ "fmt"
+ "github.com/patrickmn/go-cache"
+ "os"
+ "path/filepath"
+ "time"
+)
+
// runtimeDir is the directory holding persisted runtime state.
var runtimeDir = `.fotff`

// runtimeCache holds cross-run key/value state, persisted to fotff.cache;
// entries expire after 24h and expired items are purged hourly.
var runtimeCache = cache.New(24*time.Hour, time.Hour)
+
// sectionKey namespaces a cache key by its config section: "__<section>__<key>__".
func sectionKey(section, key string) string {
	return fmt.Sprintf("__%s__%s__", section, key)
}
+
// init ensures the runtime dir exists (fatal otherwise) and best-effort loads
// the persisted cache; a missing or unreadable cache file is ignored.
func init() {
	if err := os.MkdirAll(runtimeDir, 0750); err != nil {
		panic(err)
	}
	runtimeCache.LoadFile(filepath.Join(runtimeDir, "fotff.cache"))
}
+
// CacheGet returns the cached value for (section, k) and whether it was found.
func CacheGet(section string, k string) (v any, found bool) {
	return runtimeCache.Get(sectionKey(section, k))
}
+
// CacheSet stores v under (section, k) with the default expiration and
// persists the whole cache to disk, returning any save error.
func CacheSet(section string, k string, v any) error {
	runtimeCache.Set(sectionKey(section, k), v, cache.DefaultExpiration)
	return runtimeCache.SaveFile(filepath.Join(runtimeDir, "fotff.cache"))
}
+
// WriteRuntimeData writes a named blob under the runtime dir (mode 0640).
func WriteRuntimeData(name string, data []byte) error {
	return os.WriteFile(filepath.Join(runtimeDir, name), data, 0640)
}
+
// ReadRuntimeData reads a named blob previously written to the runtime dir.
func ReadRuntimeData(name string) ([]byte, error) {
	return os.ReadFile(filepath.Join(runtimeDir, name))
}
diff --git a/tools/fotff/utils/ssh.go b/tools/fotff/utils/ssh.go
new file mode 100644
index 0000000000000000000000000000000000000000..ad255d0ed5b45a25c9e2e521c959ce8bda4579b5
--- /dev/null
+++ b/tools/fotff/utils/ssh.go
@@ -0,0 +1,164 @@
+/*
+ * Copyright (c) 2022 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package utils
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "github.com/pkg/sftp"
+ "github.com/sirupsen/logrus"
+ "golang.org/x/crypto/ssh"
+ "io"
+ "os"
+ "path/filepath"
+ "time"
+)
+
// newSSHClient dials an SSH connection to addr using password authentication.
// NOTE(review): host keys are not verified (InsecureIgnoreHostKey) — this is
// only acceptable on a trusted network; confirm before wider use.
func newSSHClient(addr string, user string, passwd string) (*ssh.Client, error) {
	config := &ssh.ClientConfig{
		User:            user,
		Auth:            []ssh.AuthMethod{ssh.Password(passwd)},
		HostKeyCallback: ssh.InsecureIgnoreHostKey(),
	}
	config.SetDefaults()
	return ssh.Dial("tcp", addr, config)
}
+
+func RunCmdViaSSHContext(ctx context.Context, addr string, user string, passwd string, cmd string) (err error) {
+ ctx, fn := context.WithTimeout(ctx, 6*time.Hour)
+ defer fn()
+ if err := RunCmdViaSSHContextNoRetry(ctx, addr, user, passwd, cmd); err != nil {
+ if errors.Is(err, context.Canceled) {
+ return err
+ }
+ logrus.Errorf("exec cmd via SSH at %s failed: %v, try again...", addr, err)
+ return RunCmdViaSSHContextNoRetry(ctx, addr, user, passwd, cmd)
+ }
+ return nil
+}
+
// RunCmdViaSSHContextNoRetry runs cmd in a remote shell on addr, streaming
// remote stdout/stderr to the local ones, and waits for the shell to exit.
// The session is force-closed when ctx is done; in that case ctx.Err() is
// returned (set by the deferred func, which runs after session.Wait).
func RunCmdViaSSHContextNoRetry(ctx context.Context, addr string, user string, passwd string, cmd string) (err error) {
	exit := make(chan struct{})
	client, err := newSSHClient(addr, user, passwd)
	if err != nil {
		logrus.Errorf("new SSH client to %s err: %v", addr, err)
		return err
	}
	defer client.Close()
	session, err := client.NewSession()
	if err != nil {
		return err
	}
	// On return, report ctx cancellation as the error (overrides Wait's err).
	defer func() {
		select {
		case <-ctx.Done():
			err = ctx.Err()
		default:
		}
	}()
	// Closing exit (deferred below) stops the watchdog goroutine; the
	// watchdog closes the session early when ctx is cancelled.
	defer close(exit)
	go func() {
		select {
		case <-ctx.Done():
		case <-exit:
		}
		session.Close()
	}()
	logrus.Infof("run at %s: %s", addr, cmd)
	stdin, err := session.StdinPipe()
	if err != nil {
		return err
	}
	defer stdin.Close()
	stdout, err := session.StdoutPipe()
	if err != nil {
		return err
	}
	stderr, err := session.StderrPipe()
	if err != nil {
		return err
	}
	if err := session.Shell(); err != nil {
		return err
	}
	// Feed the command plus "exit $?" so the remote shell terminates and
	// Wait observes the command's status.
	cmd = fmt.Sprintf("%s\nexit $?\n", cmd)
	go stdin.Write([]byte(cmd))
	go io.Copy(os.Stdout, stdout)
	go io.Copy(os.Stderr, stderr)
	fmt.Printf("[%s] exec at %s %s :\n", time.Now(), addr, cmd)
	return session.Wait()
}
+
// Direct indicates the direction of an SFTP file transfer.
type Direct string

const (
	Download Direct = "download" // remote -> local
	Upload   Direct = "upload"   // local -> remote
)
+
// TransFileViaSSH copies one file between the local machine and addr over
// SFTP, in the direction given by verb. The destination file is removed and
// its parent directory created before the copy; size, duration and throughput
// are logged on success.
func TransFileViaSSH(verb Direct, addr string, user string, passwd string, remoteFile string, localFile string) error {
	c, err := newSSHClient(addr, user, passwd)
	if err != nil {
		logrus.Errorf("new SSH client to %s err: %v", addr, err)
		return err
	}
	defer c.Close()
	client, err := sftp.NewClient(c)
	if err != nil {
		logrus.Errorf("new SFTP client to %s err: %v", addr, err)
		return err
	}
	defer client.Close()
	// prep is only used in log messages ("... at addr to/from localFile").
	var prep string
	var src, dst io.ReadWriteCloser
	if verb == Download {
		prep = "to"
		if src, err = client.Open(remoteFile); err != nil {
			return fmt.Errorf("open remote file %s at %s err: %v", remoteFile, addr, err)
		}
		defer src.Close()
		os.RemoveAll(localFile)
		os.MkdirAll(filepath.Dir(localFile), 0755)
		if dst, err = os.Create(localFile); err != nil {
			return fmt.Errorf("create local file err: %v", err)
		}
		defer dst.Close()
	} else {
		prep = "from"
		if src, err = os.Open(localFile); err != nil {
			return fmt.Errorf("open local file err: %v", err)
		}
		defer src.Close()
		client.Remove(remoteFile)
		client.MkdirAll(filepath.Dir(remoteFile))
		if dst, err = client.Create(remoteFile); err != nil {
			return fmt.Errorf("create remote file %s at %s err: %v", remoteFile, addr, err)
		}
		defer dst.Close()
	}
	logrus.Infof("%sing %s at %s %s %s...", verb, remoteFile, addr, prep, localFile)
	t1 := time.Now()
	// 32MB copy buffer to keep throughput high on large files.
	n, err := io.CopyBuffer(dst, src, make([]byte, 32*1024*1024))
	if err != nil {
		logrus.Errorf("%s %s at %s %s %s err: %v", verb, remoteFile, addr, prep, localFile, err)
		return err
	}
	t2 := time.Now()
	cost := t2.Sub(t1).Seconds()
	logrus.Infof("%s %s at %s %s %s done, size: %d cost: %.2fs speed: %.2fMB/s", verb, remoteFile, addr, prep, localFile, n, cost, float64(n)/cost/1024/1024)
	return nil
}
diff --git a/tools/fotff/vcs/gitee/branch.go b/tools/fotff/vcs/gitee/branch.go
new file mode 100644
index 0000000000000000000000000000000000000000..10c196ffcca4f1f6fd819cdd51ad58b9f2c78be5
--- /dev/null
+++ b/tools/fotff/vcs/gitee/branch.go
@@ -0,0 +1,41 @@
+/*
+ * Copyright (c) 2022 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package gitee
+
+import (
+ "encoding/json"
+ "fmt"
+ "fotff/utils"
+ "net/http"
+)
+
// BranchResp is the subset of Gitee's branch API response that we consume.
type BranchResp struct {
	Name   string  `json:"name"`
	Commit *Commit `json:"commit"`
}
+
+func GetBranch(owner, repo, branch string) (*BranchResp, error) {
+ url := fmt.Sprintf("https://gitee.com/api/v5/repos/%s/%s/branches/%s", owner, repo, branch)
+ resp, err := utils.DoSimpleHttpReq(http.MethodGet, url, nil, nil)
+ if err != nil {
+ return nil, err
+ }
+ var branchResp BranchResp
+ if err := json.Unmarshal(resp, &branchResp); err != nil {
+ return nil, err
+ }
+ return &branchResp, nil
+}
diff --git a/tools/fotff/vcs/gitee/commit.go b/tools/fotff/vcs/gitee/commit.go
new file mode 100644
index 0000000000000000000000000000000000000000..21c3c96bff641a980b3599a8dc5e535740e70f26
--- /dev/null
+++ b/tools/fotff/vcs/gitee/commit.go
@@ -0,0 +1,45 @@
+/*
+ * Copyright (c) 2022 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package gitee
+
+import (
+ "encoding/json"
+ "fmt"
+ "fotff/utils"
+ "net/http"
+)
+
+func GetCommit(owner, repo, id string) (*Commit, error) {
+ url := fmt.Sprintf("https://gitee.com/api/v5/repos/%s/%s/commits/%s", owner, repo, id)
+ var resp []byte
+ if c, found := utils.CacheGet("gitee", url); found {
+ resp = c.([]byte)
+ } else {
+ var err error
+ resp, err = utils.DoSimpleHttpReq(http.MethodGet, url, nil, nil)
+ if err != nil {
+ return nil, err
+ }
+ utils.CacheSet("gitee", url, resp)
+ }
+ var commitResp Commit
+ if err := json.Unmarshal(resp, &commitResp); err != nil {
+ return nil, err
+ }
+ commitResp.Owner = owner
+ commitResp.Repo = repo
+ return &commitResp, nil
+}
diff --git a/tools/fotff/vcs/gitee/compare.go b/tools/fotff/vcs/gitee/compare.go
new file mode 100644
index 0000000000000000000000000000000000000000..4271d2423804b36edfd939522cf34bc387d6f518
--- /dev/null
+++ b/tools/fotff/vcs/gitee/compare.go
@@ -0,0 +1,139 @@
+/*
+ * Copyright (c) 2022 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package gitee
+
+import (
+ "encoding/json"
+ "fmt"
+ "fotff/utils"
+ "net/http"
+ "time"
+)
+
// CompareParam identifies a commit range (Base...Head) of a repository for
// the gitee compare API.
type CompareParam struct {
	Head  string
	Base  string
	Repo  string
	Owner string
}

// CompareResp is the response body of the gitee compare API; only the
// commit list is consumed here.
type CompareResp struct {
	Commits []*Commit `json:"commits"`
}

// Commit mirrors the gitee commit JSON object. The embedded CommitExtend is
// local bookkeeping and is excluded from (un)marshalling via `json:"-"`.
type Commit struct {
	CommitExtend `json:"-"`
	URL          string `json:"url"`
	SHA          string `json:"sha"`
	Commit       struct {
		Committer struct {
			Date string `json:"date"`
		} `json:"committer"`
		Message string `json:"message"`
	} `json:"commit"`
	Parents []struct {
		SHA string `json:"sha"`
		URL string `json:"url"`
	} `json:"parents"`
	Files []struct {
		Filename string `json:"filename"`
		Status   string `json:"status"`
		Patch    string `json:"patch,omitempty"`
	} `json:"files,omitempty"`
}

// CommitExtend records which repository a Commit was fetched from.
type CommitExtend struct {
	Owner string
	Repo  string
}
+
+func GetLatestMRBefore(owner, repo, branch string, before string) (ret *Commit, err error) {
+ branchResp, err := GetBranch(owner, repo, branch)
+ if err != nil {
+ return nil, err
+ }
+ head := branchResp.Commit
+ head.Owner = owner
+ head.Repo = repo
+ for head.Commit.Committer.Date > before {
+ if head, err = GetCommit(owner, repo, head.Parents[0].SHA); err != nil {
+ return nil, err
+ }
+ }
+ return head, nil
+}
+
+func GetBetweenTimeMRs(owner, repo, branch string, from, to time.Time) (ret []*Commit, err error) {
+ branchResp, err := GetBranch(owner, repo, branch)
+ if err != nil {
+ return nil, err
+ }
+ fromStr := from.UTC().Format(time.RFC3339)
+ toStr := to.UTC().Format(time.RFC3339)
+ head := branchResp.Commit
+ head.Owner = owner
+ head.Repo = repo
+ for head.Commit.Committer.Date > fromStr {
+ if head.Commit.Committer.Date < toStr {
+ ret = append(ret, head)
+ }
+ if head, err = GetCommit(owner, repo, head.Parents[0].SHA); err != nil {
+ return nil, err
+ }
+ }
+ return ret, nil
+}
+
+func GetBetweenMRs(param CompareParam) ([]*Commit, error) {
+ commits, err := GetBetweenCommits(param)
+ if err != nil {
+ return nil, err
+ }
+ var ret []*Commit
+ head := param.Head
+ for head != param.Base {
+ for _, commit := range commits {
+ if commit.SHA != head {
+ continue
+ }
+ commit.Owner = param.Owner
+ commit.Repo = param.Repo
+ ret = append(ret, commit)
+ head = commit.Parents[0].SHA
+ }
+ }
+ return ret, nil
+}
+
+func GetBetweenCommits(param CompareParam) ([]*Commit, error) {
+ url := fmt.Sprintf("https://gitee.com/api/v5/repos/%s/%s/compare/%s...%s", param.Owner, param.Repo, param.Base, param.Head)
+ var resp []byte
+ if c, found := utils.CacheGet("gitee", url); found {
+ resp = c.([]byte)
+ } else {
+ var err error
+ resp, err = utils.DoSimpleHttpReq(http.MethodGet, url, nil, nil)
+ if err != nil {
+ return nil, err
+ }
+ utils.CacheSet("gitee", url, resp)
+ }
+ var compareResp CompareResp
+ if err := json.Unmarshal(resp, &compareResp); err != nil {
+ return nil, err
+ }
+ return compareResp.Commits, nil
+}
diff --git a/tools/fotff/vcs/gitee/issue.go b/tools/fotff/vcs/gitee/issue.go
new file mode 100644
index 0000000000000000000000000000000000000000..48a2c04834e31567ede2caef51bcddb6ab725226
--- /dev/null
+++ b/tools/fotff/vcs/gitee/issue.go
@@ -0,0 +1,51 @@
+/*
+ * Copyright (c) 2022 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package gitee
+
+import (
+ "encoding/json"
+ "fmt"
+ "fotff/utils"
+ "net/http"
+)
+
// PRIssueResp is one element of the gitee "issues of a pull request"
// response; only the HTML link is consumed.
type PRIssueResp struct {
	URL string `json:"html_url"`
}
+
+func GetMRIssueURL(owner string, repo string, num int) ([]string, error) {
+ url := fmt.Sprintf("https://gitee.com/api/v5/repos/%s/%s/pulls/%d/issues", owner, repo, num)
+ var resp []byte
+ if c, found := utils.CacheGet("gitee", url); found {
+ resp = c.([]byte)
+ } else {
+ var err error
+ resp, err = utils.DoSimpleHttpReq(http.MethodGet, url, nil, nil)
+ if err != nil {
+ return nil, err
+ }
+ utils.CacheSet("gitee", url, resp)
+ }
+ var prIssues []PRIssueResp
+ if err := json.Unmarshal(resp, &prIssues); err != nil {
+ return nil, err
+ }
+ ret := make([]string, len(prIssues))
+ for i, issue := range prIssues {
+ ret[i] = issue.URL
+ }
+ return ret, nil
+}
diff --git a/tools/fotff/vcs/manifest.go b/tools/fotff/vcs/manifest.go
new file mode 100644
index 0000000000000000000000000000000000000000..93c3caac25f13dbbfa75150f1e400b315f4771d6
--- /dev/null
+++ b/tools/fotff/vcs/manifest.go
@@ -0,0 +1,186 @@
+/*
+ * Copyright (c) 2022 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package vcs
+
+import (
+ "crypto/md5"
+ "encoding/xml"
+ "fmt"
+ "github.com/sirupsen/logrus"
+ "os"
+ "sort"
+)
+
// Manifest mirrors a repo manifest XML file (remote, defaults and the list
// of tracked projects).
type Manifest struct {
	XMLName  xml.Name  `xml:"manifest"`
	Remote   Remote    `xml:"remote"`
	Default  Default   `xml:"default"`
	Projects []Project `xml:"project"`
}

// Remote is the <remote> element: where projects are fetched and reviewed.
type Remote struct {
	Name   string `xml:"name,attr"`
	Fetch  string `xml:"fetch,attr"`
	Review string `xml:"review,attr"`
}

// Default is the <default> element: fallback remote/revision and sync-j.
type Default struct {
	Remote   string `xml:"remote,attr"`
	Revision string `xml:"revision,attr"`
	SyncJ    string `xml:"sync-j,attr"`
}

// Project is one <project> element: a repository pinned at a revision,
// optionally with linkfile entries.
type Project struct {
	XMLName    xml.Name   `xml:"project"`
	Name       string     `xml:"name,attr"`
	Path       string     `xml:"path,attr,omitempty"`
	Revision   string     `xml:"revision,attr"`
	Remote     string     `xml:"remote,attr,omitempty"`
	CloneDepth string     `xml:"clone-depth,attr,omitempty"`
	LinkFile   []LinkFile `xml:"linkfile,omitempty"`
}

// LinkFile is a <linkfile> element: a symlink from Src to Dest.
type LinkFile struct {
	Src  string `xml:"src,attr"`
	Dest string `xml:"dest,attr"`
}

// ProjectUpdate pairs a project's state in two manifests: P1 is the old
// state, P2 the new one; a nil side means added (P1 nil) or removed (P2 nil).
type ProjectUpdate struct {
	P1, P2 *Project
}
+
+func (p *Project) String() string {
+ if p == nil {
+ return ""
+ }
+ return fmt.Sprintf("<%s>", p.Name)
+}
+
+func (p *Project) StructureDiff(p2 *Project) bool {
+ if p == nil && p2 != nil || p != nil && p2 == nil {
+ return true
+ }
+ if p == nil && p2 == nil {
+ return false
+ }
+ return p.Name != p2.Name || p.Path != p2.Path || p.Remote != p2.Remote
+}
+
+func (p *Project) Equals(p2 *Project) bool {
+ return p.Name == p2.Name && p.Path == p2.Path && p.Remote == p2.Remote && p.Revision == p2.Revision
+}
+
+func ParseManifestFile(file string) (*Manifest, error) {
+ data, err := os.ReadFile(file)
+ if err != nil {
+ return nil, err
+ }
+ var m Manifest
+ err = xml.Unmarshal(data, &m)
+ return &m, err
+}
+
+func (m *Manifest) WriteFile(filePath string) error {
+ data, err := xml.MarshalIndent(m, "", " ")
+ if err != nil {
+ return err
+ }
+ data = append([]byte(xml.Header), data...)
+ return os.WriteFile(filePath, data, 0640)
+}
+
+func GetRepoUpdates(m1, m2 *Manifest) (updates []ProjectUpdate, err error) {
+ if _, err := m1.Standardize(); err != nil {
+ return nil, err
+ }
+ if _, err := m2.Standardize(); err != nil {
+ return nil, err
+ }
+ var j int
+ for i := 0; i < len(m1.Projects); {
+ if m2.Projects[j].Name == m1.Projects[i].Name {
+ if !m1.Projects[i].Equals(&m2.Projects[j]) {
+ logrus.Infof("%v changes", &m1.Projects[i])
+ updates = append(updates, ProjectUpdate{
+ P1: &m1.Projects[i],
+ P2: &m2.Projects[j],
+ })
+ }
+ i++
+ j++
+ } else if m2.Projects[j].Name > m1.Projects[i].Name {
+ logrus.Infof("%v removed", &m1.Projects[i])
+ updates = append(updates, ProjectUpdate{
+ P1: &m1.Projects[i],
+ P2: nil,
+ })
+ i++
+ } else { // m2.Projects[j].Name < m1.Projects[i].Name
+ logrus.Infof("%v added", &m2.Projects[j])
+ updates = append(updates, ProjectUpdate{
+ P1: nil,
+ P2: &m2.Projects[j],
+ })
+ j++
+ }
+ }
+ return
+}
+
+func (m *Manifest) UpdateManifestProject(name, path, remote, revision string, add bool) {
+ if name == "manifest" {
+ return
+ }
+ for i, p := range m.Projects {
+ if p.Name == name {
+ if path != "" {
+ m.Projects[i].Path = path
+ }
+ if remote != "" {
+ m.Projects[i].Remote = remote
+ }
+ if revision != "" {
+ m.Projects[i].Revision = revision
+ }
+ return
+ }
+ }
+ if add {
+ m.Projects = append(m.Projects, Project{Name: name, Path: path, Revision: revision, Remote: remote})
+ }
+}
+
+func (m *Manifest) RemoveManifestProject(name string) {
+ for i, p := range m.Projects {
+ if p.Name == name {
+ m.Projects = append(m.Projects[:i], m.Projects[i:]...)
+ return
+ }
+ }
+}
+
+func (m *Manifest) Standardize() (string, error) {
+ sort.Slice(m.Projects, func(i, j int) bool {
+ return m.Projects[i].Name < m.Projects[j].Name
+ })
+ data, err := xml.MarshalIndent(m, "", " ")
+ if err != nil {
+ return "", err
+ }
+ data = append([]byte(xml.Header), data...)
+ sumByte := md5.Sum(data)
+ return fmt.Sprintf("%X", sumByte), nil
+}
diff --git a/tools/rom_ram_analyzer/lite_small/README.md b/tools/rom_ram_analyzer/lite_small/README.md
index 859aac6cdeef85ed1795f289e35e7c8c20207fe0..468a5129fc8ab62267c4861d2de0ba853d0bc746 100644
--- a/tools/rom_ram_analyzer/lite_small/README.md
+++ b/tools/rom_ram_analyzer/lite_small/README.md
@@ -15,7 +15,7 @@
## 代码思路
-1. 扫描BUILD.gn文件,收集各个target的编译产物及其对应的component_name, subsystem_name信息,并存储到config.yaml中的gn_info_file字段指定的json文件中
+1. 扫描BUILD.gn文件,收集各个target的编译产物及其对应的component_name, subsystem_name信息,并存储到config.yaml中的gn_info_file字段指定的json文件中。如果BUILD.gn中没有查找到,则直接使用get_subsystem_component.py中预先收集好的数据(根据bundle.json)
2. 根据配置文件config.yaml扫描产品的编译产物目录,得到真实的编译产物信息(主要是大小)
3. 用真实的编译产物与从BUILD.gn中收集的信息进行匹配,从而得到编译产物-大小-所属部件的对应信息
4. 如果匹配失败,会直接利用grep到项目路径下进行模糊搜索,取出现次数top1的BUILD.gn,并根据该BUILD.gn文件去查找子系统和部件
@@ -27,6 +27,7 @@
1. 关于NOTFOUND:表示对应的编译产物没有在BUILD.gn的扫描结果中匹配(包括模糊匹配)到
1. 本工具是基于gn的template进行匹配,如果新增了自定义的template,则需要相应在代码中进行配置
1. 由于本工具是进行的静态扫描,且部分gn文件中使用了较为复杂的gn语法,因此本工具的**准确率无法达到100%,结果仅供参考**
+1. rk3568因为主要使用的是自定义的template,所以能够在编译阶段收集更多有效信息,因此建议使用standard目录下的脚本进行分析
**子系统及部件的查找过程**
@@ -51,18 +52,17 @@
xlwt==1.3.0
```
-1. `python3 rom_analysis.py --product_name {your_product_name} --oh_path {root_path_of_oh} [-g] [-s]`运行代码,其中-g表示直接使用上次扫描的BUILD.gn的结果,-s表示直接使用已有的子系统和部件信息,默认都会重新扫描.eg: `python3 rom_analysis.py --product_name ipcamera_hispark_taurus`.
+1. `python3 rom_analysis.py --product_name {your_product_name} --oh_path {root_path_of_oh} [-g] [-s] [-b]`运行代码,其中-g表示直接使用上次扫描的BUILD.gn的结果,-s表示直接使用已有的子系统和部件信息,此二者默认都会重新扫描, -b表示在结果中添加各部件的baseline信息(根据bundle.json).eg: `python3 rom_analysis.py --product_name ipcamera_hispark_taurus -b`.
1. 运行完毕会产生4个json文件及一个xls文件,如果是默认配置,各文件描述如下:
- gn_info.json:BUILD.gn的分析结果
- sub_com_info.json:从bundle.json中进行分析获得的各部件及其对应根目录的信息
- {product_name}_product.json:该产品实际的编译产物信息,根据config.yaml进行收集
- - {product_name}_result.json:各部件的rom大小分析结果
- - {product_name}_result.xls:各部件的rom大小分析结果
+ - **{product_name}_result.json:各部件的rom大小分析结果**
+ - **{product_name}_result.xls:各部件的rom大小分析结果**
+ - rom_ram_baseline.json:各部件在bundle.json中定义的rom和ram的基线
## 新增对产品的支持
-*rk3568因为主要使用的是自定义的template,所以能够在编译阶段收集更多有效信息,因此建议使用standard目录下的脚本进行分析*
-
在config.yaml中进行配置即可,格式说明如下:
```yaml
ipcamera_hispark_taurus: # 产品名称,需要和命令行参数中的-p参数一致
@@ -96,7 +96,4 @@ ipcamera_hispark_taurus: # 产品名称,需要和命令行参数中的-p参数
## 如何提高准确率
1. 如果已知编译产物不可能从某些目录下的BUILD.gn产生,则可以将其对应目录加入到config.yaml的black_list,从而不对该目录下的BUILD.gn进行扫描,以减少出错概率
-
-## 后续工作
-
-1. 对target(xxx,yyy)中,xxx/yyy为变量的情况可进一步优化
+1. 对于已知检测错误的或NOTFOUND的编译产物,如果知道其正确的部件和子系统,可在config.yaml中的manual_config进行配置
diff --git a/tools/rom_ram_analyzer/lite_small/pkgs/basic_tool.py b/tools/rom_ram_analyzer/lite_small/pkgs/basic_tool.py
index 512ed597072bdb0476d7bac3e065f480f567dfbe..421b3cf2ced03f3aced17f564170f168d809bde2 100644
--- a/tools/rom_ram_analyzer/lite_small/pkgs/basic_tool.py
+++ b/tools/rom_ram_analyzer/lite_small/pkgs/basic_tool.py
@@ -20,6 +20,45 @@ import os
import re
import glob
from typing import *
+import unittest
+
+__all__ = ["translate_str_unit", "BasicTool", "do_nothing", "get_unit", "unit_adaptive"]
+
+
def unit_adaptive(size: int) -> str:
    """Render a byte count as a human-readable string (Byte/KB/MB/GB).

    The value is divided by 1024 until it drops below 1024 or GB is
    reached; the result is rounded to the nearest integer. Values beyond
    the GB range stay in GB (e.g. 1024**4 bytes -> "1024GB").
    """
    units = ("Byte", "KB", "MB", "GB")
    idx = 0
    while size >= 1024 and idx < len(units):
        size /= 1024
        idx += 1
    if idx == len(units):
        # Overflowed past the largest unit: back off to GB and undo the
        # extra division performed by the last loop iteration.
        idx = len(units) - 1
        size *= 1024
    return f"{round(size)}{units[idx]}"
+
def get_unit(x: str) -> str:
    """Return the trailing alphabetic unit of a size string.

    e.g. "100KB" -> "KB", "~5MB" -> "MB", "123" -> "".
    """
    # [a-zA-Z], not [a-z|A-Z]: inside a character class '|' is a literal,
    # so the original pattern also accepted a trailing '|' as part of the
    # unit.
    pattern = r"[a-zA-Z]*$"
    unit = re.search(pattern, x).group()
    return unit
+
+
def translate_str_unit(x: str, dest: str, prefix: str = "~") -> float:
    """Convert a size string such as "~100KB" to a number in `dest` units.

    `prefix` (default "~") is stripped from the left before parsing; the
    unit suffix is detected via get_unit(). Raises Exception when the
    source unit is not in the supported set.
    """
    factors: Dict[str, int] = {
        "Byte": 1,
        "byte": 1,
        "KB": 1024,
        "kb": 1024,
        "MB": 1024*1024,
        "M": 1024*1024,
        "GB": 1024*1024*1024,
        "G": 1024*1024*1024
    }
    unit = get_unit(x)
    if unit not in factors.keys():
        raise Exception(
            f"unsupport unit: {unit}. only support {list(factors.keys())}")
    value = float(x.lstrip(prefix).rstrip(unit))
    return round(value*(factors.get(unit)/factors.get(dest)), 2)
def do_nothing(x: Any) -> Any:
@@ -136,11 +175,3 @@ class BasicTool:
output = os.popen(cmd).read()
output = post_processor(output)
return output
-
-
-if __name__ == '__main__':
- res = BasicTool.grep_ern("^( *)ohos_prebuilt_shared_library", "/home/aodongbiao/oh", include="BUILD.gn", exclude=("/home/aodongbiao/oh/out","doc", ".ccache"), post_handler=lambda x: x.split('\n'))
- # print(res)
- for i in res:
- if "oh/out" in i:
- print(i)
diff --git a/tools/rom_ram_analyzer/lite_small/pkgs/gn_common_tool.py b/tools/rom_ram_analyzer/lite_small/pkgs/gn_common_tool.py
index 5293400ad173b001e29f3a8d47111fe2c85e4da1..61c74709789f2e1fdb8f7df030f2036ca1f3f9f4 100644
--- a/tools/rom_ram_analyzer/lite_small/pkgs/gn_common_tool.py
+++ b/tools/rom_ram_analyzer/lite_small/pkgs/gn_common_tool.py
@@ -203,78 +203,10 @@ class GnVariableParser:
"""
result = BasicTool.re_group_1(
content, r"{} *= *(\[.*?\])".format(var), flags=re.S | re.M)
- result = ast.literal_eval(result.strip())
- return result
-
-
-if __name__ == '__main__':
- cc = \
- """
- target("shared_library", "mmp"){
- xxx
- }
-
- ohos_shared_library("pinauthservice") {
- sources = [
- "//base/useriam/pin_auth/services/modules/driver/src/pin_auth_driver_hdi.cpp",
- "//base/useriam/pin_auth/services/modules/driver/src/pin_auth_interface_adapter.cpp",
- "//base/useriam/pin_auth/services/modules/executors/src/pin_auth_executor_callback_hdi.cpp",
- "//base/useriam/pin_auth/services/modules/executors/src/pin_auth_executor_hdi.cpp",
- "//base/useriam/pin_auth/services/modules/inputters/src/i_inputer_data_impl.cpp",
- "//base/useriam/pin_auth/services/modules/inputters/src/pin_auth_manager.cpp",
- "//base/useriam/pin_auth/services/sa/src/pin_auth_service.cpp",
- ]
-
- configs = [
- ":pin_auth_services_config",
- "//base/useriam/user_auth_framework/common:iam_log_config",
- "//base/useriam/user_auth_framework/common:iam_utils_config",
- ]
-
- deps = [
- "//base/useriam/pin_auth/frameworks:pinauth_ipc",
- "//third_party/openssl:libcrypto_shared",
- ]
-
- external_deps = [
- "access_token:libaccesstoken_sdk",
- "c_utils:utils",
- "drivers_interface_pin_auth:libpin_auth_proxy_1.0",
- "hisysevent_native:libhisysevent",
- "hiviewdfx_hilog_native:libhilog",
- "ipc:ipc_core",
- "safwk:system_ability_fwk",
- "user_auth_framework:userauth_executors",
- ]
- t = [
- 1,
- 2,
- 3
- ]
- tt = [
- aaa,
- bbb,
- ccc
- ]
- remove_configs = [ "//build/config/compiler:no_exceptions" ]
-
- subsystem_name = "useriam"
- part_name = "pin_auth"
- }"""
- s = """
-updater_usb_init_cfg_path = "//base/startup/init/services/etc/init.usb.cfg"
-updater_init_usb_configfs_path_cfg =
- "//drivers/peripheral/usb/cfg/init.usb.configfs.cfg"
-updater_faultloggerd_cfg =
-"//base/hiviewdfx/faultloggerd/services/config/faultloggerd.cfg"
-updater_hilog_cfg = "//base/hiviewdfx/hilog/services/hilogd/etc/hilogd.cfg"
-
-ohos_prebuilt_etc("updater_hilog.cfg") {
-source = "${updater_hilog_cfg}"
-install_images = [ "updater" ]
-part_name = "updater"
-}
-"""
- s = "\"${updater_faultloggerd_cfg}\""
- print(GnCommonTool.contains_gn_variable(s))
- ...
+ result_list = list()
+ for item in result.lstrip('[').rstrip(']').split('\n'):
+ item = item.strip().strip(',"')
+ if not item:
+ continue
+ result_list.append(item)
+ return result_list
diff --git a/tools/rom_ram_analyzer/lite_small/pkgs/rom_ram_baseline_collector.py b/tools/rom_ram_analyzer/lite_small/pkgs/rom_ram_baseline_collector.py
new file mode 100644
index 0000000000000000000000000000000000000000..66b22d260547966efdcd4035fd0d576566b09855
--- /dev/null
+++ b/tools/rom_ram_analyzer/lite_small/pkgs/rom_ram_baseline_collector.py
@@ -0,0 +1,70 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# Copyright (c) 2023 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file is to collect baseline information (according to bundle.json)
+
+if __name__ == '__main__':
+ from basic_tool import BasicTool
+else:
+ from pkgs.basic_tool import BasicTool
+from typing import Dict, Any
+import json
+import logging
+
class RomRamBaselineCollector:
    """Collect the ROM/RAM baseline values declared in bundle.json files."""

    @classmethod
    def _put(cls, result_dict: Dict, subsystem_name: str, component_name: str, rom_size: str, ram_size: str, bundle_path: str) -> None:
        # Create the subsystem bucket on first use, then (re)register the
        # component entry under it with its rom/ram baseline and the path of
        # the bundle.json it came from.
        if not result_dict.get(subsystem_name):
            result_dict[subsystem_name] = dict()
        result_dict[subsystem_name][component_name] = dict()
        result_dict[subsystem_name][component_name]["rom"] = rom_size
        result_dict[subsystem_name][component_name]["ram"] = ram_size
        result_dict[subsystem_name][component_name]["bundle.json"] = bundle_path

    @classmethod
    def collect(cls, oh_path: str) -> Dict[str, Dict]:
        """Scan oh_path for bundle.json files (via the external `find`
        command) and collect each component's rom/ram baseline.

        Returns a dict of the shape:
            {
                "subsystem_name": {
                    "component_name": {
                        "rom": "123KB",   # the value may be "" or None
                        "ram": "234KB",
                        "bundle.json": "/path/to/bundle.json"
                    }
                }
            }
        """
        def post_handler(x: str) -> list:
            # Split `find` output into lines and drop empty ones.
            x = x.split("\n")
            y = [item for item in x if item]
            return y
        bundle_list = BasicTool.execute(cmd=f"find {oh_path} -name bundle.json", post_processor=post_handler)
        rom_ram_baseline_dict: Dict[str, Dict] = dict()
        for bundle in bundle_list:
            with open(bundle, 'r', encoding='utf-8') as f:
                content: Dict[str, Any] = json.loads(f.read())
            component_info = content.get("component")
            if not component_info:
                logging.warning(f"{bundle} has no field of 'component'.")
                continue
            component_name = component_info.get("name")
            subsystem_name = component_info.get("subsystem")
            rom_baseline = component_info.get("rom")
            ram_baseline = component_info.get("ram")
            # NOTE(review): this warns only when subsystem, rom AND ram are
            # all missing; if the intent is to warn when ANY of them is
            # missing, the condition should combine with 'and' — confirm.
            if not (subsystem_name or rom_baseline or ram_baseline):
                logging.warning(f"subsystem=\"{subsystem_name}\", rom=\"{rom_baseline}\", ram=\"{ram_baseline}\" in {bundle}")
            cls._put(rom_ram_baseline_dict, subsystem_name, component_name, rom_baseline, ram_baseline, bundle)
        return rom_ram_baseline_dict
\ No newline at end of file
diff --git a/tools/rom_ram_analyzer/lite_small/pkgs/simple_excel_writer.py b/tools/rom_ram_analyzer/lite_small/pkgs/simple_excel_writer.py
index da6f3c6e1b9a1ce513e8a8867614c4acc9a2ec92..81f44316f0b59108b1198565f3af63229145a8ee 100644
--- a/tools/rom_ram_analyzer/lite_small/pkgs/simple_excel_writer.py
+++ b/tools/rom_ram_analyzer/lite_small/pkgs/simple_excel_writer.py
@@ -122,15 +122,3 @@ class SimpleExcelWriter:
def save(self, file_name: str):
self.__book.save(file_name)
-
-
-if __name__ == '__main__':
- writer = SimpleExcelWriter(default_sheet_name="first")
- writer.add_sheet("second")
- writer.add_sheet("third")
- writer.set_sheet_header(["h", "m", "n"])
- writer.append_line([1, 2, 3])
- writer.append_line([2, 3, 4], "second")
- writer.append_line([3, 4, 5], "third")
- writer.append_line([3, 2, 1])
- writer.save("demo.xls")
diff --git a/tools/rom_ram_analyzer/lite_small/pkgs/simple_yaml_tool.py b/tools/rom_ram_analyzer/lite_small/pkgs/simple_yaml_tool.py
index 32cb530bc6026ed2c44a9662170fb378dcb15cf2..084e45e9d9c0f7f534be2b7d05fc470cdcf729d4 100644
--- a/tools/rom_ram_analyzer/lite_small/pkgs/simple_yaml_tool.py
+++ b/tools/rom_ram_analyzer/lite_small/pkgs/simple_yaml_tool.py
@@ -24,9 +24,4 @@ class SimpleYamlTool:
@classmethod
def read_yaml(cls, file_name: str, mode: str = "r", encoding: str = "utf-8") -> Dict:
with open(file_name, mode, encoding=encoding) as f:
- return yaml.load(f, Loader=SafeLoader)
-
-
-if __name__ == '__main__':
- config = SimpleYamlTool.read_yaml("/home/aodongbiao/build_static_check/tools/component_tools/rom_ram_analyzer/src/config.yaml")
- print(config["black_grep_dir"])
\ No newline at end of file
+ return yaml.load(f, Loader=SafeLoader)
\ No newline at end of file
diff --git a/tools/rom_ram_analyzer/lite_small/src/config.py b/tools/rom_ram_analyzer/lite_small/src/config.py
index a557af603fa1d3cc3fe226734cfd12bb30bfbf7b..be7819575f9b7cba03bf150721f05b67cd48f29b 100644
--- a/tools/rom_ram_analyzer/lite_small/src/config.py
+++ b/tools/rom_ram_analyzer/lite_small/src/config.py
@@ -13,7 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-# This file is a configuration for scaning the BUILD.gn to collection compile
+# This file is a configuration for scanning the BUILD.gn to collect compile
# products.
@@ -43,23 +43,34 @@ def parse_args():
parser.add_argument("-o", "--oh_path", type=str,
default=".", help="root path of openharmony")
parser.add_argument("-g", "--recollect_gn",
- action="store_false", help="recollect gn info or not")
+ action="store_false", help="recollect gn info or not(-g)")
parser.add_argument("-s", "--recollect_sc", action="store_false",
- help="recollect subsystem_component info or not")
+ help="recollect subsystem_component info or not(-s)")
+ parser.add_argument("-b", "--baseline", action="store_true",
+ help="add baseline of component to the result(-b) or not.")
+ parser.add_argument("-u", "--unit_adaptive",
+ action="store_true", help="unit adaptive")
+ parser.add_argument("-f", "--output_file", type=str, default="rom_analysis_result",
+ help="basename of output file, default: rom_analysis_result. eg: rom_analysis_result")
args = parser.parse_args()
return args
-logging.basicConfig(level=logging.INFO)
+logging.basicConfig(level=logging.NOTSET)
_args = parse_args()
# # global variables
configs = SimpleYamlTool.read_yaml("config.yaml")
result_dict: Dict[str, Any] = dict()
-
-project_path = BasicTool.abspath(_args.oh_path)
product_name = _args.product_name
+if product_name not in configs.keys():
+ print(f"error: product_name '{product_name}' illegal")
+ exit(-1)
+project_path = BasicTool.abspath(_args.oh_path)
recollect_gn = _args.recollect_gn
+baseline = _args.baseline
+unit_adapt = _args.unit_adaptive
+output_file = _args.output_file
_recollect_sc = _args.recollect_sc
_sc_json: Dict[Text, Text] = configs.get("subsystem_component")
_sc_save = _sc_json.get("save")
@@ -237,13 +248,41 @@ collector_config: Tuple[BaseProcessor] = (
},
unit_post_handler=DefaultPostHandler(),
ud_post_handler=TargetS2MPostHandler
- )
+ ),
+ DefaultProcessor(project_path=project_path,
+ result_dict=result_dict,
+ target_type=_target_type[14],
+ match_pattern=fr"^( *){_target_type[14]}\(.*?\)",
+ sub_com_dict=sub_com_dict,
+ target_name_parser=TargetNameParser.single_parser,
+ other_info_handlers={
+ "output_extension":extension_handler
+ },
+ unit_post_handler=UnittestPostHandler(),
+ ),
+ DefaultProcessor(project_path=project_path,
+ result_dict=result_dict,
+ target_type=_target_type[15],
+ match_pattern=fr"^( *){_target_type[15]}\(.*?\)",
+ sub_com_dict=sub_com_dict,
+ target_name_parser=TargetNameParser.single_parser,
+ other_info_handlers={
+ "hap_name": hap_name_handler,
+ "mode": mod_handler,
+ },
+ unit_post_handler=HapPackPostHandler(),
+ ),
+ ListResourceProcessor(project_path=project_path,
+ result_dict=result_dict,
+ target_type=_target_type[16],
+ match_pattern=fr"^( *){_target_type[16]}\(.*?\)",
+ sub_com_dict=sub_com_dict,
+ target_name_parser=TargetNameParser.single_parser,
+ other_info_handlers={
+ },
+ unit_post_handler=DefaultPostHandler(),
+ resource_field="sources"
+ ),
)
__all__ = ["configs", "result_dict", "collector_config", "sub_com_dict"]
-
-if __name__ == '__main__':
- for c in collector_config:
- c.run()
- with open("demo.json", 'w', encoding='utf-8') as f:
- json.dump(result_dict, f)
diff --git a/tools/rom_ram_analyzer/lite_small/src/config.yaml b/tools/rom_ram_analyzer/lite_small/src/config.yaml
index f3394b2bc210a5392e27ec0fd0cc0c805c82d9a0..cfeabc9b13a536e184012bd60a89e1eae9fcd307 100644
--- a/tools/rom_ram_analyzer/lite_small/src/config.yaml
+++ b/tools/rom_ram_analyzer/lite_small/src/config.yaml
@@ -12,6 +12,7 @@
# limitations under the License.
+
# 注意:如果target_type有了更改,要相应改变config.py中collector_config
target_type:
- shared_library
@@ -28,8 +29,10 @@ target_type:
- ohos_prebuilt_shared_library
- lite_component
- target
+ - unittest
+ - hap_pack
+ - copy
-
subsystem_component:
save: true
filename: sub_com_info.json
@@ -44,12 +47,12 @@ default_extension:
default_prefix:
shared_library: lib
static_library: lib
+
# black list for command 'grep', it should be the subdirectory of the root path of oh
black_list:
- .repo
- .ccache
- doc
- # - test
- build
# 排除out目录,为了避免排除其他路径下的out目录,这里详细列出了out下的所有目录
- out/gen
@@ -71,6 +74,7 @@ ipcamera_hispark_taurus:
bin: bin
so: usr/lib
etc: etc
+ hap: system/internal
rest: True # 是否将上面root目录下除了relative指定的目录归到etc并进行匹配
query_order: # 匹配顺序,key应当何relative字段中的key一致,value应当在上面的target_type字段中
so:
@@ -80,11 +84,20 @@ ipcamera_hispark_taurus:
- lite_library
- lite_component
- target
-
+ - unittest
bin:
- executable
- ohos_executable
- lite_component
+ - unittest
+ hap:
+ - hap_pack
+ # 对于脚本无法自动检测或误测部分,在这里手动配置
+ # manual_config:
+ # usr/lib/libplugin_demuxer_ffmpeg.so: # start with out
+ # subsystem: test_manual_config # [required]
+ # component: test_manual_config # [required]
+ # other_detail_info_key: other_detail_info_value # [optional]
ipcamera_hispark_taurus_linux:
@@ -96,6 +109,7 @@ ipcamera_hispark_taurus_linux:
bin: bin
so: usr/lib
etc: etc
+ hap: system/internal
rest: True
query_order:
so:
@@ -105,11 +119,15 @@ ipcamera_hispark_taurus_linux:
- lite_library
- lite_component
- target
-
+ - unittest
+ hap:
+ - hap_pack
bin:
- executable
- ohos_executable
- lite_component
+ - unittest
+
hispark_taurus_mini_system:
product_infofile: hispark_taurus_mini_system_product.json
@@ -129,11 +147,12 @@ hispark_taurus_mini_system:
- lite_library
- lite_component
- target
-
+ - unittest
bin:
- executable
- ohos_executable
- lite_component
+ - unittest
wifiiot_hispark_pegasus:
product_infofile: wifiiot_hispark_pegasus_product.json
@@ -167,7 +186,7 @@ hispark_pegasus_mini_system:
- lite_library
- target
-rk3568_mini_system: # rk的目前从packages/phone/system_module_info.json中分析准确度更高,因为rk基本都使用的是ohos_xxx,而lite/small的更多的是使用的gn原生target template
+rk3568: # rk的目前从packages/phone/system_module_info.json中分析准确度更高,因为rk基本都使用的是ohos_xxx,而lite/small的更多的是使用的gn原生target template
product_infofile: rk3568_mini_system_product.json
output_name: rk3568_mini_system_result.json
product_dir:
@@ -186,12 +205,13 @@ rk3568_mini_system: # rk的目前从packages/phone/system_module_info.json中分
- lite_library
- lite_component
- target
-
+ - unittest
bin:
- ohos_executable
- executable
- lite_component
-
+ - unittest
hap:
- ohos_hap
+ - hap_pack
diff --git a/tools/rom_ram_analyzer/lite_small/src/get_subsystem_component.py b/tools/rom_ram_analyzer/lite_small/src/get_subsystem_component.py
index 2dbd476722d5b6360650840a19e67e196a472c8e..2620c7fbe050edb0b85fc5e982a239b32aeaff4b 100644
--- a/tools/rom_ram_analyzer/lite_small/src/get_subsystem_component.py
+++ b/tools/rom_ram_analyzer/lite_small/src/get_subsystem_component.py
@@ -139,10 +139,3 @@ class SC:
__all__ = ["SC"]
-
-if __name__ == '__main__':
- ohos_path, output_path = parse_args()
- # info = get_subsystem_components_modified(ohos_path)
- # export_to_json(info, output_path)
- # print_warning_info()
- SC.run(ohos_path, output_path)
diff --git a/tools/rom_ram_analyzer/lite_small/src/misc.py b/tools/rom_ram_analyzer/lite_small/src/misc.py
index 79af85b24908635072d1c8dd9cf9cfd0d933a2de..c7a7a6f99848cf53550c01e332a11dfd635b8c96 100644
--- a/tools/rom_ram_analyzer/lite_small/src/misc.py
+++ b/tools/rom_ram_analyzer/lite_small/src/misc.py
@@ -49,6 +49,10 @@ def target_type_handler(paragraph: Text):
return tt
def mod_handler(paragraph: Text):
    """Extract the value of the 'mode' gn variable from a target paragraph,
    with surrounding double quotes stripped."""
    return GnVariableParser.string_parser("mode", paragraph).strip('"')
+
+
"""
===============gn lineno collector===============
"""
@@ -134,9 +138,29 @@ def add_postfix(content: str, postfix: str) -> str:
class DefaultPostHandler(BasePostHandler):
    """Default post handler: the product file name is the target's
    output_name, with the declared extension appended when the name does
    not already end with it."""

    def run(self, unit: Dict[str, AnyStr]):
        name = unit["output_name"]
        if "extension" in unit and not name.endswith(unit["extension"]):
            # Normalize the dot between name and extension.
            return name.rstrip(".") + "." + unit["extension"].lstrip(".")
        return name
+class UnittestPostHandler(BasePostHandler):
+ def run(self, unit: Dict[str, AnyStr]):
+ if "output_extension" in unit.keys() and (not unit["output_name"].endswith(unit["output_extension"])):
+ out = unit["output_name"].rstrip(
+ ".")+"."+unit["output_extension"].lstrip(".")
+ return out
+ return unit["output_name"]
+
+
+class HapPackPostHandler(BasePostHandler):
+ def run(self, unit: Dict[str, AnyStr]):
+ hap_name = unit.get("hap_name")
+ mode = unit.get("mode")
+ return hap_name + "." + mode
+
+
class HAPPostHandler(BasePostHandler):
"""
for ohos_hap"""
diff --git a/tools/rom_ram_analyzer/lite_small/src/rom_analysis.py b/tools/rom_ram_analyzer/lite_small/src/rom_analysis.py
index f827b03e039b3daf5c8bcddfd503ef8cbb63a792..bd2e91f9e1f5d7e223b031fdf830b11fcf57a7e9 100644
--- a/tools/rom_ram_analyzer/lite_small/src/rom_analysis.py
+++ b/tools/rom_ram_analyzer/lite_small/src/rom_analysis.py
@@ -29,10 +29,11 @@ from threading import RLock
import collections
from config import result_dict, collector_config, configs, \
- project_path, sub_com_dict, product_name, recollect_gn
-from pkgs.basic_tool import BasicTool
+ project_path, sub_com_dict, product_name, recollect_gn, baseline, unit_adapt, output_file
+from pkgs.basic_tool import BasicTool, unit_adaptive
from pkgs.gn_common_tool import GnCommonTool
from pkgs.simple_excel_writer import SimpleExcelWriter
+from pkgs.rom_ram_baseline_collector import RomRamBaselineCollector
from misc import gn_lineno_collect
@@ -87,7 +88,7 @@ class RomAnalysisTool:
if (not sub_path) or (os.sep not in sub_path):
return
# 将其他目录添加到dir_list
- t, sub_sub_path = sub_path.split(os.sep, 1) # 如果是c/e,分割成c,e
+ t, sub_sub_path = sub_path.split(os.sep, 1) # 如果是c/e,分割成c,e
t = os.path.join(rela_path, t)
if t in dir_list:
dir_list.remove(t)
@@ -106,7 +107,7 @@ class RomAnalysisTool:
logging.error(
f"product_name '{product_name}' not found in the config.yaml")
exit(1)
- product_path_dit: Dict[str, str] = dict() # 存储编译产物的类型及目录
+ product_path_dit: Dict[str, str] = dict() # 存储编译产物的类型及目录
root_dir = product_dir.get("root")
root_dir = os.path.join(project_path, root_dir)
relative_dir: Dict[str, str] = product_dir.get("relative")
@@ -149,9 +150,9 @@ class RomAnalysisTool:
return product_dict
@classmethod
- def _put(cls, sub: str, com: str, unit: Dict, rom_size_dict: Dict):
+ def _put(cls, sub: str, com: str, unit: Dict, rom_size_dict: Dict, com_size_baseline: str = str()):
size = unit.get("size")
- if not rom_size_dict.get("size"): # 总大小
+ if not rom_size_dict.get("size"): # 总大小
rom_size_dict["size"] = 0
if not rom_size_dict.get(sub): # 子系统大小
rom_size_dict[sub]: Dict[str, Dict] = dict()
@@ -164,6 +165,10 @@ class RomAnalysisTool:
rom_size_dict[sub][com]["size"] = 0
rom_size_dict[sub][com]["count"] = 0
+ if (sub != "NOTFOUND" and sub != "UNDEFINED" and com != "NOTFOUND" and com != "UNDEFINED") \
+ and (not rom_size_dict.get(sub).get(com).get("baseline")) and baseline:
+ rom_size_dict[sub][com]["baseline"] = com_size_baseline
+
rom_size_dict[sub][com]["filelist"].append(unit)
rom_size_dict[sub][com]["size"] += size
rom_size_dict[sub][com]["count"] += 1
@@ -174,6 +179,7 @@ class RomAnalysisTool:
@classmethod
def _fuzzy_match(cls, file_name: str, filter_path_keyword: Tuple[str] = tuple()) -> Tuple[str, str, str]:
"""
+ TODO 应当先遍历gn_info进行匹配
直接grep,利用出现次数最多的BUILD.gn去定位subsystem_name和component_name"""
logging.info(f"fuzzy match: {file_name}")
_, base_name = os.path.split(file_name)
@@ -228,10 +234,13 @@ class RomAnalysisTool:
return str(), str(), str()
@classmethod
- def _save_as_xls(cls, result_dict: Dict, product_name: str) -> None:
+ def _save_as_xls(cls, result_dict: Dict, product_name: str, baseline: bool) -> None:
logging.info("saving as xls...")
header = ["subsystem_name", "component_name",
"output_file", "size(Byte)"]
+ if baseline:
+ header = ["subsystem_name", "component_name", "baseline",
+ "output_file", "size(Byte)"]
tmp_dict = copy.deepcopy(result_dict)
excel_writer = SimpleExcelWriter("rom")
excel_writer.set_sheet_header(headers=header)
@@ -241,7 +250,9 @@ class RomAnalysisTool:
component_start_row = 1
component_end_row = 0
component_col = 1
- del tmp_dict["size"]
+ baseline_col = 2
+ if "size" in tmp_dict.keys():
+ del tmp_dict["size"]
for subsystem_name in tmp_dict.keys():
subsystem_dict = tmp_dict.get(subsystem_name)
subsystem_size = subsystem_dict.get("size")
@@ -255,6 +266,9 @@ class RomAnalysisTool:
component_name)
component_size = component_dict.get("size")
component_file_count = component_dict.get("count")
+ component_baseline = component_dict.get("baseline")
+ if component_baseline:
+ del component_dict["baseline"]
del component_dict["count"]
del component_dict["size"]
component_end_row += component_file_count
@@ -262,10 +276,17 @@ class RomAnalysisTool:
for fileinfo in component_dict.get("filelist"):
file_name = fileinfo.get("file_name")
file_size = fileinfo.get("size")
- excel_writer.append_line(
- [subsystem_name, component_name, file_name, file_size])
+ line = [subsystem_name, component_name,
+ file_name, file_size]
+ if baseline:
+ line = [subsystem_name, component_name,
+ component_baseline, file_name, file_size]
+ excel_writer.append_line(line)
excel_writer.write_merge(component_start_row, component_col, component_end_row, component_col,
component_name)
+ if baseline:
+ excel_writer.write_merge(component_start_row, baseline_col, component_end_row, baseline_col,
+ component_baseline)
component_start_row = component_end_row + 1
excel_writer.write_merge(subsystem_start_row, subsystem_col, subsystem_end_row, subsystem_col,
subsystem_name)
@@ -275,21 +296,93 @@ class RomAnalysisTool:
excel_writer.save(output_name)
logging.info("save as xls success.")
- @ classmethod
- def analysis(cls, product_name: str, product_dict: Dict[str, List[str]]):
- logging.info("start analyzing...")
- gn_info_file = configs["gn_info_file"]
- with open(gn_info_file, 'r', encoding='utf-8') as f:
- gn_info = json.load(f)
- query_order: Dict[str, List[str]
- ] = configs[product_name]["query_order"]
- query_order["etc"] = configs["target_type"] # etc会查找所有的template
- rom_size_dict: Dict = dict()
+ @classmethod
+ def _result_unit_adaptive(cls, result_dict: Dict[str, Dict]) -> None:
+ total_size = unit_adaptive(result_dict["size"])
+ del result_dict["size"]
+ for subsystem_name, subsystem_info in result_dict.items():
+ sub_size = unit_adaptive(subsystem_info["size"])
+ count = subsystem_info["count"]
+ del subsystem_info["size"]
+ del subsystem_info["count"]
+ for component_name, component_info in subsystem_info.items():
+ component_info["size"] = unit_adaptive(component_info["size"])
+ subsystem_info["size"] = sub_size
+ subsystem_info["count"] = count
+ result_dict["size"] = total_size
+
+ @classmethod
+ def _match_manual_configured(cls, manual_config_info: Dict[str, Dict], compiled_files: Dict[str, List], compiled_root_path: str, result_dict: Dict[str, Dict]) -> None:
+ for file_path, file_info in manual_config_info.items():
+ full_path = os.path.join(
+ project_path, compiled_root_path, file_path)
+ if not os.path.isfile(full_path):
+ logging.warning(f"config error: {file_path} is not a file.")
+ continue
+ file_info["size"] = os.path.getsize(full_path)
+ file_info["file_name"] = full_path
+ cls._put(file_info["subsystem"],
+ file_info["component"], file_info, result_dict)
+ for _, v in compiled_files.items():
+ if full_path not in v:
+ continue
+ index = v.index(full_path)
+ del v[index]
+ break
+
+ @classmethod
+ def _iterate_all_template_type(cls, type_list: List[str], gn_info: Dict, gn_info_file: str, base_name: str, rom_ram_baseline: Dict, rom_size_dict: Dict, f: str, size: int):
+ find_flag = False
+ component_rom_baseline = None
+ for tn in type_list: # tn example: ohos_shared_library
+ if find_flag: # 如果已经在前面的template中找到了,后面的就不必再查找
+ break
+ output_dict: Dict[str, Dict] = gn_info.get(
+ tn) # 这个模板对应的所有可能编译产物
+ if not output_dict:
+ logging.warning(
+ f"'{tn}' not found in the {gn_info_file}")
+ continue
+ d = output_dict.get(base_name)
+ if not d:
+ continue
+ d["size"] = size
+ d["file_name"] = f.replace(project_path, "")
+ if rom_ram_baseline.get(d["subsystem_name"]) and rom_ram_baseline.get(d["subsystem_name"]).get(d["component_name"]):
+ component_rom_baseline = rom_ram_baseline.get(
+ d["subsystem_name"]).get(d["component_name"]).get("rom")
+ cls._put(d["subsystem_name"],
+ d["component_name"], d, rom_size_dict, component_rom_baseline)
+ find_flag = True
+ if not find_flag: # 如果指定序列中的template都没有查找到,则模糊匹配
+ # fuzzy match
+ psesudo_gn, sub, com = cls._fuzzy_match(f)
+ if sub and com:
+ if rom_ram_baseline.get(sub) and rom_ram_baseline.get(sub).get(com):
+ component_rom_baseline = rom_ram_baseline.get(
+ sub).get(com).get("baseline")
+ cls._put(sub, com, {
+ "subsystem_name": sub,
+ "component_name": com,
+ "psesudo_gn_path": psesudo_gn,
+ "description": "fuzzy match",
+ "file_name": f.replace(project_path, ""),
+ "size": size,
+ }, rom_size_dict, component_rom_baseline)
+ find_flag = True
+ if not find_flag: # 模糊匹配都没有匹配到的,归属到NOTFOUND
+ cls._put("NOTFOUND", "NOTFOUND", {
+ "file_name": f.replace(project_path, ""),
+ "size": size,
+ }, rom_size_dict)
+
+ @classmethod
+ def _subsystem_component_for_all_product_file(cls, product_dict: Dict[str, List[str]], query_order: Dict[str, List[str]],
+ gn_info: Dict, gn_info_file: str, rom_ram_baseline: Dict, rom_size_dict: Dict):
for t, l in product_dict.items():
for f in l: # 遍历所有文件
if os.path.isdir(f):
continue
- find_flag = False
type_list = query_order.get(t)
_, base_name = os.path.split(f)
size = os.path.getsize(f)
@@ -297,44 +390,43 @@ class RomAnalysisTool:
logging.warning(
f"'{t}' not found in query_order of the config.yaml")
break
- for tn in type_list: # tn example: ohos_shared_library
- if find_flag: # 如果已经在前面的template中找到了,后面的就不必再查找
- break
- output_dict: Dict[str, Dict] = gn_info.get(
- tn) # 这个模板对应的所有可能编译产物
- if not output_dict:
- logging.warning(
- f"'{tn}' not found in the {gn_info_file}")
- continue
- d = output_dict.get(base_name)
- if not d:
- continue
- d["size"] = size
- d["file_name"] = f.replace(project_path, "")
- cls._put(d["subsystem_name"],
- d["component_name"], d, rom_size_dict)
- find_flag = True
- if not find_flag: # 如果指定序列中的template都没有查找到,则模糊匹配
- # fuzzy match
- psesudo_gn, sub, com = cls._fuzzy_match(f)
- if sub and com:
- cls._put(sub, com, {
- "subsystem_name": sub,
- "component_name": com,
- "psesudo_gn_path": psesudo_gn,
- "description": "fuzzy match",
- "file_name": f.replace(project_path, ""),
- "size": size,
- }, rom_size_dict)
- find_flag = True
- if not find_flag: # 模糊匹配都没有匹配到的,归属到NOTFOUND
- cls._put("NOTFOUND", "NOTFOUND", {
- "file_name": f.replace(project_path, ""),
- "size": size,
- }, rom_size_dict)
- with open(configs[product_name]["output_name"], 'w', encoding='utf-8') as f:
+ cls._iterate_all_template_type(
+ type_list, gn_info, gn_info_file, base_name, rom_ram_baseline, rom_size_dict, f, size)
+
+ @classmethod
+ def analysis(cls, product_name: str, product_dict: Dict[str, List[str]], output_file_name: str):
+ """analysis the rom of lite/small product
+
+ Args:
+ product_name (str): product name configured in the yaml
+ product_dict (Dict[str, List[str]]): result dict of compiled product file
+ format:
+ "bin":[...],
+ "so":[...]
+ ...
+ """
+ logging.info("start analyzing...")
+ rom_ram_baseline: Dict[str, Dict] = RomRamBaselineCollector.collect(
+ project_path)
+ with open("rom_ram_baseline.json", 'w', encoding='utf-8') as f:
+ json.dump(rom_ram_baseline, f, indent=4)
+ gn_info_file = configs["gn_info_file"] # filename to save gn_info
+ with open(gn_info_file, 'r', encoding='utf-8') as f:
+ gn_info = json.load(f)
+ query_order: Dict[str, List[str]
+ ] = configs[product_name]["query_order"] # query order of the gn template to be matched
+ query_order["etc"] = configs["target_type"] # etc会查找所有的template
+ rom_size_dict: Dict = dict()
+ if "manual_config" in configs[product_name].keys():
+ cls._match_manual_configured(
+ configs[product_name]["manual_config"], product_dict, configs[product_name]["product_dir"]["root"], rom_size_dict)
+ cls._subsystem_component_for_all_product_file(
+ product_dict, query_order, gn_info, gn_info_file, rom_ram_baseline, rom_size_dict)
+ if unit_adapt:
+ cls._result_unit_adaptive(rom_size_dict)
+ with open(output_file_name + ".json", 'w', encoding='utf-8') as f:
json.dump(rom_size_dict, f, indent=4)
- cls._save_as_xls(rom_size_dict, product_name)
+ cls._save_as_xls(rom_size_dict, product_name, baseline)
logging.info("success")
@@ -343,7 +435,7 @@ def main():
RomAnalysisTool.collect_gn_info()
product_dict: Dict[str, List[str]
] = RomAnalysisTool.collect_product_info(product_name)
- RomAnalysisTool.analysis(product_name, product_dict)
+ RomAnalysisTool.analysis(product_name, product_dict, output_file)
if __name__ == "__main__":
diff --git a/tools/rom_ram_analyzer/standard/README.md b/tools/rom_ram_analyzer/standard/README.md
index 6a1115144755247b414a40bc7de97fcd3402a7eb..cef2cbf0206dce5c9340451e5cb9d54705ab5d5d 100644
--- a/tools/rom_ram_analyzer/standard/README.md
+++ b/tools/rom_ram_analyzer/standard/README.md
@@ -10,7 +10,11 @@
## 支持产品
-主要是rk3568系列,已测试产品包括rk3568 rk3568_mini_system
+主要是rk3568系列,已测试产品包括rk3568、rk3568_mini_system
+
+## 实现思路
+
+利用编译构建自动生成的out/rk3568/packages/phone/system_module_info.json中已有的信息重新组织,对于其中没有子系统和部件的文件,手动查找。目前已知ohos_sa_profile没有子系统和部件信息
## 使用说明
@@ -30,32 +34,33 @@
1. `-h`或`--help`命令查看帮助
```shell
> python3 rom_analyzer.py -h
- usage: rom_analyzer.py [-h] [-v] -p PROJECT_PATH -j MODULE_INFO_JSON -n PRODUCT_NAME -d PRODUCT_DIR [-o OUTPUT_FILE] [-e EXCEL]
-
+ usage: rom_analyzer.py [-h] [-v] -p PROJECT_PATH -j MODULE_INFO_JSON -n PRODUCT_NAME -d PRODUCT_DIR [-b] [-o OUTPUT_FILE] [-u] [-e EXCEL]
+
analyze rom size of component.
-
- optional arguments:
- -h, --help show this help message and exit
- -v, -version show program\'s version number and exit
- -p PROJECT_PATH, --project_path PROJECT_PATH
- root path of oh. eg: -p ~/oh
- -j MODULE_INFO_JSON, --module_info_json MODULE_INFO_JSON
+
+ options:
+ -h, --help show this help message and exit
+ -v, -version show program's version number and exit
+ -p PROJECT_PATH, --project_path PROJECT_PATH
+ root path of openharmony. eg: -p ~/openharmony
+ -j MODULE_INFO_JSON, --module_info_json MODULE_INFO_JSON
path of out/{product_name}/packages/phone/system_module_info.json
- -n PRODUCT_NAME, --product_name PRODUCT_NAME
+ -n PRODUCT_NAME, --product_name PRODUCT_NAME
product name. eg: -n rk3568
- -d PRODUCT_DIR, --product_dir PRODUCT_DIR
+ -d PRODUCT_DIR, --product_dir PRODUCT_DIR
subdirectories of out/{product_name}/packages/phone to be counted.eg: -d system -d vendor
- -o OUTPUT_FILE, --output_file OUTPUT_FILE
+ -b, --baseline add baseline of component to the result(-b) or not.
+ -o OUTPUT_FILE, --output_file OUTPUT_FILE
basename of output file, default: rom_analysis_result. eg: demo/rom_analysis_result
- -e EXCEL, --excel EXCEL
+ -u, --unit_adaptive unit adaptive
+ -e EXCEL, --excel EXCEL
if output result as excel, default: False. eg: -e True
```
1. 使用示例
```shell
- python3 rom_analyzer.py -p ~/nomodify_oh/ -j ../system_module_info.json -n rk3568 -d system -d vendor -d updater -o demo/demo -e True
+ python3 rom_analyzer.py -p ~/oh/ -j ~/oh/out/rk3568/packages/phone/system_module_info.json -n rk3568 -d system -d vendor -d updater -e True -b
# oh:rootpath of oh
- # rk3568: product_name, same as out/{product_name}
- # demo/demo: path of output file, where the second 'demo' is the basename of output file
+ # -b: add baseline info to the result
# -e True:output result in excel format additionally
```
@@ -65,9 +70,15 @@
```json
{
子系统名: {
- "size": 整个子系统输出文件的总大小,
+ "size": 整个子系统输出文件的总大小(单位:字节),
"file_count": 整个子系统产生的文件数,
- 输出文件名: 本文件的大小,
+ 部件名: {
+ "size": 部件的大小(单位:字节),
+ "file_count": 部件对应的文件数,
+ "baseline": 部件的baseline(根据bundle.json生成),
+ "编译产物文件名": 编译产物大小(单位:字节)
+ ...
+ }
...
},
...
@@ -76,13 +87,15 @@
## 附加说明
-1. 由于目前standard产品使用的基本都是自定义的template,能够有效收集更多信息,因此相较于lite_small的分析脚本,本脚本能够具有更高的准确率,请放心使用
+1. 由于目前standard产品使用的基本都是自定义的template,能够有效收集更多信息,因此相较于lite_small的分析脚本,本脚本能够具有更高的准确率,可以放心使用(不出意外的话)
# ram_analyzer.py
## 功能介绍
-基于out/{product_name}/packages/phone下所有cfg文件、out/{product_name}/packages/phone/system/profile下所有xml文件,分析各进程及对应部件的ram占用(默认取Pss)
+基于out/{product_name}/packages/phone下所有cfg文件、out/{product_name}/packages/phone/system/profile下所有json文件,rom的分析结果,(rom_ram_baseline.json——可以在rom分析阶段通过-b参数生成)分析各进程及对应部件的ram占用(默认取Pss)
+
+收集cfg、json文件的可供参考命令:`mkdir cfgs && cp $(find ~/oh/out/rk3568/packages/phone -name *.cfg | xargs) cfgs`
结果以json与xls格式存储,其中,json格式是必输出的,xls格式需要-e参数控制。
@@ -101,7 +114,8 @@
```
5. 准备好相关数据:
1. out/{product_name}/packages/phone下所有cfg文件,并将其放置于同一个目录中(ps:同名文件仅保存一份即可)
- 1. out/{product_name}/packages/phone/system/profile下所有xml文件
+ 1. out/{product_name}/packages/phone/system/profile下所有json 文件
+ 1. rom_ram_baseline.json——如果需要在结果中生成基线信息
6. 运行rom_analyzer.py产生的json结果一份(即-o参数对应的文件,默认rom_analysis_result.json)
命令介绍:
@@ -109,40 +123,72 @@
1. 使用`-h`或`--help`查看帮助
```shell
> python .\ram_analyzer.py -h
- usage: ram_analyzer.py [-h] [-v] -x XML_PATH -c CFG_PATH [-j ROM_RESULT] -n DEVICE_NUM [-o OUTPUT_FILENAME] [-e EXCEL]
-
+ usage: ram_analyzer.py [-h] [-v] -s JSON_PATH -c CFG_PATH [-j ROM_RESULT] -n DEVICE_NUM [-b BASELINE_FILE]
+ [-o OUTPUT_FILENAME] [-u] [-e EXCEL]
+
analyze ram size of component
-
+
optional arguments:
- -h, --help show this help message and exit
- -v, -version show program\'s version number and exit
- -x XML_PATH, --xml_path XML_PATH
- path of xml file. eg: -x ~/oh/out/rk3568/packages/phone/system/profile
- -c CFG_PATH, --cfg_path CFG_PATH
+ -h, --help show this help message and exit
+ -v, -version show program's version number and exit
+ -s JSON_PATH, --json_path JSON_PATH
+ path of sa json file. eg: -s ~/openharmony/out/rk3568/packages/phone/system/profile
+ -c CFG_PATH, --cfg_path CFG_PATH
path of cfg files. eg: -c ./cfgs/
- -j ROM_RESULT, --rom_result ROM_RESULT
- json file produced by rom_analyzer_v1.0.py, default: ./rom_analysis_result.json.eg: -j ./demo/rom_analysis_result.json
- -n DEVICE_NUM, --device_num DEVICE_NUM
+ -j ROM_RESULT, --rom_result ROM_RESULT
+ json file produced by rom_analyzer_v1.0.py, default: ./rom_analysis_result.json.eg: -j
+ ./demo/rom_analysis_result.json
+ -n DEVICE_NUM, --device_num DEVICE_NUM
device number to be collect hidumper info. eg: -n 7001005458323933328a01fce16d3800
- -o OUTPUT_FILENAME, --output_filename OUTPUT_FILENAME
+ -b BASELINE_FILE, --baseline_file BASELINE_FILE
+ baseline file of rom and ram generated by rom analysis.
+ -o OUTPUT_FILENAME, --output_filename OUTPUT_FILENAME
base name of output file, default: ram_analysis_result. eg: -o ram_analysis_result
- -e EXCEL, --excel EXCEL
+ -u, --unit_adaptive unit adaptive
+ -e EXCEL, --excel EXCEL
if output result as excel, default: False. eg: -e True
```
2. 使用示例:
```shell
- python .\ram_analyzer.py -x .\profile\ -c .\init\ -n 7001005458323933328a01fce16d3800 -j .\rom_analysis_result.json -o /demo/demo -e True
- # demo/demo: path of output file, where the second 'demo' is the basename of output file
- # -e True:output result in excel format additionally
+ python .\ram_analyzer.py -s .\profile\ -c .\cfgs -j .\rom_analysis_result.json -n 7001005458323933328a59a140913900 -b rom_ram_baseline.json -e True
+ # -s profile:copy到本地的out/rk3568/package/phone/system/profile目录
+ # -c .\cfgs:copy到本地的out/rk3568/packages/phone下所有cfg
+ # -b rom_ram_baseline.json:指定rom与ram的基线信息文件(可在rom统计阶段使用-b参数生成)
+ # -e True:生成xls格式的结果文件
```
+3. 生成文件说明:
+ 1. refactored_ram_analysis_result.json:结果文件
+ 1. ram_analysis_result.json:结果文件(供看板使用)
+ 1. ram_analysis_result.xls:xls格式存储结果文件
+
## 输出格式说明(json)
+
+1. refactored_ram_analysis_result.json
```json
{
- 进程名:{
- "size": 本进程占用内存的大小,
- 子系统名: {
- 部件名: {
- elf文件名: elf文件大小
+ "子系统名":{
+ "size": 子系统下所有进程占用内存的大小(单位:Byte),
+ "部件名":{
+ "size": 部件下所有进程占用内存的大小(单位:Byte),
+ "进程名":{
+ "size": 本进程占用内存的大小(单位:Byte),
+ "elf":{
+ "拉起本进程的二进制文件名": 该二进制文件的大小(单位:Byte),
+ }
+ }
+ }
+ }
+}
+```
+
+1. ram_analysis_result.json
+```json
+{
+ "进程名":{
+ "size": 本进程占用内存的大小(单位:字节),
+ "子系统名": {
+ "部件名": {
+ "elf文件名": elf文件大小(单位:字节)
...
}
...
@@ -151,4 +197,9 @@
},
...
}
-```
\ No newline at end of file
+```
+
+## 后续工作
+
+
+1. 考虑适当简化逻辑
\ No newline at end of file
diff --git a/tools/rom_ram_analyzer/standard/pkgs/basic_tool.py b/tools/rom_ram_analyzer/standard/pkgs/basic_tool.py
index 96a59ec2bde579deebce71fe8c77ced4e6642345..955f4dfc767c25d05f77cd5cf23f0fadec59cecd 100644
--- a/tools/rom_ram_analyzer/standard/pkgs/basic_tool.py
+++ b/tools/rom_ram_analyzer/standard/pkgs/basic_tool.py
@@ -2,11 +2,41 @@ import sys
import typing
import os
import glob
+import re
from pathlib import Path
from typing import *
+def unit_adaptive(size: int) -> str:
+ unit_list = ["Byte", "KB", "MB", "GB"]
+ index = 0
+ while index < len(unit_list) and size >= 1024:
+ size /= 1024
+ index += 1
+ if index == len(unit_list):
+ index = len(unit_list) - 1
+ size *= 1024
+ return str(round(size, 2)) + unit_list[index]
+
+
class BasicTool:
+ @classmethod
+ def match_paragraph(cls, content: str, start_pattern: str = r"\w+\(\".*?\"\) *{", end_pattern: str = "\}") -> \
+ Iterator[re.Match]:
+ """
+ 匹配代码段,支持单行
+ 注意:ptrn中已经包含前面的空格,所以start_pattern中可以省略
+ :param content: 被匹配的字符串
+ :param start_pattern: 模式的开头
+ :param end_pattern: 模式的结尾
+ :return: 匹配到的段落的迭代器
+ """
+ ptrn = r'^( *){s}(?#匹配开头).*?(?#中间非贪婪)\1(?#如果开头前面有空格,则结尾的前面应该有相同数量的空格)?{e}$(?#匹配结尾)'.format(
+ s=start_pattern, e=end_pattern)
+ ptrn = re.compile(ptrn, re.M | re.S)
+ result = re.finditer(ptrn, content)
+ return result
+
@classmethod
def find_all_files(cls, folder: str, real_path: bool = True, apply_abs: bool = True, de_duplicate: bool = True,
p_filter: typing.Callable = lambda x: True) -> list:
@@ -26,7 +56,36 @@ class BasicTool:
def get_abs_path(cls, path: str) -> str:
return os.path.abspath(os.path.expanduser(path))
+ @classmethod
+ def re_group_1(cls, content: str, pattern: str, **kwargs) -> str:
+ """
+ 匹配正则表达式,如果有匹配到内容,返回group(1)的内容
+ :param content: 要被匹配的内容
+ :param pattern: 进行匹配的模式
+ :return: 匹配到的结果(group(1))
+ TODO 对()的检查应该更严格
+ """
+ if not (r'(' in pattern and r')' in pattern):
+ raise ValueError("parentheses'()' must in the pattern")
+ result = re.search(pattern, content, **kwargs)
+ if result:
+ return result.group(1)
+ return str()
+
+ @classmethod
+ def execute(cls, cmd: str, post_processor: Callable[[Text], Text] = lambda x: x) -> Any:
+ """
+ 封装popen,返回标准输出的列表
+ :param post_processor: 对执行结果进行处理
+ :param cmd: 待执行的命令
+ :return: 经处理过后的字符串列表
+
+ """
+ output = os.popen(cmd).read()
+ output = post_processor(output)
+ return output
+
if __name__ == '__main__':
for i in BasicTool.find_all_files(".", apply_abs=False):
- print(i)
\ No newline at end of file
+ print(i)
diff --git a/tools/rom_ram_analyzer/standard/pkgs/gn_common_tool.py b/tools/rom_ram_analyzer/standard/pkgs/gn_common_tool.py
index 11c2718ffe53ec054c33223ae15bcd6e57890544..f4013e3fce316e5d98c14379a7182cdd1f45f28a 100644
--- a/tools/rom_ram_analyzer/standard/pkgs/gn_common_tool.py
+++ b/tools/rom_ram_analyzer/standard/pkgs/gn_common_tool.py
@@ -1,5 +1,12 @@
import os
import json
+import re
+from typing import *
+
+if __name__ == '__main__':
+ from basic_tool import BasicTool
+else:
+ from pkgs.basic_tool import BasicTool
class GnCommonTool:
@@ -98,21 +105,9 @@ class GnCommonTool:
return part_name, subsystem_name
@classmethod
- def find_part_subsystem(cls, gn_file: str, project_path: str) -> tuple:
- """
- 查找gn_file对应的part_name和subsystem
- 如果在gn中找不到,就到bundle.json中去找
- """
- part_name = None
- subsystem_name = None
- part_var_flag = False # 标识这个变量从gn中取出的原始值是不是变量
- subsystem_var_flag = False
- var_list = list()
- part_name_pattern = r"part_name *=\s*\S*"
- subsystem_pattern = r"subsystem_name *=\s*\S*"
- meta_grep_pattern = "grep -E '{}' {} | head -n 1"
- part_cmd = meta_grep_pattern.format(part_name_pattern, gn_file)
- subsystem_cmd = meta_grep_pattern.format(subsystem_pattern, gn_file)
+ def _parse_part_subsystem(cls, part_var_flag: bool, subsystem_var_flag: bool, var_list: List[str], part_cmd: str,
+ subsystem_cmd: str, gn_file: str, project_path: str) -> Tuple[str, str]:
+ part_name = subsystem_name = None
part = os.popen(part_cmd).read().strip()
if len(part) != 0:
part = part.split('=')[-1].strip()
@@ -145,13 +140,64 @@ class GnCommonTool:
tuple(var_list), gn_file, project_path)[0]
subsystem_name = t if t is not None and len(
t) != 0 else subsystem_name
- if part_name is not None and subsystem_name is not None:
+ return part_name, subsystem_name
+
+ @classmethod
+ def find_part_subsystem(cls, gn_file: str, project_path: str) -> tuple:
+ """
+ 查找gn_file对应的part_name和subsystem
+ 如果在gn中找不到,就到bundle.json中去找
+ """
+ part_var_flag = False # 标识这个变量从gn中取出的原始值是不是变量
+ subsystem_var_flag = False
+ var_list = list()
+ part_name_pattern = r"part_name *=\s*\S*"
+ subsystem_pattern = r"subsystem_name *=\s*\S*"
+ meta_grep_pattern = "grep -E '{}' {} | head -n 1"
+ part_cmd = meta_grep_pattern.format(part_name_pattern, gn_file)
+ subsystem_cmd = meta_grep_pattern.format(subsystem_pattern, gn_file)
+
+ part_name, subsystem_name = cls._parse_part_subsystem(part_var_flag, subsystem_var_flag,
+ var_list, part_cmd, subsystem_cmd, gn_file, project_path)
+ if part_name and subsystem_name:
return part_name, subsystem_name
# 如果有一个没有找到,就要一层层去找bundle.json文件
t_part_name, t_subsystem_name = cls.__find_part_subsystem_from_bundle(
gn_file, stop_tail=project_path)
- if t_part_name is not None:
+ if t_part_name:
part_name = t_part_name
- if t_subsystem_name is not None:
+ if t_subsystem_name:
subsystem_name = t_subsystem_name
return part_name, subsystem_name
+
+
+class GnVariableParser:
+ @classmethod
+ def string_parser(cls, var: str, content: str) -> str:
+ """
+ 解析值为字符串的变量,没有对引号进行去除,如果是a = b这种b为变量的,则无法匹配
+ :param content: 要进行解析的内容
+ :param var: 变量名
+ :return: 变量值[str]
+ """
+ result = BasicTool.re_group_1(
+ content, r"{} *= *[\n]?(\".*?\")".format(var), flags=re.S | re.M)
+ return result
+
+ @classmethod
+ def list_parser(cls, var: str, content: str) -> List[str]:
+ """
+ 解析值为列表的变量,list的元素必须全为数字或字符串,且没有对引号进行去除,如果是a = b这种b为变量的,则无法匹配
+ :param var: 变量名
+ :param content: 要进行解析的内容
+ :return: 变量值[List]
+ """
+ result = BasicTool.re_group_1(
+ content, r"{} *= *(\[.*?\])".format(var), flags=re.S | re.M)
+ result_list = list()
+ for item in result.lstrip('[').rstrip(']').split('\n'):
+ item = item.strip().strip(',"')
+ if not item:
+ continue
+ result_list.append(item)
+ return result_list
diff --git a/tools/rom_ram_analyzer/standard/pkgs/rom_ram_baseline_collector.py b/tools/rom_ram_analyzer/standard/pkgs/rom_ram_baseline_collector.py
new file mode 100644
index 0000000000000000000000000000000000000000..fc6d2f7dc11688c54b87b4035d6d37eb4f6f828f
--- /dev/null
+++ b/tools/rom_ram_analyzer/standard/pkgs/rom_ram_baseline_collector.py
@@ -0,0 +1,68 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# Copyright (c) 2023 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file is to collect baseline information (according to bundle.json)
+
+
+if __name__ == '__main__':
+ from basic_tool import BasicTool
+else:
+ from pkgs.basic_tool import BasicTool
+from typing import Dict, Any
+import json
+import logging
+
+
+class RomRamBaselineCollector:
+ """collect baseline of rom and ram from bundle.json
+ """
+
+ @classmethod
+ def _put(cls, result_dict: Dict, subsystem_name: str, component_name: str, rom_size: str, ram_size: str,
+ bundle_path: str) -> None:
+ if not result_dict.get(subsystem_name):
+ result_dict[subsystem_name] = dict()
+ result_dict[subsystem_name][component_name] = dict()
+ result_dict[subsystem_name][component_name]["rom"] = rom_size
+ result_dict[subsystem_name][component_name]["ram"] = ram_size
+ result_dict[subsystem_name][component_name]["bundle.json"] = bundle_path
+
+ @classmethod
+ def collect(cls, oh_path: str) -> Dict[str, Dict]:
+ def post_handler(x: str) -> list:
+ x = x.split("\n")
+ y = [item for item in x if item]
+ return y
+
+ bundle_list = BasicTool.execute(
+ cmd=f"find {oh_path} -name bundle.json", post_processor=post_handler)
+ rom_ram_baseline_dict: Dict[str, Dict] = dict()
+ for bundle in bundle_list:
+ with open(bundle, 'r', encoding='utf-8') as f:
+ content: Dict[str, Any] = json.loads(f.read())
+ component_info = content.get("component")
+ if not component_info:
+ logging.warning(f"{bundle} has no field of 'component'.")
+ continue
+ component_name = component_info.get("name")
+ subsystem_name = component_info.get("subsystem")
+ rom_baseline = component_info.get("rom")
+ ram_baseline = component_info.get("ram")
+ if not (subsystem_name or rom_baseline or ram_baseline):
+ logging.warning(
+ f"subsystem=\"{subsystem_name}\", rom=\"{rom_baseline}\", ram=\"{ram_baseline}\" in {bundle}")
+ cls._put(rom_ram_baseline_dict, subsystem_name,
+ component_name, rom_baseline, ram_baseline, bundle)
+ return rom_ram_baseline_dict
diff --git a/tools/rom_ram_analyzer/standard/ram_analyzer.py b/tools/rom_ram_analyzer/standard/ram_analyzer.py
index c6c2df5ea30a254b28e59e8b1a7011e2b2f247fc..cc354fb017f911ed39d469d64ff0e82a6d97b6fb 100644
--- a/tools/rom_ram_analyzer/standard/ram_analyzer.py
+++ b/tools/rom_ram_analyzer/standard/ram_analyzer.py
@@ -14,7 +14,7 @@
# limitations under the License.
# This file is to implement the rom analyzation of standard device.
-#
+#
import argparse
import copy
@@ -26,7 +26,9 @@ import sys
import subprocess
import typing
import xml.dom.minidom as dom
+from typing import Dict
from pprint import pprint
+from pkgs.basic_tool import unit_adaptive
from pkgs.simple_excel_writer import SimpleExcelWriter
@@ -41,7 +43,7 @@ class HDCTool:
True:可用
False:不可用
"""
- cp = subprocess.run(["hdc"], capture_output=True)
+ cp = subprocess.run(["hdc", "--help"], capture_output=True)
stdout = str(cp.stdout)
stderr = str(cp.stderr)
return verify_str in stdout or verify_str in stderr
@@ -58,7 +60,6 @@ class HDCTool:
stderr = str(cp.stderr)
return device_num in stderr or device_num in stdout
-
@classmethod
def exec(cls, args: list, output_from: str = "stdout"):
cp = subprocess.run(args, capture_output=True)
@@ -109,6 +110,7 @@ class RamAnalyzer:
for lname in __process_name_list:
if lname.startswith(hname):
return lname
+ return str()
def process_ps_ef(content: str) -> list:
line_list = content.strip().split("\n")[1:]
@@ -116,7 +118,6 @@ class RamAnalyzer:
for line in line_list:
process_name = line.split()[7]
if process_name.startswith('['):
- # 内核进程
continue
process_name_list.append(process_name)
return process_name_list
@@ -133,22 +134,31 @@ class RamAnalyzer:
break
if line.isspace():
continue
- processed: typing.List[typing.Text] = cls.__hidumper_mem_line_process(line)
- if not processed or not processed[0].isnumeric(): # 如果第一列不是数字(pid),就过
+ processed: typing.List[typing.Text] = cls.__hidumper_mem_line_process(
+ line)
+ # 如果第一列不是数字(pid),就过
+ if not processed or not processed[0].isnumeric():
continue
name = processed[1] # 否则的话就取名字,和对应的size
- size = int(processed[cls.__ss_dict.get(ss)])
- process_pss_dict[find_full_process_name(name)] = size
+ size = int(processed[cls.__ss_dict.get(ss)]) * \
+ 1024 # kilo byte to byte
+ full_process_name = find_full_process_name(name)
+ if not full_process_name:
+ print(
+ f"warning: process \"{full_process_name}\" not found in the result of command \"ps -ef\"")
+ continue
+ process_pss_dict[full_process_name] = size
return process_pss_dict
@classmethod
- def process_hidumper_info(cls, device_num: str, ss:str) -> typing.Dict[str, int]:
+ def process_hidumper_info(cls, device_num: str, ss: str) -> typing.Dict[str, int]:
"""
处理进程名与对应进程大小
"""
def exec_once() -> typing.Dict[str, int]:
- stdout = HDCTool.exec(["hdc", "-t", device_num, "shell", "hidumper", "--mem"])
+ stdout = HDCTool.exec(
+ ["hdc", "-t", device_num, "shell", "hidumper", "--mem"])
name_size_dict = cls.__parse_hidumper_mem(stdout, device_num, ss)
return name_size_dict
@@ -162,25 +172,28 @@ class RamAnalyzer:
return exec_once()
@classmethod
- def __parse_process_xml(cls, file_path: str, result_dict: typing.Dict[str, typing.List[str]]):
+ def __parse_process_json(cls, file_path: str, result_dict: typing.Dict[str, typing.List[str]]):
"""
- 解析xml文件,结存存入 result_dict中,格式:{process_name: os_list}
+        解析json文件,结果存入 result_dict中,格式:{process_name: os_list}
其中,so_list中是so的base_name
"""
- if not (os.path.isfile(file_path) and file_path.endswith(".xml")):
- print("warning: {} not exist or not a xml file".format(file_path))
+ if not (os.path.isfile(file_path) and file_path.endswith(".json")):
+ print("warning: {} not exist or not a json file".format(file_path))
return
- doc = dom.parse(file_path)
- info = doc.getElementsByTagName("info")[0]
- process = info.getElementsByTagName("process")[0]
- process_name = process.childNodes[0].data
- result_dict[process_name] = list()
- libs = info.getElementsByTagName("loadlibs")[0].getElementsByTagName("libpath")
- for lib in libs:
- so = lib.childNodes[0].data
- result_dict.get(process_name).append(os.path.split(so)[-1])
- if debug:
- print(process_name, " ", so)
+ with open(file_path, 'r', encoding='utf-8') as f:
+ j_content: typing.Dict[str, typing.Any] = json.load(f)
+ if "process" not in j_content.keys() or "systemability" not in j_content.keys():
+ print(
+ f"warning: {file_path} has no field 'process' or 'systemability'")
+ return
+ process_name: str = j_content.get("process")
+ elf_list: typing.List[str] = list()
+ for sa in j_content.get("systemability"):
+ libpath: str = sa.get("libpath")
+ if not libpath:
+ continue
+ elf_list.append(libpath)
+ result_dict[process_name] = elf_list
@classmethod
def get_elf_info_from_rom_result(cls, rom_result_json: str) -> typing.Dict[str, typing.Dict[str, str]]:
@@ -193,11 +206,14 @@ class RamAnalyzer:
rom_info_dict = json.load(f)
elf_info_dict: typing.Dict[str, typing.Dict[str, str]] = dict()
for subsystem_name in rom_info_dict.keys():
- sub_val_dict: typing.Dict[str, typing.Any] = rom_info_dict.get(subsystem_name)
+ sub_val_dict: typing.Dict[str, typing.Any] = rom_info_dict.get(
+ subsystem_name)
delete_values_from_dict(sub_val_dict, ["size", "file_count"])
for component_name in sub_val_dict.keys():
- component_val_dict: typing.Dict[str, str] = sub_val_dict.get(component_name)
- delete_values_from_dict(component_val_dict, ["size", "file_count"])
+ component_val_dict: typing.Dict[str, str] = sub_val_dict.get(
+ component_name)
+ delete_values_from_dict(component_val_dict, [
+ "size", "file_count"])
for file_name, size in component_val_dict.items():
file_basename: str = os.path.split(file_name)[-1]
elf_info_dict[file_basename] = {
@@ -205,6 +221,7 @@ class RamAnalyzer:
"component_name": component_name,
"size": size
}
+
return elf_info_dict
@classmethod
@@ -224,7 +241,8 @@ class RamAnalyzer:
if first.endswith("sa_main"):
# 由sa_main去来起进程
xml_base_name = os.path.split(path_list[0])[-1]
- cls.__parse_process_xml(os.path.join(profile_path, xml_base_name), result_dict)
+ cls.__parse_process_json(os.path.join(
+ profile_path, xml_base_name), result_dict)
else:
# 直接执行
if result_dict.get(process_name) is None:
@@ -232,25 +250,14 @@ class RamAnalyzer:
result_dict.get(process_name).append(os.path.split(first)[-1])
@classmethod
- def get_process_so_relationship(cls, xml_path: str, cfg_path: str, profile_path: str) -> typing.Dict[
+ def get_process_so_relationship(cls, cfg_path: str, profile_path: str) -> typing.Dict[
str, typing.List[str]]:
"""
- 从out/{product_name}/packages/phone/sa_profile/merged_sa查找xml文件并处理得到进程与so的对应关系
+ parse the relationship between process and elf file
"""
# 从merged_sa里面收集
- xml_list = glob.glob(xml_path + os.sep + "*[.]xml", recursive=True)
process_elf_dict: typing.Dict[str, typing.List[str]] = dict()
- for xml in xml_list:
- if debug:
- print("parsing: ", xml)
- try:
- cls.__parse_process_xml(xml, process_elf_dict)
- except:
- print("parse '{}' failed".format(xml))
- finally:
- ...
- # 从system/etc/init/*.cfg中收集,如果是sa_main拉起的,则从system/profile/*.xml中进行解析
- cfg_list = glob.glob(cfg_path + os.sep + "*[.]cfg", recursive=True)
+ cfg_list = glob.glob(cfg_path + os.sep + "*.cfg", recursive=True)
for cfg in cfg_list:
if debug:
print("parsing: ", cfg)
@@ -263,54 +270,104 @@ class RamAnalyzer:
return process_elf_dict
@classmethod
- def __save_result_as_excel(cls, data_dict: dict, filename: str, ss: str):
+ def __inside_save_result_as_excel(cls, baseline_file, subsystem_name, component_name, component_size,
+ component_baseline, process_name, process_size, elf_name, elf_size):
+ if baseline_file:
+ return [subsystem_name, component_name, component_size,
+ component_baseline, process_name, process_size, elf_name, elf_size]
+ else:
+ return [subsystem_name, component_name, component_size,
+ process_name, process_size, elf_name, elf_size]
+
+ @classmethod
+ def __save_result_as_excel(cls, data_dict: dict, filename: str, ss: str, baseline_file: str, unit_adapt: bool):
"""
保存结果到excel中
- 进程名:{
- "size": xxx,
- 子系统名:{
- 部件名:{
- 二进制文件: xxx,
- ...
+ 子系统:{
+ "size": 1234,
+ 部件:{
+ "size":123,
+ "base_line":124,
+ 进程:{
+ "size":12,
+ "elf":{
+ "elf_file_1":elf_size,
+ ...
+ }
}
}
}
"""
tmp_dict = copy.deepcopy(data_dict)
writer = SimpleExcelWriter("ram_info")
- writer.set_sheet_header(
- ["process_name", "process_size({}, KB)".format(ss), "subsystem_name","component_name", "elf_name", "elf_size(KB)"])
- process_start_r = 1
- process_end_r = 0
- process_c = 0
- subsystem_c = 2
+ header_unit = "" if unit_adapt else ", Byte"
+ header = [
+ "subsystem_name", "component_name", f"component_size(ram{header_unit})", "process_name",
+ f"process_size({ss}{header_unit})", "elf", f"elf_size{'' if unit_adapt else '(Byte)'}"
+ ]
+ if baseline_file:
+ header = [
+ "subsystem_name", "component_name", f"component_size(ram{header_unit})", "baseline", "process_name",
+ f"process_size({ss}{header_unit})", "elf", f"elf_size{'' if unit_adapt else '(Byte)'}"
+ ]
+ writer.set_sheet_header(header)
+ subsystem_c = 0
subsystem_start_r = 1
subsystem_end_r = 0
- process_size_c = 1
+
+ component_c = 1
component_start_r = 1
component_end_r = 0
- component_c = 3
- for process_name in tmp_dict.keys():
- process_val_dict: typing.Dict[str, typing.Dict[str, int]] = tmp_dict.get(process_name)
- process_size = process_val_dict.get("size")
- delete_values_from_dict(process_val_dict, ["size"])
- for subsystem_name, subsystem_val_dict in process_val_dict.items(): # 遍历subsystem
- for component_name, component_val_dict in subsystem_val_dict.items(): # 遍历component
- elf_count_of_component = len(component_val_dict)
- for elf_name, size in component_val_dict.items(): # 遍里elf
- writer.append_line([process_name, process_size, subsystem_name, component_name, elf_name, "%.2f" % (size / 1024)])
- component_end_r += elf_count_of_component
- subsystem_end_r += elf_count_of_component
- # 重写component
- writer.write_merge(component_start_r, component_c, component_end_r,
- component_c, component_name)
- component_start_r = component_end_r + 1
- process_end_r += elf_count_of_component
- writer.write_merge(subsystem_start_r, subsystem_c, subsystem_end_r, subsystem_c, subsystem_name)
- subsystem_start_r = subsystem_end_r+1
- writer.write_merge(process_start_r, process_c, process_end_r, process_c, process_name)
- writer.write_merge(process_start_r, process_size_c, process_end_r, process_size_c, process_size)
- process_start_r = process_end_r + 1
+ component_size_c = 2
+ baseline_c = 3
+
+ process_start_r = 1
+ process_end_r = 0
+ process_c = 4
+ process_size_c = 5
+ if not baseline_file:
+ process_c -= 1
+ process_size_c -= 1
+ for subsystem_name, subsystem_info in tmp_dict.items():
+ subsystem_size = subsystem_info.get("size")
+ if subsystem_size:
+ del subsystem_info["size"]
+ for component_name, component_info in subsystem_info.items():
+ component_size = component_info.get("size")
+ component_baseline = component_info.get("baseline")
+ if "size" in component_info.keys():
+ del component_info["size"]
+ if "baseline" in component_info.keys():
+ del component_info["baseline"]
+ for process_name, process_info in component_info.items():
+ process_size = process_info.get("size")
+ elf_info = process_info.get("elf")
+ for elf_name, elf_size in elf_info.items():
+ line = cls.__inside_save_result_as_excel(baseline_file, subsystem_name, component_name,
+ component_size,
+ component_baseline, process_name, process_size,
+ elf_name, elf_size)
+ writer.append_line(line)
+ elf_count = len(elf_info)
+ process_end_r += elf_count
+ component_end_r += elf_count
+ subsystem_end_r += elf_count
+ writer.write_merge(
+ process_start_r, process_c, process_end_r, process_c, process_name)
+ writer.write_merge(
+ process_start_r, process_size_c, process_end_r, process_size_c, process_size)
+ process_start_r = process_end_r + 1
+ writer.write_merge(component_start_r, component_c,
+ component_end_r, component_c, component_name)
+ writer.write_merge(component_start_r, component_size_c,
+ component_end_r, component_size_c, component_size)
+ if baseline_file:
+ writer.write_merge(component_start_r, baseline_c,
+ component_end_r, baseline_c, component_baseline)
+ component_start_r = component_end_r + 1
+ writer.write_merge(subsystem_start_r, subsystem_c,
+ subsystem_end_r, subsystem_c, subsystem_name)
+ subsystem_start_r = subsystem_end_r + 1
writer.save(filename)
@classmethod
@@ -324,45 +381,93 @@ class RamAnalyzer:
evaluator:评估elf文件的从phone下面开始的路径与service_name的关系,评判如何才是找到了
returns: 是否查找到,elf文件名,部件名,size
"""
- subsystem_name_list = [subsystem_name] if subsystem_name != "*" else rom_result_dict.keys()
+ subsystem_name_list = [
+ subsystem_name] if subsystem_name != "*" else rom_result_dict.keys()
for sn in subsystem_name_list:
sub_val_dict = rom_result_dict.get(sn)
- component_name_list = [component_name] if component_name != '*' else sub_val_dict.keys()
+ component_name_list = [
+ component_name] if component_name != '*' else sub_val_dict.keys()
for cn in component_name_list:
if cn == "size" or cn == "file_count":
continue
- component_val_dict: typing.Dict[str, int] = sub_val_dict.get(cn)
+ component_val_dict: typing.Dict[str,
+ int] = sub_val_dict.get(cn)
for k, v in component_val_dict.items():
if k == "size" or k == "file_count":
continue
if not evaluator(service_name, k):
continue
- return True, os.path.split(k)[-1],sn, cn, v
+ return True, os.path.split(k)[-1], sn, cn, v
return False, str(), str(), str(), int()
@classmethod
- def analysis(cls, cfg_path: str, xml_path: str, rom_result_json: str, device_num: str,
- output_file: str, ss: str, output_excel: bool):
- """
- process size subsystem/component so so_size
- """
- if not HDCTool.verify_hdc():
- print("error: Command 'hdc' not found")
- return
- if not HDCTool.verify_device(device_num):
- print("error: {} is inaccessible or not found".format(device_num))
- return
- with open(rom_result_json, 'r', encoding='utf-8') as f:
- rom_result_dict: typing.Dict = json.loads(f.read())
- # 从rom的分析结果中将需要的elf信息重组
- so_info_dict: typing.Dict[
- str, typing.Dict[str["component_name|subsystem_name|size"], str]] = cls.get_elf_info_from_rom_result(
- rom_result_json)
- process_elf_dict: typing.Dict[str, typing.List[str]] = cls.get_process_so_relationship(xml_path, cfg_path,
- profile_path)
- process_size_dict: typing.Dict[str, int] = cls.process_hidumper_info(device_num, ss)
- result_dict: typing.Dict[str, typing.Dict[str, typing.Any]] = dict()
+ def add_baseline(self, refactored_result_dict: Dict, baseline_file: str) -> None:
+ with open(baseline_file, 'r', encoding='utf-8') as f:
+ baseline_dict = json.load(f)
+ for subsystem_name, subsystem_info in refactored_result_dict.items():
+ for component_name, component_info in subsystem_info.items():
+ if component_name == "size":
+ continue
+ if not baseline_dict.get(subsystem_name):
+ continue
+ if not baseline_dict[subsystem_name].get(component_name):
+ continue
+ component_info["baseline"] = baseline_dict[subsystem_name][component_name].get(
+ "ram")
+
+ @classmethod
+ def inside_refactored_result_unit_adaptive(cls, process_info):
+ for elf_name, elf_size in process_info["elf"].items():
+ process_info["elf"][elf_name] = unit_adaptive(elf_size)
+ return process_info
+
+ @classmethod
+ def refactored_result_unit_adaptive(cls, result_dict: Dict[str, Dict]) -> None:
+ for subsystem_name, subsystem_info in result_dict.items():
+ sub_size = unit_adaptive(subsystem_info["size"])
+ del subsystem_info["size"]
+ for component_name, component_info in subsystem_info.items():
+ com_size = unit_adaptive(component_info["size"])
+ del component_info["size"]
+ for process_name, process_info in component_info.items():
+ pro_size = unit_adaptive(process_info["size"])
+ del process_info["size"]
+ process_info = cls.inside_refactored_result_unit_adaptive(process_info)
+ process_info["size"] = pro_size
+ component_info["size"] = com_size
+ subsystem_info["size"] = sub_size
+
+ @classmethod
+ def result_process1(cls, result_dict, process_name, process_size, elf, size):
+ result_dict[process_name] = dict()
+ result_dict[process_name]["size"] = process_size
+ result_dict[process_name]["startup"] = dict()
+ result_dict[process_name]["startup"]["init"] = dict()
+ result_dict[process_name]["startup"]["init"][elf if len(
+ elf) != 0 else "UNKNOWN"] = size
+ return result_dict
+
+ @classmethod
+ def result_process2(cls, result_dict, process_name, subsystem_name, process_size, component_name, hap_name, size):
+ result_dict[process_name] = dict()
+ result_dict[process_name]["size"] = process_size
+ result_dict[process_name][subsystem_name] = dict()
+ result_dict[process_name][subsystem_name][component_name] = dict()
+ result_dict[process_name][subsystem_name][component_name][hap_name if len(
+ hap_name) != 0 else "UNKNOWN"] = size
+ return result_dict
+
+ @classmethod
+ def result_process3(cls, result_dict, process_name, process_size):
+ result_dict[process_name] = dict()
+ result_dict[process_name]["size"] = process_size
+ result_dict[process_name]["UNKNOWN"] = dict()
+ result_dict[process_name]["UNKNOWN"]["UNKNOWN"] = dict()
+ result_dict[process_name]["UNKNOWN"]["UNKNOWN"]["UNKNOWN"] = int()
+ return result_dict
+ @classmethod
+ def result_process4(cls, result_dict, process_size_dict, rom_result_dict, process_elf_dict, so_info_dict):
def get(key: typing.Any, dt: typing.Dict[str, typing.Any]):
for k, v in dt.items():
if k.startswith(key) or (len(v) > 0 and key == v[0]):
@@ -370,40 +475,36 @@ class RamAnalyzer:
return v
for process_name, process_size in process_size_dict.items(): # 从进程出发
+ if not process_name:
+ print("warning: an empty 'process_name' has been found.")
+ continue
# 如果部件是init,特殊处理
if process_name == "init":
- _, elf,_, _, size = cls.find_elf_size_from_rom_result(process_name, "startup", "init",
- lambda x, y: os.path.split(y)[
- -1].lower() == x.lower(),
- rom_result_dict)
- result_dict[process_name] = dict()
- result_dict[process_name]["size"] = process_size
- result_dict[process_name]["startup"] = dict()
- result_dict[process_name]["startup"]["init"] = dict()
- result_dict[process_name]["startup"]["init"][elf if len(elf) != 0 else "UNKNOWN"] = size
+ _, elf, _, _, size = cls.find_elf_size_from_rom_result(process_name, "startup", "init",
+ lambda x, y: os.path.split(y)[
+ -1].lower() == x.lower(),
+ rom_result_dict)
+ result_dict = cls.result_process1(result_dict, process_name, process_size, elf, size)
continue
# 如果是hap,特殊处理
if (process_name.startswith("com.") or process_name.startswith("ohos.")):
- _, hap_name, subsystem_name, component_name, size = cls.find_elf_size_from_rom_result(process_name, "*", "*",
- lambda x, y: len(
- y.split(
- '/')) >= 3 and x.lower().startswith(
- y.split('/')[2].lower()),
- rom_result_dict)
- result_dict[process_name] = dict()
- result_dict[process_name]["size"] = process_size
- result_dict[process_name][subsystem_name] = dict()
- result_dict[process_name][subsystem_name][component_name] = dict()
- result_dict[process_name][subsystem_name][component_name][hap_name if len(hap_name) != 0 else "UNKNOWN"] = size
+ _, hap_name, subsystem_name, component_name, size = cls.find_elf_size_from_rom_result(process_name, "*",
+ "*",
+ lambda x, y: len(
+ y.split(
+ '/')) >= 3 and x.lower().startswith(
+ y.split('/')[
+ 2].lower()),
+ rom_result_dict)
+ result_dict = cls.result_process2(result_dict, process_name, subsystem_name, process_size,
+ component_name, hap_name, size)
continue
- so_list: list = get(process_name, process_elf_dict) # 得到进程相关的elf文件list
+ # 得到进程相关的elf文件list
+ so_list: list = get(process_name, process_elf_dict)
if so_list is None:
- print("warning: process '{}' not found in .xml or .cfg".format(process_name))
- result_dict[process_name] = dict()
- result_dict[process_name]["size"] = process_size
- result_dict[process_name]["UNKNOWN"] = dict()
- result_dict[process_name]["UNKNOWN"]["UNKNOWN"] = dict()
- result_dict[process_name]["UNKNOWN"]["UNKNOWN"]["UNKNOWN"] = int()
+ print("warning: process '{}' not found in .json or .cfg".format(
+ process_name))
+ result_dict = cls.result_process3(result_dict, process_name, process_size)
continue
result_dict[process_name] = dict()
result_dict[process_name]["size"] = process_size
@@ -413,7 +514,8 @@ class RamAnalyzer:
result_dict[process_name]["UNKNOWN"] = dict()
result_dict[process_name]["UNKNOWN"]["UNKNOWN"] = dict()
result_dict[process_name]["UNKNOWN"]["UNKNOWN"][so] = int()
- print("warning: '{}' in {} not found in json from rom analysis result".format(so, process_name))
+ print("warning: '{}' in {} not found in json from rom analysis result".format(
+ so, process_name))
continue
component_name = unit.get("component_name")
subsystem_name = unit.get("subsystem_name")
@@ -423,13 +525,81 @@ class RamAnalyzer:
if result_dict.get(process_name).get(subsystem_name).get(component_name) is None:
result_dict[process_name][subsystem_name][component_name] = dict()
result_dict[process_name][subsystem_name][component_name][so] = so_size
+ return result_dict
+
+ @classmethod
+ def analysis(cls, cfg_path: str, json_path: str, rom_result_json: str, device_num: str,
+ output_file: str, ss: str, output_excel: bool, baseline_file: str, unit_adapt: bool):
+ """
+ process size subsystem/component so so_size
+ """
+ if not HDCTool.verify_hdc():
+ print("error: Command 'hdc' not found")
+ return
+ if not HDCTool.verify_device(device_num):
+ print("error: {} is inaccessible or not found".format(device_num))
+ return
+ with open(rom_result_json, 'r', encoding='utf-8') as f:
+ rom_result_dict: typing.Dict = json.loads(f.read())
+ # 从rom的分析结果中将需要的elf信息重组
+ so_info_dict: typing.Dict[
+ str, typing.Dict[str["component_name|subsystem_name|size"], str]] = cls.get_elf_info_from_rom_result(
+ rom_result_json)
+ process_elf_dict: typing.Dict[str, typing.List[str]] = cls.get_process_so_relationship(cfg_path,
+ json_path)
+ process_size_dict: typing.Dict[str, int] = cls.process_hidumper_info(
+ device_num, ss)
+ result_dict: typing.Dict[str, typing.Dict[str, typing.Any]] = dict()
+ result_dict = cls.result_process4(result_dict, process_size_dict, rom_result_dict, process_elf_dict,
+ so_info_dict)
base_dir, _ = os.path.split(output_file)
if len(base_dir) != 0 and not os.path.isdir(base_dir):
os.makedirs(base_dir, exist_ok=True)
with open(output_file + ".json", 'w', encoding='utf-8') as f:
- f.write(json.dumps(result_dict, indent=4))
+ json.dump(result_dict, f, indent=4)
+ refactored_result: Dict[str, Dict] = refacotr_result(result_dict)
+ if unit_adapt:
+ cls.refactored_result_unit_adaptive(refactored_result)
+ if baseline_file:
+ cls.add_baseline(refactored_result, baseline_file)
+ with open(f"refactored_{output_file}.json", 'w', encoding='utf-8') as f:
+ json.dump(refactored_result, f, indent=4)
if output_excel:
- cls.__save_result_as_excel(result_dict, output_file + ".xls", ss)
+ cls.__save_result_as_excel(
+ refactored_result, output_file + ".xls", ss, baseline_file, unit_adapt)
+
+
+def inside_refacotr_result(component_info, refactored_ram_dict, subsystem_name, component_name, process_name,
+ process_size):
+ for elf_name, elf_size in component_info.items():
+ if not refactored_ram_dict.get(subsystem_name):
+ refactored_ram_dict[subsystem_name] = dict()
+ refactored_ram_dict[subsystem_name]["size"] = 0
+ if not refactored_ram_dict[subsystem_name].get(component_name):
+ refactored_ram_dict[subsystem_name][component_name] = dict(
+ )
+ refactored_ram_dict[subsystem_name][component_name]["size"] = 0
+ refactored_ram_dict[subsystem_name][component_name][process_name] = dict(
+ )
+ refactored_ram_dict[subsystem_name][component_name][process_name]["size"] = process_size
+ refactored_ram_dict[subsystem_name][component_name][process_name]["elf"] = dict(
+ )
+ refactored_ram_dict[subsystem_name][component_name][process_name]["elf"][elf_name] = elf_size
+ refactored_ram_dict[subsystem_name]["size"] += process_size
+ refactored_ram_dict[subsystem_name][component_name]["size"] += process_size
+ return refactored_ram_dict
+
+
+def refacotr_result(ram_result: Dict[str, Dict]) -> Dict[str, Dict]:
+ refactored_ram_dict: Dict[str, Dict] = dict()
+ for process_name, process_info in ram_result.items():
+ process_size = process_info.get("size")
+ del process_info["size"]
+ for subsystem_name, subsystem_info in process_info.items():
+ for component_name, component_info in subsystem_info.items():
+ refactored_ram_dict = inside_refacotr_result(component_info, refactored_ram_dict, subsystem_name,
+ component_name, process_name, process_size)
+ return refactored_ram_dict
def get_args():
@@ -439,8 +609,8 @@ def get_args():
)
parser.add_argument("-v", "-version", action="version",
version=f"version {VERSION}")
- parser.add_argument("-x", "--xml_path", type=str, required=True,
- help="path of xml file. eg: -x ~/openharmony/out/rk3568/packages/phone/system/profile")
+ parser.add_argument("-s", "--json_path", type=str, required=True,
+                        help="path of sa json file. eg: -s ~/openharmony/out/rk3568/packages/phone/system/profile")
parser.add_argument("-c", "--cfg_path", type=str, required=True,
help="path of cfg files. eg: -c ./cfgs/")
parser.add_argument("-j", "--rom_result", type=str, default="./rom_analysis_result.json",
@@ -448,21 +618,32 @@ def get_args():
"eg: -j ./demo/rom_analysis_result.json")
parser.add_argument("-n", "--device_num", type=str, required=True,
help="device number to be collect hidumper info. eg: -n 7001005458323933328a01fce16d3800")
+ parser.add_argument("-b", "--baseline_file", type=str, default="",
+ help="baseline file of rom and ram generated by rom analysis.")
parser.add_argument("-o", "--output_filename", default="ram_analysis_result", type=str,
help="base name of output file, default: ram_analysis_result. eg: -o ram_analysis_result")
+ parser.add_argument("-u", "--unit_adaptive",
+ action="store_true", help="unit adaptive")
parser.add_argument("-e", "--excel", type=bool, default=False,
help="if output result as excel, default: False. eg: -e True")
args = parser.parse_args()
return args
+def abspath(path: str) -> str:
+ return os.path.abspath(os.path.expanduser(path))
+
+
if __name__ == '__main__':
args = get_args()
- cfg_path = args.cfg_path
- profile_path = args.xml_path
+ cfg_path_name = abspath(args.cfg_path)
+ profile_path_name = abspath(args.json_path)
rom_result = args.rom_result
- device_num = args.device_num
+ device = args.device_num
output_filename = args.output_filename
- output_excel = args.excel
- RamAnalyzer.analysis(cfg_path, profile_path, rom_result,
- device_num=device_num, output_file=output_filename, ss="Pss", output_excel=output_excel)
+ baseline = args.baseline_file
+ output_excel_path = args.excel
+ unit_adaptiv = args.unit_adaptive
+ RamAnalyzer.analysis(cfg_path_name, profile_path_name, rom_result,
+ device_num=device, output_file=output_filename, ss="Pss", output_excel=output_excel_path,
+ baseline_file=baseline, unit_adapt=unit_adaptiv)
diff --git a/tools/rom_ram_analyzer/standard/rom_analyzer.py b/tools/rom_ram_analyzer/standard/rom_analyzer.py
index b748563f8880268feeb184435bb1aae23de32ca8..08058a61422f87c36da1d174f4e7a54becf12c12 100644
--- a/tools/rom_ram_analyzer/standard/rom_analyzer.py
+++ b/tools/rom_ram_analyzer/standard/rom_analyzer.py
@@ -22,9 +22,11 @@ import sys
import typing
from copy import deepcopy
from typing import *
-
-from pkgs.basic_tool import BasicTool
-from pkgs.gn_common_tool import GnCommonTool
+import re
+import subprocess
+from pkgs.rom_ram_baseline_collector import RomRamBaselineCollector
+from pkgs.basic_tool import BasicTool, unit_adaptive
+from pkgs.gn_common_tool import GnCommonTool, GnVariableParser
from pkgs.simple_excel_writer import SimpleExcelWriter
debug = bool(sys.gettrace())
@@ -32,49 +34,126 @@ debug = bool(sys.gettrace())
NOTFOUND = "NOTFOUND"
+class PreCollector:
+ """
+    collect some info that system_module_info.json doesn't contain
+ """
+
+ def __init__(self, project_path: str) -> None:
+ self.info_dict: Dict[str, Any] = dict()
+ self.project_path = BasicTool.get_abs_path(project_path)
+ self.result_dict = dict()
+
+ def _process_single_sa(self, item: str, start_pattern: str):
+ gn, _, _ = item.split(':')
+ with open(gn, 'r', encoding='utf-8') as f:
+ content = f.read()
+ p_itr: Iterator[re.Match] = BasicTool.match_paragraph(
+ content=content, start_pattern=start_pattern)
+ for p in p_itr:
+ p_content = p.group()
+ files: List[str] = GnVariableParser.list_parser(
+ "sources", p_content)
+ component_name, subsystem_name = GnCommonTool.find_part_subsystem(
+ gn, self.project_path)
+ for f in files:
+ f = f.split('/')[-1]
+ self.result_dict[f] = {
+ "subsystem_name": subsystem_name,
+ "component_name": component_name,
+ "gn_path": gn
+ }
+
+ def collect_sa_profile(self):
+ grep_kw = r"ohos_sa_profile"
+ grep_cmd = f"grep -rn '{grep_kw}' --include=BUILD.gn {self.project_path}"
+ content = BasicTool.execute(
+ grep_cmd, post_processor=lambda x: x.split('\n'))
+ for item in content:
+ if not item:
+ continue
+ self._process_single_sa(item, start_pattern=grep_kw)
+
+
class RomAnalyzer:
+
@classmethod
def __collect_product_info(cls, system_module_info_json: Text,
- project_path: Text) -> Dict[Text, Dict[Text, Text]]:
+ project_path: Text, extra_info: Dict[str, Dict]) -> Dict[Text, Dict[Text, Text]]:
"""
根据system_module_info.json生成target字典
+ format:
+ {
+ "{file_name}":{
+ "{subsytem_name}": abc,
+ "{component_name}": def,
+ "{gn_path}": ghi
+ }
+ }
+        if the unit of system_module_info.json has no field "label" and the "type" is "sa_profile",
+ find the subsystem_name and component_name in the BUILD.gn
"""
with open(system_module_info_json, 'r', encoding='utf-8') as f:
product_list = json.loads(f.read())
project_path = BasicTool.get_abs_path(project_path)
product_info_dict: Dict[Text, Dict[Text, Text]] = dict()
for unit in product_list:
+ cs_flag = False
dest: List = unit.get("dest")
- if dest is None:
+ if not dest:
print("warning: keyword 'dest' not found in {}".format(
system_module_info_json))
continue
label: Text = unit.get("label")
gn_path = component_name = subsystem_name = None
if label:
+ cs_flag = True
gn_path = os.path.join(project_path, label.split(':')[
- 0].lstrip('/'), "BUILD.gn")
+ 0].lstrip('/'), "BUILD.gn")
component_name = unit.get("part_name")
subsystem_name = unit.get("subsystem_name")
- if (not component_name) or (not subsystem_name):
+ if not component_name:
+ cn, sn = GnCommonTool.find_part_subsystem(
+ gn_path, project_path)
+ component_name = cn
+ if not subsystem_name:
cn, sn = GnCommonTool.find_part_subsystem(
gn_path, project_path)
- component_name = cn if not component_name else component_name
- subsystem_name = sn if not subsystem_name else subsystem_name
+ subsystem_name = sn
else:
print("warning: keyword 'label' not found in {}".format(unit))
for target in dest:
- product_info_dict[target] = {
- "component_name": component_name,
- "subsystem_name": subsystem_name,
- "gn_path": gn_path,
- }
+ if cs_flag:
+ product_info_dict[target] = {
+ "component_name": component_name,
+ "subsystem_name": subsystem_name,
+ "gn_path": gn_path,
+ }
+ continue
+ tmp = target.split('/')[-1]
+ pre_info = extra_info.get(tmp)
+ if not pre_info:
+ continue
+ else:
+ product_info_dict[target] = pre_info
return product_info_dict
@classmethod
- def __save_result_as_excel(cls, result_dict: dict, output_name: str):
+ def __inside_save_result_as_excel(cls, add_baseline, subsystem_name, component_name,
+ baseline, file_name, size):
+ if add_baseline:
+ return [subsystem_name, component_name,
+ baseline, file_name, size]
+ else:
+ return [subsystem_name, component_name, file_name, size]
+
+ @classmethod
+ def __save_result_as_excel(cls, result_dict: dict, output_name: str, add_baseline: bool):
header = ["subsystem_name", "component_name",
"output_file", "size(Byte)"]
+ if add_baseline:
+ header = ["subsystem_name", "component_name", "baseline",
+ "output_file", "size(Byte)"]
tmp_dict = deepcopy(result_dict)
excel_writer = SimpleExcelWriter("rom")
excel_writer.set_sheet_header(headers=header)
@@ -84,7 +163,8 @@ class RomAnalyzer:
component_start_row = 1
component_end_row = 0
component_col = 1
-
+ if add_baseline:
+ baseline_col = 2
for subsystem_name in tmp_dict.keys():
subsystem_dict = tmp_dict.get(subsystem_name)
subsystem_size = subsystem_dict.get("size")
@@ -98,15 +178,22 @@ class RomAnalyzer:
component_name)
component_size = component_dict.get("size")
component_file_count = component_dict.get("file_count")
+ baseline = component_dict.get("baseline")
del component_dict["file_count"]
del component_dict["size"]
+ if add_baseline:
+ del component_dict["baseline"]
component_end_row += component_file_count
for file_name, size in component_dict.items():
- excel_writer.append_line(
- [subsystem_name, component_name, file_name, size])
+ line = cls.__inside_save_result_as_excel(add_baseline, subsystem_name, component_name,
+ baseline, file_name, size)
+ excel_writer.append_line(line)
excel_writer.write_merge(component_start_row, component_col, component_end_row, component_col,
component_name)
+ if add_baseline:
+ excel_writer.write_merge(component_start_row, baseline_col, component_end_row, baseline_col,
+ baseline)
component_start_row = component_end_row + 1
excel_writer.write_merge(subsystem_start_row, subsystem_col, subsystem_end_row, subsystem_col,
subsystem_name)
@@ -114,18 +201,19 @@ class RomAnalyzer:
excel_writer.save(output_name + ".xls")
@classmethod
- def __put(cls, unit: typing.Dict[Text, Any], result_dict: typing.Dict[Text, Dict]):
- """
- subsystem_name:{
- component_name: {
- file_name: file_size
- }
- }
- """
+ def __put(cls, unit: typing.Dict[Text, Any], result_dict: typing.Dict[Text, Dict], baseline_dict: Dict[str, Any],
+ baseline: bool):
+
component_name = NOTFOUND if unit.get(
"component_name") is None else unit.get("component_name")
subsystem_name = NOTFOUND if unit.get(
"subsystem_name") is None else unit.get("subsystem_name")
+
+ def get_rom_baseline():
+ if (not baseline_dict.get(subsystem_name)) or (not baseline_dict.get(subsystem_name).get(component_name)):
+ return str()
+ return baseline_dict.get(subsystem_name).get(component_name).get("rom")
+
size = unit.get("size")
relative_filepath = unit.get("relative_filepath")
if result_dict.get(subsystem_name) is None: # 子系统
@@ -137,15 +225,34 @@ class RomAnalyzer:
result_dict[subsystem_name][component_name] = dict()
result_dict[subsystem_name][component_name]["size"] = 0
result_dict[subsystem_name][component_name]["file_count"] = 0
+ if baseline:
+ result_dict[subsystem_name][component_name]["baseline"] = get_rom_baseline(
+ )
+
result_dict[subsystem_name]["size"] += size
result_dict[subsystem_name]["file_count"] += 1
result_dict[subsystem_name][component_name]["size"] += size
result_dict[subsystem_name][component_name]["file_count"] += 1
result_dict[subsystem_name][component_name][relative_filepath] = size
+ @classmethod
+ def result_unit_adaptive(self, result_dict: Dict[str, Dict]) -> None:
+ for subsystem_name, subsystem_info in result_dict.items():
+ size = unit_adaptive(subsystem_info["size"])
+ count = subsystem_info["file_count"]
+ if "size" in subsystem_info.keys():
+ del subsystem_info["size"]
+ if "file_count" in subsystem_info.keys():
+ del subsystem_info["file_count"]
+ for component_name, component_info in subsystem_info.items():
+ component_info["size"] = unit_adaptive(component_info["size"])
+ subsystem_info["size"] = size
+ subsystem_info["file_count"] = count
+
@classmethod
def analysis(cls, system_module_info_json: Text, product_dirs: List[str],
- project_path: Text, product_name: Text, output_file: Text, output_execel: bool):
+ project_path: Text, product_name: Text, output_file: Text, output_execel: bool, add_baseline: bool,
+ unit_adapt: bool):
"""
system_module_info_json: json文件
product_dirs:要处理的产物的路径列表如["vendor", "system/"]
@@ -154,11 +261,19 @@ class RomAnalyzer:
output_file: basename of output file
"""
project_path = BasicTool.get_abs_path(project_path)
+ rom_baseline_dict: Dict[str, Any] = RomRamBaselineCollector.collect(
+ project_path)
+ with open("rom_ram_baseline.json", 'w', encoding='utf-8') as f:
+ json.dump(rom_baseline_dict, f, indent=4)
phone_dir = os.path.join(
project_path, "out", product_name, "packages", "phone")
product_dirs = [os.path.join(phone_dir, d) for d in product_dirs]
+ pre_collector = PreCollector(project_path)
+ pre_collector.collect_sa_profile()
+ extra_product_info_dict: Dict[str, Dict] = pre_collector.result_dict
product_info_dict = cls.__collect_product_info(
- system_module_info_json, project_path) # 所有产物信息
+ system_module_info_json, project_path,
+ extra_info=extra_product_info_dict) # collect product info from json file
result_dict: Dict[Text:Dict] = dict()
for d in product_dirs:
file_list: List[Text] = BasicTool.find_all_files(d)
@@ -167,18 +282,23 @@ class RomAnalyzer:
relative_filepath = f.replace(phone_dir, "").lstrip(os.sep)
unit: Dict[Text, Any] = product_info_dict.get(
relative_filepath)
- if unit is None:
+ if not unit:
+ bf = f.split('/')[-1]
+ unit: Dict[Text, Any] = product_info_dict.get(bf)
+ if not unit:
unit = dict()
unit["size"] = size
unit["relative_filepath"] = relative_filepath
- cls.__put(unit, result_dict)
+ cls.__put(unit, result_dict, rom_baseline_dict, add_baseline)
output_dir, _ = os.path.split(output_file)
if len(output_dir) != 0:
os.makedirs(output_dir, exist_ok=True)
+ if unit_adapt:
+ cls.result_unit_adaptive(result_dict)
with open(output_file + ".json", 'w', encoding='utf-8') as f:
f.write(json.dumps(result_dict, indent=4))
if output_execel:
- cls.__save_result_as_excel(result_dict, output_file)
+ cls.__save_result_as_excel(result_dict, output_file, add_baseline)
def get_args():
@@ -196,8 +316,12 @@ def get_args():
parser.add_argument("-d", "--product_dir", required=True, action="append",
help="subdirectories of out/{product_name}/packages/phone to be counted."
"eg: -d system -d vendor")
+ parser.add_argument("-b", "--baseline", action="store_true",
+ help="add baseline of component to the result(-b) or not.")
parser.add_argument("-o", "--output_file", type=str, default="rom_analysis_result",
help="basename of output file, default: rom_analysis_result. eg: demo/rom_analysis_result")
+ parser.add_argument("-u", "--unit_adaptive",
+ action="store_true", help="unit adaptive")
parser.add_argument("-e", "--excel", type=bool, default=False,
help="if output result as excel, default: False. eg: -e True")
args = parser.parse_args()
@@ -207,10 +331,12 @@ def get_args():
if __name__ == '__main__':
args = get_args()
module_info_json = args.module_info_json
- project_path = args.project_path
- product_name = args.product_name
+ project_origin_path = args.project_path
+ product = args.product_name
product_dirs = args.product_dir
- output_file = args.output_file
+ output_file_name = args.output_file
output_excel = args.excel
+ baseline_path = args.baseline
+ unit_adaptiv = args.unit_adaptive
RomAnalyzer.analysis(module_info_json, product_dirs,
- project_path, product_name, output_file, output_excel)
+ project_origin_path, product, output_file_name, output_excel, baseline_path, unit_adaptiv)
diff --git a/tools/startup_guard/README.md b/tools/startup_guard/README.md
new file mode 100755
index 0000000000000000000000000000000000000000..68dae13364f5a5a084bbd718986981404c1fa6c3
--- /dev/null
+++ b/tools/startup_guard/README.md
@@ -0,0 +1,10 @@
+# 启动资源管控
+
+启动资源管控是对启动过程中配置文件(\*.cfg)和系统参数配置文件(\*.para, \*.para.dac)的管控, 约束各子模块的启动资源配置。通过对OpenHarmony镜像中的启动配置文件(\*.cfg)和系统参数配置文件(\*.para, \*.para.dac)进行白名单检查, 完成对启动资源的管控。
+
+支持的拦截规则如下:
+
+| 规则名 | 规则说明 |
+| -------------------------------------------------------- | ------------------------------------------------------------ |
+| [NO-Config-Cmds-In-Init](rules/NO-Config-Cmds-In-Init/README.md) | 所有启动配置文件cmd管控 |
+| [NO-Config-SystemParameter-In-INIT](rules/NO-Config-SystemParameter-In-INIT/README.md) | 所有系统参数管控 |
diff --git a/tools/startup_guard/config_parser_mgr/__init__.py b/tools/startup_guard/config_parser_mgr/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..065104e03096d8111fb2144eae4e741edff79c04
--- /dev/null
+++ b/tools/startup_guard/config_parser_mgr/__init__.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python
+#coding=utf-8
+
+#
+# Copyright (c) 2023 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from .config_parser_mgr import ConfigParserMgr
diff --git a/tools/startup_guard/config_parser_mgr/cfg/config_parser.py b/tools/startup_guard/config_parser_mgr/cfg/config_parser.py
new file mode 100644
index 0000000000000000000000000000000000000000..a6a2c5eb7bc8ddbf489accaeaf687dbd06719137
--- /dev/null
+++ b/tools/startup_guard/config_parser_mgr/cfg/config_parser.py
@@ -0,0 +1,513 @@
+#!/usr/bin/env python
+#coding=utf-8
+
+#
+# Copyright (c) 2023 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import os
+import json
+import pprint
+
+def _create_arg_parser():
+ import argparse
+ parser = argparse.ArgumentParser(description='Collect init config information from xxxx/etc/init dir.')
+ parser.add_argument('-i', '--input',
+ help='input init config files base directory example "out/rk3568/packages/phone/" ',
+ action='append', required=True)
+
+ parser.add_argument('-o', '--output',
+ help='output init config information database directory', required=False)
+ parser.add_argument('-b', '--bootevent',
+ help='input bootevent file from system ', required=False)
+ return parser
+
+class ItemParser(dict):
+ def __init__(self, config):
+ self._config_parser = config
+ self["name"] = ""
+ def create(self, json_node, parent = None, fileId = None):
+ return
+
+ def update(self, json_node, parent = None, fileId = None):
+ return
+
+ def get_name(self):
+ return self["name"]
+
+ def get(self, key):
+ if self.__contains__(key):
+ return self[key]
+ return None
+
+ # get value form json array
+ def get_strings_value(self, jsonStrArray):
+ if jsonStrArray == None or len(jsonStrArray) == 0:
+ return ""
+
+ string = jsonStrArray[0]
+ for i in range(1, len(jsonStrArray)):
+ string = "{}@{}".format(string, jsonStrArray[i])
+ return string
+
+class CmdParser(ItemParser):
+ def __init__(self, config):
+ ItemParser.__init__(self, config)
+ self["content"] = ""
+ self["fileId"] = -1
+
+ def create(self, json_node, parent = None, fileId = None):
+ assert(isinstance(json_node, str))
+ assert(parent != None)
+ info = json_node.partition(" ") # 取第一个空格分割
+ self["name"] = info[0]
+ self["jobId"] = parent.get("jobId")
+ if fileId:
+ self["fileId"] = fileId
+ if len(info) > 2:
+ self["content"] = info[2]
+ return
+
+ def __str__(self):
+ return "cmd \"%s\" content \"%s\" " % (self["name"], self["content"])
+
+class JobParser(ItemParser):
+ def __init__(self, config):
+ ItemParser.__init__(self, config)
+ self["condition"] = ""
+ self["serviceId"] = -1
+ self["fileId"] = -1
+ self["jobPriority"] = -1
+ self["jobPriority"] = -1
+ self["executionTime"] = 0
+
+ def _add_cmds(self, cmdList, fileId):
+ for cmd in cmdList:
+ self._config_parser.add_cmd(cmd, self, fileId)
+
+ def create(self, json_node, parent = None, fileId = None):
+ assert(isinstance(json_node, dict))
+ self["name"] = json_node["name"]
+ self["jobId"] = self._config_parser.get_job_id()
+ self["jobPriority"] = self._config_parser.get_job_priority(json_node["name"])
+
+ if fileId and self["fileId"] is None:
+ self["fileId"] = fileId
+ if parent != None:
+ self["serviceId"] = parent.get("serviceId")
+
+ if json_node.__contains__("condition"):
+ self["condition"] = json_node.get("condition")
+ if json_node.__contains__("cmds"):
+ self._add_cmds(json_node.get("cmds"), fileId)
+
+ return
+
+ def update(self, json_node, parent = None, fileId = None):
+ assert(isinstance(json_node, dict))
+ if parent != None:
+ self["serviceId"] = parent.get("serviceId")
+ if fileId and self["fileId"] is None:
+ self["fileId"] = fileId
+ if json_node.__contains__("cmds"):
+ self._add_cmds(json_node.get("cmds"), fileId)
+ return
+
+ def __str__(self):
+ return "jobs '%s' condition '%s' " % (self["name"], self["condition"])
+
+class ServiceParser(ItemParser):
+ def __init__(self, config):
+ ItemParser.__init__(self, config)
+ self["critical_enable"] = False
+ self["limit_time"] = 20
+ self["limit_count"] = 4
+ self["importance"] = 0
+ self["once"] = False
+ self["console"] = False
+ self["notify_state"] = True
+ self["on_demand"] = False
+ self["sandbox"] = False
+ self["disabled"] = False
+ self["start_mode"] = "normal"
+ self["secon"] = ""
+ self["boot_job"] = ""
+ self["start_job"] = ""
+ self["stop_job"] = ""
+ self["restart_job"] = ""
+ self["path"] = ""
+ self["apl"] = ""
+ self["d_caps"] = ""
+ self["permission"] = ""
+ self["permission_acls"] = ""
+ self["fileId"] = -1
+
+ def _handle_string_filed(self, json_node):
+ str_field_map = {
+ "uid" : "uid", "caps":"caps", "start_mode":"start-mode", "secon":"secon", "apl":"apl"
+ }
+ for key, name in str_field_map.items():
+ if json_node.__contains__(name):
+ self[key] = json_node.get(name)
+
+ def _handle_integer_filed(self, json_node):
+ str_field_map = {
+ "importance" : "importance"
+ }
+ for key, name in str_field_map.items():
+ if json_node.__contains__(name):
+ self[key] = json_node.get(name)
+
+ def _handle_Bool_filed(self, json_node):
+ bool_field_map = {
+ "once" : "once", "console" : "console", "notify_state" : "notify_state",
+ "on_demand" : "ondemand", "sandbox" : "sandbox", "disabled" : "disabled",
+ "critical_enable" : "critical_enable"
+ }
+ for key, name in bool_field_map.items():
+ if json_node.__contains__(name):
+ value = json_node.get(name)
+ if isinstance(value, bool):
+ self[key] = value
+ elif isinstance(value, int):
+ self[key] = value != 0
+
+ def _handle_array_filed(self, json_node):
+ array_field_map = {
+ "path" : "path", "gid" : "gid", "cpu_core" : "cpucore", "caps":"caps", "write_pid":"writepid",
+ "d_caps":"d-caps", "permission":"permission", "permission_acls":"permission_acls",
+ }
+ for key, name in array_field_map.items():
+ if json_node.__contains__(name) :
+ self[key] = self.get_strings_value(json_node.get(name))
+
+ def _handle_scope_jobs(self, json_node):
+ job_field_map = {
+ "boot_job" : "on_boot", "start_job" : "on-start", "stop_job":"on-stop", "restart_job":"on-restart"
+ }
+ for key, name in job_field_map.items():
+ if json_node.__contains__(name):
+ self[key] = json_node.get(name)
+ self._config_parser.add_job({"name" : json_node.get(name)}, self, self["fileId"])
+
+ def create(self, json_node, parent = None, fileId = None):
+ assert(isinstance(json_node, dict))
+ self["name"] = json_node["name"]
+ if not self.get("serviceId") :
+ self["serviceId"] = self._config_parser.get_service_id()
+ if fileId :
+ self["fileId"] = fileId
+ self._handle_string_filed(json_node)
+ self._handle_Bool_filed(json_node)
+ self._handle_array_filed(json_node)
+ self._handle_integer_filed(json_node)
+
+ #for file
+ if json_node.__contains__("file"):
+ for item in json_node.get("file"):
+ self._config_parser.add_service_file(item, self)
+
+ #for socket
+ if json_node.__contains__("socket"):
+ for item in json_node.get("socket"):
+ self._config_parser.add_service_socket(item, self)
+ #for jobs
+ if json_node.__contains__("jobs"):
+ self._handle_scope_jobs(json_node.get("jobs"))
+
+ #for critical
+ if json_node.__contains__("critical"):
+ critical = json_node.get("critical")
+ if isinstance(critical, list):
+ self["critical_enable"] = int(critical[0]) != 0
+ self["limit_time"] = int(critical[0])
+ self["limit_count"] = int(critical[0])
+ else:
+ self["critical_enable"] = int(critical) != 0
+ return
+
+ def update(self, json_node, parent = None, fileId = None):
+ self.create(json_node, parent, fileId)
+ return
+
+class ServiceSocketParser(ItemParser):
+ def __init__(self, config):
+ ItemParser.__init__(self, config)
+ self["family"] = ""
+ self["type"] = ""
+ self["protocol"] = ""
+ self["permissions"] = ""
+ self["uid"] = ""
+ self["gid"] = ""
+ self["serviceId"] = -1
+
+ def create(self, json_node, parent = None, file_id = None):
+ assert(isinstance(json_node, dict))
+ self["name"] = json_node["name"]
+ if parent != None:
+ self["serviceId"] = parent.get("serviceId")
+ fields = ["family", "type", "protocol", "permissions", "uid", "gid"]
+ for field in fields:
+ if json_node.get(field) :
+ self[field] = json_node.get(field)
+ if json_node.get("option") :
+ self["option"] = self.get_strings_value(json_node.get("option"))
+
+ def __repr__(self):
+ return self.__str__()
+
+ def __str__(self):
+ return "socket '%s' serviceid = %d family %s" % (self["name"], self["serviceId"], self["family"])
+
+class ServiceFileParser(ItemParser):
+ def __init__(self, config):
+ ItemParser.__init__(self, config)
+ self["name"] = ""
+ self["content"] = ""
+ self["serviceId"] = -1
+
+ def create(self, json_node, parent = None, file_id = None):
+ assert(isinstance(json_node, str))
+ if parent != None:
+ self["serviceId"] = parent.get("serviceId")
+ info = json_node.partition(" ")
+ self["name"] = info[0]
+ if len(info) > 2:
+ self["content"] = info[2]
+ return
+
+ def __repr__(self):
+ return self.__str__()
+
+ def __str__(self):
+ return "file '%s' serviceid = %d content '%s'" % (self["name"], self["serviceId"], self["content"])
+
+class ConfigParser():
+ def __init__(self, path):
+ self._path = path
+ self._jobs = {}
+ self._files = {}
+ self._cmds = []
+ self._services = {}
+ self._serviceSockets = {}
+ self._serviceFiles = {}
+ self._jobId = 0
+ self._file_id = 0
+ self._serviceId = 0
+ self._selinux = ""
+
+ def _load_services(self, json_node, file_id):
+ assert(isinstance(json_node, list))
+ for item in json_node:
+ self.add_service(item, file_id)
+ return
+
+ def _load_jobs(self, json_node, file_id):
+ assert(isinstance(json_node, list))
+ for item in json_node:
+ self.add_job(item, None, file_id)
+ return
+
+ def _load_import(self, import_node):
+ assert(isinstance(import_node, list))
+ start_with = [ "/system", "/chip_prod", "/sys_prod", "/vendor" ]
+ for file in import_node:
+ found = False
+ for start in start_with:
+ if file.startswith(start):
+ found = True
+ break
+ if found :
+ self.load_config(self._path + file)
+ else:
+ for start in start_with:
+ self.load_config(self._path + start + file, file)
+
+ def load_config(self, file_name):
+ path = self._path + file_name
+ if not os.path.exists(path):
+ print("Error, invalid config file %s" % path)
+ return
+ with open(path, encoding='utf-8') as content:
+ try:
+ root = json.load(content)
+ fileId = self.add_File(file_name)
+ assert(isinstance(root, dict))
+ if (root.__contains__("services")):
+ self._load_services(root["services"], fileId)
+ if (root.__contains__("jobs")):
+ self._load_jobs(root["jobs"], fileId)
+ if (root.__contains__("import")):
+ self._load_import(root["import"])
+ pass
+ except:
+ pass
+
+ def add_File(self, file_name):
+ if self._files.get(file_name):
+ return self._files.get(file_name).get("fileId")
+ self._file_id = self._file_id + 1
+ self._files[file_name] = {
+ "fileId" : self._file_id,
+ "file_name" : file_name
+ }
+ return self._files[file_name].get("fileId")
+
+ def add_job(self, item, service, file_id):
+ if self._jobs.get(item.get("name")):
+ self._jobs.get(item.get("name")).update(item, service, file_id)
+ return
+ parser = JobParser(self)
+ parser.create(item, service, file_id)
+ self._jobs[parser.get_name()] = parser
+
+ def add_cmd(self, item, job, file_id):
+ parser = CmdParser(self)
+ parser.create(item, job, file_id)
+ self._cmds.append(parser)
+
+ def add_service(self, item, file_id):
+ if self._services.get(item.get("name")):
+ self._services.get(item.get("name")).update(item)
+ return
+ parser = ServiceParser(self)
+ parser.create(item, None, file_id)
+ self._services[parser.get("name")] = parser
+
+ def add_service_socket(self, item, service):
+ parser = ServiceSocketParser(self)
+ parser.create(item, service)
+ self._serviceSockets[parser.get_name()] = parser
+
+ def add_service_file(self, item, service):
+ parser = ServiceFileParser(self)
+ parser.create(item, service)
+ self._serviceFiles[parser.get_name()] = parser
+
+ def get_job_id(self):
+ self._jobId = self._jobId + 1
+ return self._jobId
+
+ def get_service_id(self):
+ self._serviceId = self._serviceId + 1
+ return self._serviceId
+
+ def dump_config(self):
+ pp = pprint.PrettyPrinter(indent = 0, compact=True)
+ pp.pprint(self._jobs)
+ pass
+
+ def _is_valid_file(self, file):
+ valid_file_ext = [".cfg"]
+ if not file.is_file():
+ return False
+ for ext in valid_file_ext:
+ if file.name.endswith(ext):
+ return True
+ return False
+
+ def _scan_config_file(self, file_name):
+ dir_config_file = os.path.join(self._path, file_name)
+ if not os.path.exists(dir_config_file):
+ return
+ try:
+ with os.scandir(dir_config_file) as files:
+ for file in files:
+ if self._is_valid_file(file):
+ name = file.path[len(self._path) :]
+ self.load_config(name)
+ except:
+ pass
+
+ def scan_config(self):
+ config_paths = [
+ "/system/etc/init",
+ "/chip_prod/etc/init",
+ "/sys_prod/etc/init",
+ "/vendor/etc/init",
+ ]
+ for file_name in config_paths:
+ self._scan_config_file(file_name)
+
+ def get_job_priority(self, job_name):
+ job_priority = {
+ "pre-init" : 0,
+ "init" : 1,
+ "post-init" : 2,
+ "early-fs" : 3,
+ "fs" : 4,
+ "post-fs" : 5,
+ "late-fs" : 6,
+ "post-fs-data" : 7,
+ "firmware_mounts_complete" : 8,
+ "early-boot" : 9,
+ "boot" : 10
+ }
+
+ if (job_priority.__contains__(job_name)):
+ return job_priority.get(job_name)
+ return 100
+
+ def _load_boot_event(self, event):
+ if self._jobs.__contains__(event.get("name")):
+ print("loadBootEvent_ %s %f" % (event.get("name"), event.get("dur")))
+ self._jobs.get(event.get("name"))["executionTime"] = event.get("dur")
+
+ def load_boot_event_file(self, boot_event_file):
+ if not os.path.exists(boot_event_file):
+ print("Error, invalid config file %s" % boot_event_file)
+ return
+ with open(boot_event_file, encoding='utf-8') as content:
+ try:
+ root = json.load(content)
+ for item in root:
+ self._load_boot_event(item)
+ except:
+ pass
+ pass
+
+ def load_selinux_config(self, file_name):
+ path = os.path.join(self._path, file_name)
+ if not os.path.exists(path):
+ print("Error, invalid selinux config file %s" % path)
+ return
+ try:
+ with open(path, encoding='utf-8') as fp:
+ line = fp.readline()
+ while line :
+ if line.startswith("#") or len(line) < 3:
+ line = fp.readline()
+ continue
+ param_Info = line.partition("=")
+ if len(param_Info) != 3:
+ line = fp.readline()
+ continue
+ if param_Info[0].strip() == "SELINUX":
+ self._selinux = param_Info[2].strip()
+ line = fp.readline()
+ except:
+ print("Error, invalid parameter file ", file_name)
+ pass
+
+def startup_config_collect(base_path):
+ parser = ConfigParser(os.path.join(base_path, "packages/phone"))
+ parser.load_config("/system/etc/init.cfg")
+ parser.scan_config()
+ parser.load_selinux_config("/system/etc/selinux/config")
+ return parser
+
+if __name__ == '__main__':
+ args_parser = _create_arg_parser()
+ options = args_parser.parse_args()
+ startup_config_collect(options.input)
diff --git a/tools/startup_guard/config_parser_mgr/config_parser_mgr.py b/tools/startup_guard/config_parser_mgr/config_parser_mgr.py
new file mode 100644
index 0000000000000000000000000000000000000000..e6e81542cb6c36ec4724b68f71cd65c9e6ce20e9
--- /dev/null
+++ b/tools/startup_guard/config_parser_mgr/config_parser_mgr.py
@@ -0,0 +1,50 @@
+#!/usr/bin/env python
+#coding=utf-8
+
+#
+# Copyright (c) 2023 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import sys
+sys.path.append('.')
+
+from .param.system_parameter_parser import parameters_collect
+from .cfg.config_parser import startup_config_collect
+
+def __create_arg_parser():
+ import argparse
+ parser = argparse.ArgumentParser(description='Check startup architecture information from compiled output files.')
+ parser.add_argument('-i', '--input',
+ help='input config files base directory example "out/rk3568/packages/phone/" ', required=True)
+ return parser
+
+class ConfigParserMgr(object):
+ def __init__(self, path = None):
+ self._path = path
+ self._parser_list = {}
+
+ def load_all_parser(self, options):
+ cfg_parser = startup_config_collect(options)
+ param_parser = parameters_collect(options)
+ self._parser_list = {'cmd_whitelist':cfg_parser, 'system_parameter_whitelist':param_parser}
+
+ def get_parser_by_name(self, key):
+ if key:
+ return self._parser_list[key]
+
+if __name__ == '__main__':
+ args_parser = __create_arg_parser()
+ options = args_parser.parse_args()
+ mgr = ConfigParserMgr()
+ mgr.load_all_parser(options)
diff --git a/tools/startup_guard/config_parser_mgr/param/system_parameter_parser.py b/tools/startup_guard/config_parser_mgr/param/system_parameter_parser.py
new file mode 100644
index 0000000000000000000000000000000000000000..46bcfbde3f991c98e156ba27228e2d617cefc316
--- /dev/null
+++ b/tools/startup_guard/config_parser_mgr/param/system_parameter_parser.py
@@ -0,0 +1,165 @@
+#!/usr/bin/env python
+#coding=utf-8
+
+#
+# Copyright (c) 2023 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import os
+
+class ParameterParser(dict):
+ def __init__(self, prefix, parameter = None):
+ self["prefix"] = prefix
+ if parameter == None:
+ self["type"] = "string"
+ self["dacUser"] = ""
+ self["dacGroup"] = ""
+ self["dacMode"] = 0
+ self["selinuxLabel"] = ""
+ self["value"] = ""
+ else:
+ self["type"] = parameter.get("type")
+ self["dacUser"] = parameter.get("dacUser")
+ self["dacGroup"] = parameter.get("dacGroup")
+ self["dacMode"] = parameter.get("dacMode")
+ self["selinuxLabel"] = parameter.get("selinuxLabel")
+ self["value"] = parameter.get("value")
+
+ def decode(self, info):
+ self["value"] = info.strip("\"").strip("\'")
+ return True
+
+ def __repr__(self):
+ return self.__str__()
+
+ def __str__(self):
+ return "%s= DAC[%s:%s:%s] selinux[%s] value=%s" % (
+ self["prefix"], self["dacUser"], self["dacGroup"], self["dacMode"],
+ self["selinuxLabel"], self["value"])
+
+class ParameterDacParser(ParameterParser):
+ def __init__(self, prefix, parameter=None):
+ ParameterParser.__init__(self, prefix, parameter)
+
+ def decode(self, info):
+ dacInfo = info.strip("\"").strip("\'").split(":")
+ if len(dacInfo) < 3:
+ print("Invalid dac %s" % info)
+ return False
+
+ self["dacUser"] = dacInfo[0]
+ self["dacGroup"] = dacInfo[1]
+ self["dacMode"] = dacInfo[2]
+ if len(dacInfo) > 3:
+ self["type"] = dacInfo[3]
+ return True
+
+class ParameterSelinuxParser(ParameterParser):
+ def __init__(self, prefix, parameter=None):
+ ParameterParser.__init__(self, prefix, parameter)
+
+ def decode(self, info):
+ self["selinuxLabel"] = info
+ return True
+
+class ParameterFileParser():
+ def __init__(self):
+ self._parameters = {}
+
+ def _handle_param_info(self, file_name, param_info):
+ param_name = param_info[0].strip()
+ old_param = self._parameters.get(param_name)
+ if file_name.endswith(".para.dac"):
+ param = ParameterDacParser(param_name, old_param)
+ if (param.decode(param_info[2].strip())):
+ self._parameters[param_name] = param
+ elif file_name.endswith(".para"):
+ param = ParameterParser(param_name, old_param)
+ if (param.decode(param_info[2].strip())):
+ self._parameters[param_name] = param
+ else:
+ param = ParameterSelinuxParser(param_name, old_param)
+ if (param.decode(param_info[2].strip())):
+ self._parameters[param_name] = param
+
+ def load_parameter_file(self, file_name, str = "="):
+ try:
+ with open(file_name, encoding='utf-8') as fp:
+ line = fp.readline()
+ while line :
+ if line.startswith("#") or len(line) < 3:
+ line = fp.readline()
+ continue
+ paramInfo = line.partition(str)
+ if len (paramInfo) != 3:
+ line = fp.readline()
+ continue
+ self._handle_param_info(file_name, paramInfo)
+ line = fp.readline()
+ except:
+ print("Error, invalid parameter file ", file_name)
+ pass
+
+ def dump_parameter(self):
+ for param in self._parameters.values():
+ print(str(param))
+
+ def _check_file(self, file):
+ valid_file_ext = [".para", ".para.dac"]
+ if not file.is_file():
+ return False
+ for ext in valid_file_ext:
+ if file.name.endswith(ext):
+ return True
+ return False
+
+ def _scan_parameter_file(self, dir):
+ if not os.path.exists(dir):
+ return
+ with os.scandir(dir) as files:
+ for file in files:
+ if self._check_file(file):
+ self.load_parameter_file(file.path)
+
+ def scan_parameter_file(self, dir):
+ parameter_paths = [
+ "/system/etc/param/ohos_const",
+ "/vendor/etc/param",
+ "/chip_prod/etc/param",
+ "/sys_prod/etc/param",
+ "/system/etc/param",
+ ]
+ for path in parameter_paths:
+ self._scan_parameter_file("{}/packages/phone{}".format(dir, path))
+
+def __create_arg_parser():
+ import argparse
+ parser = argparse.ArgumentParser(description='Collect parameter information from xxxx/etc/param dir.')
+ parser.add_argument('-i', '--input',
+ help='input parameter files base directory example "out/rk3568/packages/phone/" ',
+ required=True)
+ return parser
+
+def parameters_collect(base_path):
+ parser = ParameterFileParser()
+ parser.scan_parameter_file(base_path)
+ parser.load_parameter_file(
+ "{}/packages/phone/system/etc/selinux/targeted/contexts/parameter_contexts".format(base_path),
+ " ")
+ return parser
+
+if __name__ == '__main__':
+ args_parser = __create_arg_parser()
+ options = args_parser.parse_args()
+ parameters_collect(options.input)
diff --git a/tools/startup_guard/rules/NO-Config-Cmds-In-Init/README.md b/tools/startup_guard/rules/NO-Config-Cmds-In-Init/README.md
new file mode 100755
index 0000000000000000000000000000000000000000..0fd81e663e2e757d64611c55cea78b333fd5b6c5
--- /dev/null
+++ b/tools/startup_guard/rules/NO-Config-Cmds-In-Init/README.md
@@ -0,0 +1,158 @@
+# cfg文件白名单规则说明
+
+## 规则解释
+ 白名单严格遵循JSON格式。
+
+ ### **耗时命令约束**
+ **[白名单](whitelist.json)** 约束*.cfg文件中的耗时命令。
+
+ - 耗时命令
+
+ 命令执行超过200ms的命令行。
+
+ - 规则要求
+
+ 1. 命令行执行时间超过200ms。
+ 2. 耗时命令在白名单中配置。
+ 3. 耗时命令文件路径在白名单中配置。
+
+ - 白名单信息解释
+ ```
+ {
+ "cmd":"init_global_key",
+ "location":[
+ "/system/etc/init.cfg",
+ ...
+ ]
+ }
+ ```
+ 1. cmd: 命令行
+ 2. location: 耗时命令文件路径
+
+ - 解决方法
+ 1. 检查违规项是否是耗时命令。
+ 2. 检查该命令是否包含在白名单中。
+ 3. 检查该命令存在的文件路径是否包含在白名单中。
+ 4. 根据评审结果添加命令到白名单。
+
+### **condition服务约束**
+ **[白名单](whitelist.json)** 约束*.cfg文件中的service的启动方式:condition。
+
+ - condition
+
+ condition 条件启动,对服务的启动方式配置condition,通过start命令拉起服务。
+
+ - 规则要求
+ 1. 服务不是按需启动,即"ondemand" : false。
+ 2. 服务是条件启动, 即"start-mode" : "condition", 并且通过start命令拉起服务。
+ 3. 服务在白名单中。
+
+ - 白名单信息解释
+ ```
+ {
+ "start-modes": [
+ {
+ "start-mode":"condition",
+ "service": [
+ "hilogd",
+ ...
+ ]
+ }
+ ]
+ }
+ ```
+ 1. start-mode:"condition", 服务启动方式。
+ 2. service:通过"start-mode" : "condition" 启动的服务。
+
+ - 解决方法
+ 1. 检查服务否是按需启动。
+ 2. 检查服务是否配置条件启动, 且通过start命令拉起服务。
+ 3. 检查服务是否在白名单中。
+ 4. 根据评审结果添加服务到白名单。
+
+### **boot服务约束**
+ **[白名单](whitelist.json)** 约束*.cfg文件中的service的启动方式:boot。
+
+ - boot
+
+ 在init job阶段启动,其服务的启动方式配置 boot。
+
+ - 规则要求
+ 1. 服务是boot启动, 即"start-mode" : "boot"的服务。
+ 2. 服务在白名单中。
+
+ - 白名单信息解释
+ ```
+ {
+ "start-modes": [
+ {
+ "start-mode":"boot",
+ "service": [
+ "hiview",
+ ...
+ ]
+ }
+ ]
+ }
+ ```
+ 1. start-mode:"boot",服务启动方式。
+ 2. service:通过"start-mode" : "boot" 启动的服务。
+
+ - 解决方法
+ 1. 检查服务是否是boot启动。
+ 2. 检查服务是否在白名单中。
+ 3. 根据评审结果添加服务到白名单。
+
+### **start命令约束**
+ **[白名单](whitelist.json)** 约束*.cfg文件中的通过start执行的命令。
+
+ - start
+
+ 通过start拉起的服务。
+
+ - 规则要求
+ 1. 通过start命令执行的命令行。
+ 2. 命令行在白名单中。
+
+ - 白名单信息解释
+ ```
+ {
+ "start-cmd": [
+ "ueventd",
+ ...
+ ]
+ }
+ ```
+ start-cmd: 执行start命令行。
+
+ - 解决方法
+ 1. 检查命令是否是start命令。
+ 2. 检查命令行是否在白名单中。
+ 3. 根据评审结果添加命令到白名单。
+
+ ### **selinux约束**
+ - secon
+
+ 服务的selinux标签
+
+ - 规则要求
+ 1. 服务没有配置"secon"。
+ 2. 服务配置的"secon"为空。
+
+ - 解决方法
+ 1. 检查服务是否配置"secon", 且"secon"的配置不为空。
+ 2. 根据要求修改服务"secon"配置。
+
+编译时会提示如下类型的告警:
+ ```
+ [NOT ALLOWED]: 'init_global_key' is timeout command, in /system/etc/init.cfg
+ [NOT ALLOWED]: xxx 'secon' is empty
+ [WARNING]: 'storage_daemon' cannot be started in boot mode
+ [WARNING]: 'hilogd' cannot be started in conditional mode
+ [WARNING]: selinux status is xxx
+ [WARNING]: multimodalinput is not in start cmd list. path:/system/etc/init/multimodalinput.cfg
+```
+
+## 违规场景及处理方案建议
+ 1. 服务默认按照并行启动配置,如果需要添加白名单,需要评审。
+ 2. 根据 **[规则解释](README.md#规则解释)** 排查修改。
diff --git a/tools/startup_guard/rules/NO-Config-Cmds-In-Init/whitelist.json b/tools/startup_guard/rules/NO-Config-Cmds-In-Init/whitelist.json
new file mode 100755
index 0000000000000000000000000000000000000000..0d21b1cd4c6d21d0af8ad9898b8fca22364e4408
--- /dev/null
+++ b/tools/startup_guard/rules/NO-Config-Cmds-In-Init/whitelist.json
@@ -0,0 +1,91 @@
+[
+ {
+ "cmds": [
+ {
+ "cmd":"mount_fstab",
+ "location":[
+ "/system/etc/init.cfg"
+ ]
+ }, {
+ "cmd":"load_access_token_id",
+ "location":[
+ "/system/etc/init/access_token.cfg"
+ ]
+ }, {
+ "cmd":"init_global_key",
+ "location":[
+ "/system/etc/init.cfg"
+ ]
+ }],
+ "start-modes": [
+ {
+ "start-mode":"condition",
+ "service":[
+ "hilogd",
+ "wifi_hal_service",
+ "hdcd",
+ "hiprofilerd",
+ "pulseaudio",
+ "huks_service",
+ "hiprofiler_plugins",
+ "watchdog_service",
+ "pinauth",
+ "hiprofiler_daemon",
+ "multimodalinput",
+ "udevd_service",
+ "mmi_uinput_service"
+ ]
+ }, {
+ "start-mode":"boot",
+ "service":[
+ "hiview",
+ "storage_daemon",
+ "samgr",
+ "thermal_protector",
+ "appspawn",
+ "param_watcher",
+ "device_manager",
+ "storage_manager",
+ "hdf_devmgr"
+ ]
+ }
+ ],
+ "start-cmd":[
+ "ueventd",
+ "watchdog_service",
+ "deviceauth_service",
+ "screenlock_server",
+ "resource_schedule_service",
+ "storage_daemon",
+ "bluetooth_service",
+ "hilogd",
+ "device_usage_stats_service",
+ "wifi_hal_service",
+ "hdcd",
+ "bgtaskmgr_service",
+ "module_update_service",
+ "hiprofilerd",
+ "hiprofiler_plugins",
+ "hiprofiler_daemon",
+ "pulseaudio",
+ "audio_host",
+ "audio_policy",
+ "huks_service",
+ "memmgrservice",
+ "netmanager",
+ "pinauth",
+ "updater_sa",
+ "telephony_sa",
+ "devattest_service",
+ "msdp_sa",
+ "accessibility",
+ "wallpaper_service",
+ "time_service",
+ "udevd_service",
+ "mmi_uinput_service",
+ "multimodalinput",
+ "hdf_devhost",
+ "concurrent_task_service"
+ ]
+ }
+]
diff --git a/tools/startup_guard/rules/NO-Config-SystemParameter-In-INIT/README.md b/tools/startup_guard/rules/NO-Config-SystemParameter-In-INIT/README.md
new file mode 100755
index 0000000000000000000000000000000000000000..d0bb419e3b6123561092be92fc17dc768f330e12
--- /dev/null
+++ b/tools/startup_guard/rules/NO-Config-SystemParameter-In-INIT/README.md
@@ -0,0 +1,31 @@
+# 系统参数白名单规则说明
+
+ ## **系统参数命名约束**
+ - 规则要求
+    1. 约束 \*.para、\*.para.dac 配置文件中的系统参数。
+    2. 系统参数命名由字母、数字、'.'、'-'、'@'、':'、'_' 组成。
+ 3. 不允许出现".."。
+
+ - 解决方法
+ 1. 根据系统参数的命名规范排查修改。
+
+ ## dac配置内存大小约束
+ - 规则要求
+ 1. dac配置不超过200个。
+
+ - 解决方法
+ 1. 重新配置dac内存大小, 修改 "startup/init/services/param/include/param_osadp.h" 中PARAM_WORKSPACE_DAC。
+ 2. 修改 "startup_guard/startup_checker/system_parameter_rules.py" 中CONFIG_DAC_MAX_NUM = 200的大小。
+
+编译时会提示如下类型的告警:
+
+ ```
+ [NOT ALLOWED]: Invalid param: distributedsched.continuationmanager..
+ [NOT ALLOWED]: DAC overallocated memory
+
+ ```
+
+# 违规场景及处理方案建议
+
+ 1. 检查系统参数命名, 根据 **[系统参数命名约束](README.md#系统参数命名约束)** 排查修改。
+ 2. 排查dac配置是否超出内存申请范围, 根据 **[dac配置内存大小约束](README.md#dac配置内存大小约束)** 排查修改。
diff --git a/tools/startup_guard/rules/NO-Config-SystemParameter-In-INIT/whitelist.json b/tools/startup_guard/rules/NO-Config-SystemParameter-In-INIT/whitelist.json
new file mode 100755
index 0000000000000000000000000000000000000000..47910154fa76009f1e02e975c65e9a8d18a821b7
--- /dev/null
+++ b/tools/startup_guard/rules/NO-Config-SystemParameter-In-INIT/whitelist.json
@@ -0,0 +1,377 @@
+[
+ "const.ohos.version.security_patch",
+ "const.ohos.releasetype",
+ "const.ohos.apiversion",
+ "const.ohos.fullname",
+ "persist.thermal.log.enable",
+ "persist.thermal.log.interval",
+ "persist.thermal.log.width",
+ "persist.thermal.log.",
+ "sys.usb.config",
+ "const.product.hardwareversion",
+ "const.product.brand",
+ "const.build.product",
+ "accesstoken.permission.change",
+ "persist.resourceschedule.memmgr.eswap.permanently.closed",
+ "persist.resourceschedule.memmgr.eswap.minsToday",
+ "persist.resourceschedule.memmgr.eswap.swapOutKBToday",
+ "persist.resourceschedule.memmgr.eswap.minsFromBirth",
+ "persist.resourceschedule.memmgr.eswap.swapOutKBFromBirth",
+ "const.cust.",
+ "persist.darkmode",
+ "sys.usb.ffs.ready",
+ "persist.hdc.root",
+ "persist.hdc.mode",
+ "persist.hdc.port",
+ "persist.hdc.uv.threads",
+ "persist.hdc.control",
+ "persist.hdc.control.shell",
+ "persist.hdc.control.file",
+ "persist.hdc.control.fport",
+ "accessibility.config.ready",
+ "bootevent.appfwk.ready",
+ "component.startup.newRules",
+ "component.startup.newRules.except.LauncherSystemUI",
+ "component.startup.backgroundJudge.flag",
+ "component.startup.whitelist.associatedWakeUp",
+ "netmanager_base.",
+ "persist.netmanager_base.",
+ "const.distributed_file_property.enabled",
+ "const.distributed_file_only_for_same_account_test",
+ "const.telephony.slotCount",
+ "telephony.sim.opkey0",
+ "telephony.sim.opkey1",
+ "persist.sys.default_ime",
+ "ohos.servicectrl.",
+ "ohos.servicectrl.reboot.",
+ "ohos.boot.",
+ "bootevent.",
+ "startup.service.ctl.",
+ "startup.device.",
+ "const.debuggable",
+ "const.build.",
+ "const.SystemCapability.",
+ "const.product.",
+ "persist.init.",
+ "startup.appspawn.",
+ "startup.uevent.",
+ "persist.init.debug.",
+ "persist.init.bootevent.enable",
+ "persist.appspawn.",
+ "persist.xts.devattest.",
+ "ohos.boot.sn",
+ "const.product.udid",
+ "const.product.devUdid",
+ "const.actionable_compatible_property.enabled",
+ "const.postinstall.fstab.prefix",
+ "const.secure",
+ "security.perf_harden",
+ "const.allow.mock.location",
+ "persist.sys.usb.config",
+ "persist.window.boot.",
+ "debug.bytrace.",
+ "persist.distributed_hardware.device_manager.",
+ "persist.sys.hiview.",
+ "vendor.hifi.watchdog.come",
+ "bootevent.wms.fullscreen.ready",
+ "persist.pasteboard.",
+ "const.theme.screenlockWhiteApp",
+ "const.theme.screenlockApp",
+ "persist.global.language",
+ "persist.global.locale",
+ "persist.global.is24Hour",
+ "persist.sys.preferredLanguages",
+ "const.global.locale",
+ "const.global.language",
+ "const.global.region",
+ "bootevent.useriam.fwkready",
+ "hiviewdfx.hiperf.perf_event_max_sample_rate",
+ "hiviewdfx.hiperf.perf_cpu_time_max_percent",
+ "hiviewdfx.hiperf.perf_event_mlock_kb",
+ "persist.filemanagement.usb.readonly",
+ "debug.hitrace.tags.enableflags",
+ "hilog.",
+ "persist.sys.hilog.",
+ "bootevent.samgr.ready",
+ "persist.samgr.perf.ondemand",
+ "persist.sys.graphic.animationscale",
+ "debug.graphic.",
+ "musl.log.enable",
+ "musl.log.ld.all",
+ "const.display.brightness.",
+ "persist.sys.arkui.animationscale",
+ "const.build.characteristics",
+ "const.product.model",
+ "const.product.name",
+ "const.sandbox",
+ "const.product.devicetype",
+ "const.software.model",
+ "const.product.manufacturer",
+ "const.product.bootloader.version",
+ "const.product.cpu.abilist",
+ "const.product.software.version",
+ "const.product.incremental.version",
+ "const.product.firstapiversion",
+ "const.product.build.type",
+ "const.product.build.user",
+ "const.product.build.host",
+ "const.product.build.date",
+ "const.product.hardwareprofile",
+ "const.ohos.buildroothash",
+ "backup.debug.overrideExtensionConfig",
+ "persist.netmanager_base.http_proxy.host",
+ "persist.netmanager_base.http_proxy.port",
+ "persist.netmanager_base.http_proxy.exclusion_list",
+ "persist.distributed_hardware.device_manager.discover_status",
+ "input.pointer.device",
+ "hiviewdfx.hiprofiler.profilerd.start",
+ "hiviewdfx.hiprofiler.plugins.start",
+ "hiviewdfx.hiprofiler.native_memoryd.start",
+ "libc.hook_mode",
+ "persist.multimedia.audio.mediavolume",
+ "const.SystemCapability.ArkUI.UiAppearance",
+ "const.SystemCapability.ArkUI.ArkUI.Full",
+ "const.SystemCapability.ArkUI.ArkUI.Napi",
+ "const.SystemCapability.ArkUI.ArkUI.Libuv",
+ "const.SystemCapability.Account.AppAccount",
+ "const.SystemCapability.Account.OsAccount",
+ "const.SystemCapability.DistributedDataManager.KVStore.Core",
+ "const.SystemCapability.DistributedDataManager.KVStore.Lite",
+ "const.SystemCapability.DistributedDataManager.KVStore.DistributedKVStore",
+ "const.SystemCapability.DistributedDataManager.Preferences.Core",
+ "const.SystemCapability.DistributedDataManager.DataObject.DistributedObject",
+ "const.SystemCapability.DistributedDataManager.RelationalStore.Core",
+ "const.SystemCapability.DistributedDataManager.DataShare.Core",
+ "const.SystemCapability.DistributedDataManager.DataShare.Consumer",
+ "const.SystemCapability.DistributedDataManager.DataShare.Provider",
+ "const.SystemCapability.MiscServices.Pasteboard",
+ "const.SystemCapability.Security.AccessToken",
+ "const.SystemCapability.Security.DeviceSecurityLevel",
+ "const.SystemCapability.Security.DataTransitManager",
+ "const.SystemCapability.Security.DeviceAuth",
+ "const.SystemCapability.Security.AppVerify",
+ "const.SystemCapability.Security.CertificateManager",
+ "const.SystemCapability.Security.Huks",
+ "const.SystemCapability.Security.Cipher",
+ "const.SystemCapability.Security.CryptoFramework",
+ "const.SystemCapability.Security.Cert",
+ "const.SystemCapability.UserIAM.UserAuth.FaceAuth",
+ "const.SystemCapability.UserIAM.UserAuth.PinAuth",
+ "const.SystemCapability.UserIAM.UserAuth.Core",
+ "const.SystemCapability.UserIAM.UserAuth.FingerprintAuth",
+ "const.SystemCapability.Startup.SystemInfo",
+ "const.SystemCapability.HiviewDFX.HiLog",
+ "const.SystemCapability.HiviewDFX.HiTrace",
+ "const.SystemCapability.HiviewDFX.HiSysEvent",
+ "const.SystemCapability.HiviewDFX.HiAppEvent",
+ "const.SystemCapability.HiviewDFX.Hiview",
+ "const.SystemCapability.HiviewDFX.Hiview.FaultLogger",
+ "const.SystemCapability.HiviewDFX.HiChecker",
+ "const.SystemCapability.HiviewDFX.HiDumper",
+ "const.SystemCapability.Utils.Lang",
+ "const.SystemCapability.BundleManager.BundleTool",
+ "const.SystemCapability.BundleManager.DistributedBundleFramework",
+ "const.SystemCapability.BundleManager.BundleFramework",
+ "const.SystemCapability.BundleManager.Zlib",
+ "const.SystemCapability.BundleManager.BundleFramework.AppControl",
+ "const.SystemCapability.BundleManager.BundleFramework.Core",
+ "const.SystemCapability.BundleManager.BundleFramework.FreeInstall",
+ "const.SystemCapability.BundleManager.BundleFramework.Launcher",
+ "const.SystemCapability.BundleManager.BundleFramework.DefaultApp",
+ "const.SystemCapability.BundleManager.BundleFramework.Resource",
+ "const.SystemCapability.Ability.AbilityBase",
+ "const.SystemCapability.Ability.DistributedAbilityManager",
+ "const.SystemCapability.Ability.AbilityRuntime.Core",
+ "const.SystemCapability.Ability.AbilityRuntime.FAModel",
+ "const.SystemCapability.Ability.AbilityRuntime.AbilityCore",
+ "const.SystemCapability.Ability.AbilityRuntime.Mission",
+ "const.SystemCapability.Ability.AbilityRuntime.QuickFix",
+ "const.SystemCapability.Ability.AbilityTools.AbilityAssistant",
+ "const.SystemCapability.Ability.Form",
+ "const.SystemCapability.Notification.Emitter",
+ "const.SystemCapability.Notification.Notification",
+ "const.SystemCapability.Notification.ReminderAgent",
+ "const.SystemCapability.Notification.CommonEvent",
+ "const.SystemCapability.Communication.SoftBus.Core",
+ "const.SystemCapability.Communication.NetManager.Core",
+ "const.SystemCapability.Communication.Bluetooth.Core",
+ "const.SystemCapability.Communication.Bluetooth.Lite",
+ "const.SystemCapability.Communication.NetStack",
+ "const.SystemCapability.Communication.WiFi.STA",
+ "const.SystemCapability.Communication.WiFi.AP.Core",
+ "const.SystemCapability.Communication.WiFi.P2P",
+ "const.SystemCapability.Communication.WiFi.Core",
+ "const.SystemCapability.Communication.IPC.Core",
+ "const.SystemCapability.Communication.NetManager.Ethernet",
+ "const.SystemCapability.Communication.NetManager.NetSharing",
+ "const.SystemCapability.Communication.NetManager.MDNS",
+ "const.SystemCapability.Communication.NetManager.Vpn",
+ "const.SystemCapability.Location.Location.Core",
+ "const.SystemCapability.Location.Location.Gnss",
+ "const.SystemCapability.Location.Location.Geofence",
+ "const.SystemCapability.Location.Location.Geocoder",
+ "const.SystemCapability.Location.Location.Lite",
+ "const.SystemCapability.Update.UpdateService",
+ "const.SystemCapability.HiviewDFX.HiProfiler.HiDebug",
+ "const.SystemCapability.Developtools.Syscap",
+ "const.SystemCapability.Sensors.Sensor",
+ "const.SystemCapability.Sensors.MiscDevice",
+ "const.SystemCapability.Graphic.Graphic2D.ColorManager.Core",
+ "const.SystemCapability.Graphic.Graphic2D.EGL",
+ "const.SystemCapability.Graphic.Graphic2D.GLES3",
+ "const.SystemCapability.Graphic.Graphic2D.NativeWindow",
+ "const.SystemCapability.Graphic.Graphic2D.NativeDrawing",
+ "const.SystemCapability.Graphic.Graphic2D.WebGL",
+ "const.SystemCapability.Graphic.Graphic2D.WebGL2",
+ "const.SystemCapability.WindowManager.WindowManager.Core",
+ "const.SystemCapability.MiscServices.Time",
+ "const.SystemCapability.MiscServices.InputMethodFramework",
+ "const.SystemCapability.MiscServices.Download",
+ "const.SystemCapability.MiscServices.Upload",
+ "const.SystemCapability.Print.PrintFramework",
+ "const.SystemCapability.MiscServices.ScreenLock",
+ "const.SystemCapability.MiscServices.Wallpaper",
+ "const.SystemCapability.Multimedia.Audio.Core",
+ "const.SystemCapability.Multimedia.Audio.Renderer",
+ "const.SystemCapability.Multimedia.Audio.Capturer",
+ "const.SystemCapability.Multimedia.Audio.Device",
+ "const.SystemCapability.Multimedia.Audio.Volume",
+ "const.SystemCapability.Multimedia.Audio.Communication",
+ "const.SystemCapability.Multimedia.Audio.Tone",
+ "const.SystemCapability.Multimedia.Audio.Interrupt",
+ "const.SystemCapability.Multimedia.Image.Core",
+ "const.SystemCapability.Multimedia.Image.ImageSource",
+ "const.SystemCapability.Multimedia.Image.ImagePacker",
+ "const.SystemCapability.Multimedia.Image.ImageReceiver",
+ "const.SystemCapability.Multimedia.Image.ImageCreator",
+ "const.SystemCapability.Multimedia.Camera.Core",
+ "const.SystemCapability.Multimedia.MediaLibrary.Core",
+ "const.SystemCapability.Multimedia.MediaLibrary.DistributedCore",
+ "const.SystemCapability.FileManagement.UserFileManager.Core",
+ "const.SystemCapability.FileManagement.UserFileManager.DistributedCore",
+ "const.SystemCapability.Multimedia.Media.Core",
+ "const.SystemCapability.Multimedia.Media.AudioPlayer",
+ "const.SystemCapability.Multimedia.Media.VideoPlayer",
+ "const.SystemCapability.Multimedia.Media.AudioRecorder",
+ "const.SystemCapability.Multimedia.Media.VideoRecorder",
+ "const.SystemCapability.Multimedia.Media.AudioDecoder",
+ "const.SystemCapability.Multimedia.Media.AudioEncoder",
+ "const.SystemCapability.Multimedia.Media.VideoDecoder",
+ "const.SystemCapability.Multimedia.Media.VideoEncoder",
+ "const.SystemCapability.Multimedia.Media.CodecBase",
+ "const.SystemCapability.Multimedia.Media.AVPlayer",
+ "const.SystemCapability.Multimedia.Media.AVRecorder",
+ "const.SystemCapability.Multimedia.SystemSound.Core",
+ "const.SystemCapability.Multimedia.AVSession",
+ "const.SystemCapability.Multimedia.AVSession.Core",
+ "const.SystemCapability.Multimedia.AVSession.Manager",
+ "const.SystemCapability.MultimodalInput.Input.InputConsumer",
+ "const.SystemCapability.MultimodalInput.Input.InputDevice",
+ "const.SystemCapability.MultimodalInput.Input.Core",
+ "const.SystemCapability.MultimodalInput.Input.InputSimulator",
+ "const.SystemCapability.MultimodalInput.Input.InputMonitor",
+ "const.SystemCapability.MultimodalInput.Input.Pointer",
+ "const.SystemCapability.MultimodalInput.Input.ShortKey",
+ "const.SystemCapability.Telephony.DataStorage",
+ "const.SystemCapability.Telephony.CellularCall",
+ "const.SystemCapability.Telephony.CellularData",
+ "const.SystemCapability.Telephony.SmsMms",
+ "const.SystemCapability.Telephony.StateRegistry",
+ "const.SystemCapability.Telephony.CallManager",
+ "const.SystemCapability.Telephony.CoreService",
+ "const.SystemCapability.Global.I18n",
+ "const.SystemCapability.Global.ResourceManager",
+ "const.SystemCapability.PowerManager.BatteryStatistics",
+ "const.SystemCapability.PowerManager.ThermalManager",
+ "const.SystemCapability.PowerManager.PowerManager.Core",
+ "const.SystemCapability.PowerManager.PowerManager.Extension",
+ "const.SystemCapability.PowerManager.DisplayPowerManager",
+ "const.SystemCapability.PowerManager.BatteryManager.Core",
+ "const.SystemCapability.PowerManager.BatteryManager.Extension",
+ "const.SystemCapability.USB.USBManager",
+ "const.SystemCapability.Applications.settings.Core",
+ "const.SystemCapability.Applications.ContactsData",
+ "const.SystemCapability.XTS.DeviceAttest",
+ "const.SystemCapability.Test.WuKong",
+ "const.SystemCapability.Test.UiTest",
+ "const.SystemCapability.DistributedHardware.DistributedAudio",
+ "const.SystemCapability.DistributedHardware.DistributedCamera",
+ "const.SystemCapability.DistributedHardware.DistributedScreen",
+ "const.SystemCapability.DistributedHardware.DistributedInput",
+ "const.SystemCapability.DistributedHardware.DistributedHardwareFWK",
+ "const.SystemCapability.DistributedHardware.DeviceManager",
+ "const.SystemCapability.Msdp.DeviceStatus.Stationary",
+ "const.SystemCapability.FileManagement.File.FileIO",
+ "const.SystemCapability.FileManagement.File.Environment",
+ "const.SystemCapability.FileManagement.File.DistributedFile",
+ "const.SystemCapability.FileManagement.DistributedFileService.CloudSyncManager",
+ "const.SystemCapability.FileManagement.UserFileService",
+ "const.SystemCapability.FileManagement.AppFileService",
+ "const.SystemCapability.FileManagement.StorageService.Backup",
+ "const.SystemCapability.FileManagement.StorageService.SpatialStatistics",
+ "const.SystemCapability.FileManagement.StorageService.Volume",
+ "const.SystemCapability.FileManagement.StorageService.Encryption",
+ "const.SystemCapability.ResourceSchedule.WorkScheduler",
+ "const.SystemCapability.ResourceSchedule.BackgroundTaskManager.ContinuousTask",
+ "const.SystemCapability.ResourceSchedule.BackgroundTaskManager.TransientTask",
+ "const.SystemCapability.ResourceSchedule.BackgroundTaskManager.EfficiencyResourcesApply",
+ "const.SystemCapability.ResourceSchedule.UsageStatistics.AppGroup",
+ "const.SystemCapability.ResourceSchedule.UsageStatistics.App",
+ "const.SystemCapability.BarrierFree.Accessibility.Core",
+ "const.SystemCapability.BarrierFree.Accessibility.Hearing",
+ "const.SystemCapability.BarrierFree.Accessibility.Vision",
+ "const.SystemCapability.Customization.ConfigPolicy",
+ "const.SystemCapability.Customization.EnterpriseDeviceManager",
+ "const.SystemCapability.Web.Webview.Core",
+ "const.SystemCapability.Ai.MindSpore",
+ "persist.time.timezone",
+ "hiviewdfx.hiprofiler.",
+ "libc.hook_mode.",
+ "hilog.private.on",
+ "hilog.debug.on",
+ "persist.sys.hilog.kmsg.on",
+ "persist.sys.hilog.debug.on",
+ "hilog.flowctrl.proc.on",
+ "hilog.flowctrl.domain.on",
+ "hilog.loggable.global",
+ "hilog.buffersize.global",
+ "persist.time.",
+ "const.ark.minVersion",
+ "const.ark.version",
+ "const.display.brightness.min",
+ "const.display.brightness.default",
+ "const.display.brightness.max",
+ "persist.telephony.",
+ "telephony.",
+ "sys.",
+ "sys.usb",
+ "net.",
+ "net.tcp.",
+ "const.postinstall.",
+ "const.postinstall.fstab.",
+ "const.allow.",
+ "const.allow.mock.",
+ "security.",
+ "persist.",
+ "persist.sys.",
+ "debug.",
+ "musl.",
+ "bootevent.wms.",
+ "ffrt.",
+ "hiviewdfx.hiperf.",
+ "persist.multimedia.audio.",
+ "persist.ark.",
+ "persist.ace.",
+ "accesstoken.permission.",
+ "persist.bms.",
+ "distributedsched.continuationmanager.",
+ "updater.hdc.configfs",
+ "updater.flashd.configfs",
+ "updater.data.configs",
+ "persist.xts.devattest.authresult",
+ "llvm.debug.service.",
+ "persist.edm.edm_enable",
+ "persist.edm.",
+ "persist.usb.setting.gadget_conn_prompt",
+ "persist.usb.setting."
+]
diff --git a/tools/startup_guard/startup_checker/__init__.py b/tools/startup_guard/startup_checker/__init__.py
new file mode 100755
index 0000000000000000000000000000000000000000..5231516e9c3e1f8ad4fbcd7ee038e9c17386c792
--- /dev/null
+++ b/tools/startup_guard/startup_checker/__init__.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+#coding=utf-8
+
+#
+# Copyright (c) 2023 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from .cmds_rule import cmdRule
+from .system_parameter_rules import SystemParameterRule
+
+def check_all_rules(mgr, args):
+    """Run every registered startup-guard rule against the parsed configs.
+
+    mgr:  parser manager holding the parsed config data, handed to each rule.
+    args: CLI namespace (may be None); args.no_fail downgrades failures.
+    Returns True when all rules pass (or when no_fail is set), else False.
+    """
+    # Rule classes to instantiate and run, in order.
+    rules = [
+        cmdRule,
+        SystemParameterRule,
+    ]
+
+    passed = True
+    for rule in rules:
+        r = rule(mgr, args)
+        r.log("Do %s rule checking now:" % rule.RULE_NAME)
+        if not r.__check__():
+            passed = False
+            r.log(" Please refer to: \033[91m%s\x1b[0m" % r.get_help_url())
+
+    # no_fail turns violations into report-only: every rule still ran and
+    # logged, but the overall result is forced to success.
+    if args and args.no_fail:
+        return True
+
+    return passed
diff --git a/tools/startup_guard/startup_checker/base_rule.py b/tools/startup_guard/startup_checker/base_rule.py
new file mode 100755
index 0000000000000000000000000000000000000000..082ca1286a9f15694b71c99ed5583d89c117f546
--- /dev/null
+++ b/tools/startup_guard/startup_checker/base_rule.py
@@ -0,0 +1,69 @@
+#!/usr/bin/env python
+#coding=utf-8
+
+#
+# Copyright (c) 2023 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import os
+import json
+
+class BaseRule(object):
+    """Base class for startup-guard rules.
+
+    Loads the rule's whitelist.json on construction and provides logging
+    helpers; concrete rules override __check__().
+    """
+    # Overridden by subclasses; also names the rules/<RULE_NAME>/ directory
+    # from which whitelist files are loaded.
+    RULE_NAME = ""
+
+    def __init__(self, mgr, args):
+        # mgr: parser manager giving access to parsed config data.
+        # args: CLI namespace; args.rules may list extra rules directories.
+        self._args = args
+        self._mgr = mgr
+        self._white_lists = self.__load_files__("whitelist.json")
+
+    def __load_files__(self, name):
+        """Concatenate <dir>/<RULE_NAME>/<name> JSON lists from every rules dir.
+
+        Lines starting with '//' are stripped before parsing, allowing
+        comment lines inside the otherwise-strict JSON whitelist files.
+        """
+        # Built-in rules directory shipped next to this package, plus any
+        # extra directories passed on the command line.
+        rules_dir = []
+        rules_dir.append(os.path.join(os.path.dirname(os.path.realpath(__file__)), "../rules"))
+        if self._args and self._args.rules:
+            rules_dir = rules_dir + self._args.rules
+
+        res = []
+        for d in rules_dir:
+            rules_file = os.path.join(d, self.__class__.RULE_NAME, name)
+            try:
+                with open(rules_file, "r") as f:
+                    jsonstr = "".join([ line.strip() for line in f if not line.strip().startswith("//") ])
+                    res = res + json.loads(jsonstr)
+            except:
+                # NOTE(review): bare except silently skips missing *and*
+                # malformed whitelist files — consider catching
+                # (IOError, OSError, ValueError) and logging the skip.
+                pass
+
+        return res
+
+    def get_mgr(self):
+        # Accessor for the parser manager passed at construction.
+        return self._mgr
+
+    def get_white_lists(self):
+        # Accessor for the merged whitelist data loaded in __init__.
+        return self._white_lists
+
+    def log(self, info):
+        print(info)
+
+    def warn(self, info):
+        # Magenta "[WARNING]" prefix via ANSI escapes.
+        print("\033[35m[WARNING]\x1b[0m: %s" % info)
+
+    def error(self, info):
+        # Red "[NOT ALLOWED]" prefix via ANSI escapes.
+        print("\033[91m[NOT ALLOWED]\x1b[0m: %s" % info)
+
+    def get_help_url(self):
+        # README of this rule in the upstream repository.
+        return "https://gitee.com/openharmony/developtools_integration_verification/tree/master/tools/startup_guard/rules/%s/README.md" % self.__class__.RULE_NAME
+
+    # To be override
+    def __check__(self):
+        # Default pass
+        return True
diff --git a/tools/startup_guard/startup_checker/cmds_rule.py b/tools/startup_guard/startup_checker/cmds_rule.py
new file mode 100644
index 0000000000000000000000000000000000000000..15d1ed23c32c7ae09d1c63e784baf8881553bbea
--- /dev/null
+++ b/tools/startup_guard/startup_checker/cmds_rule.py
@@ -0,0 +1,149 @@
+#!/usr/bin/env python
+#coding=utf-8
+
+#
+# Copyright (c) 2023 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import os
+import json
+
+from .base_rule import BaseRule
+
+class cmdRule(BaseRule):
+    """Checks init *.cfg data against the NO-Config-Cmds-In-Init whitelist.
+
+    Covers: restricted (timeout) commands and their allowed file locations,
+    service start modes (boot / condition), 'start' command usage, and the
+    selinux 'secon' label of each service.
+    """
+    RULE_NAME = "NO-Config-Cmds-In-Init"
+
+    def __init__(self, mgr, args):
+        super().__init__(mgr, args)
+        # Sections of whitelist.json, populated by _parse_while_list().
+        self._cmds = {}
+        self._start_modes = {}
+        self._boot_list = {}
+        self._condition_list = {}
+        self._start_cmd_list = {}
+
+    def _get_json_service(self):
+        # Split the "start-modes" whitelist section into the boot-mode and
+        # condition-mode service lists.
+        for i in range(len(self._start_modes)):
+            if self._start_modes[i]["start-mode"] == "boot":
+                self._boot_list = self._start_modes[i]["service"]
+            elif self._start_modes[i]["start-mode"] == "condition":
+                self._condition_list = self._start_modes[i]["service"]
+            pass
+
+    def _get_start_cmds(self, parser):
+        # Map each "start" command's content -> fileId of the cfg defining it.
+        # NOTE(review): local name 'list' shadows the builtin list type.
+        list = {}
+        for cmd in parser._cmds:
+            if cmd["name"] == "start":
+                list[cmd["content"]] = cmd["fileId"]
+            pass
+        return list
+
+    def _parse_while_list(self):
+        # Unpack the first whitelist document into its three sections.
+        white_lists =self.get_white_lists()[0]
+        for key, item in white_lists.items():
+            if key == "cmds":
+                self._cmds = item
+            if key == "start-modes":
+                self._start_modes = item
+            if key == "start-cmd":
+                self._start_cmd_list = item
+
+    def _check_condition_start_mode(self, cmd_list, service_name, passed):
+        # Warn when a condition-mode service is not whitelisted or is never
+        # launched via a "start" command. 'passed' is returned unchanged, so
+        # condition-mode violations are warnings only.
+        if service_name in self._condition_list and service_name in cmd_list:
+            pass
+        else:
+            self.warn("\'%s\' cannot be started in conditional mode" % service_name)
+        return passed
+
+
+    def _check_service(self, parser):
+        # Check the start_mode of every parsed service against the whitelists.
+        # NOTE(review): boot_passed is never set to False, so boot-mode
+        # violations only warn — confirm this is intended.
+        boot_passed = True
+        condition_passed = True
+        start_cmd_list = self._get_start_cmds(parser).keys()
+        for key, item in parser._services.items():
+            if item.get("start_mode") == "boot":
+                if key not in self._boot_list:
+                    self.warn("\'%s\' cannot be started in boot mode" % key)
+            elif item.get("on_demand") is not True and item.get("start_mode") == "condition":
+                condition_passed = self._check_condition_start_mode(start_cmd_list, key, condition_passed)
+        return boot_passed and condition_passed
+
+    def _check_file_id_in_cmds(self, cmdlist, cmdline):
+        # Collect the fileIds of every parsed command whose name matches
+        # the whitelisted command being checked.
+        file_id_list = set()
+        for i in range(len(cmdlist)):
+            if cmdline == cmdlist[i]["name"]:
+                file_id_list.add(cmdlist[i]["fileId"])
+            pass
+        return file_id_list
+
+    def _check_cmdline_in_parser(self, parser):
+        # Fail when a whitelisted (timeout) command appears in a cfg file
+        # that is not in that command's allowed "location" list.
+        passed = True
+        cmdline = []
+        file_id_list = set()
+        parser_cmds = parser._cmds
+
+        for cmd in self._cmds:
+            cmdline = cmd["cmd"]
+            file_id_list = self._check_file_id_in_cmds(parser_cmds, cmdline)
+            file_lists = cmd["location"]
+            for key, item in parser._files.items():
+                if item["fileId"] in file_id_list and key not in file_lists:
+                    output = "\'{}\' is timeout command, in {}".format(cmd["cmd"], key)
+                    self.error("%s" % str(output))
+                    passed = False
+            file_id_list.clear()
+        return passed
+
+    def _check_selinux(self, parser):
+        # When selinux is not enforcing, only warn and pass; otherwise every
+        # service must carry a non-empty 'secon' label.
+        if parser._selinux != 'enforcing':
+            self.warn("selinux status is %s" %parser._selinux)
+            return True
+
+        passed = True
+        for key, item in parser._services.items():
+            if item.get("secon") == "":
+                output_str = "%s \'secon\' is empty" % key
+                self.error("%s" % str(output_str))
+                passed = False
+        return passed
+
+    def _check_start_cmd(self, parser):
+        # Every "start" command target must appear in the start-cmd
+        # whitelist; otherwise warn with the offending cfg path and fail.
+        passed = True
+        start_cmd_list = self._get_start_cmds(parser)
+        for cmd, file_id in start_cmd_list.items():
+            if cmd in list(self._start_cmd_list):
+                pass
+            else:
+                for key, item in parser._files.items():
+                    if item["fileId"] == file_id:
+                        log_str = "{} is not in start cmd list. path:{}".format(cmd, item["file_name"])
+                        self.warn("%s" % log_str)
+                passed = False
+                pass
+        return passed
+
+    def check_config_cmd(self):
+        """Run all cmd-related checks; True only when every check passes."""
+        passed = True
+        self._parse_while_list()
+        cfg_parser = self.get_mgr().get_parser_by_name('cmd_whitelist')
+        self._get_json_service()
+
+        start_passed = self._check_start_cmd(cfg_parser)
+        secon_passed = self._check_selinux(cfg_parser)
+        cmd_passed = self._check_cmdline_in_parser(cfg_parser)
+        start_mode_passed = self._check_service(cfg_parser)
+        passed = start_passed and secon_passed and cmd_passed and start_mode_passed
+        return passed
+
+    def __check__(self):
+        # Entry point called by check_all_rules().
+        return self.check_config_cmd()
diff --git a/tools/startup_guard/startup_checker/system_parameter_rules.py b/tools/startup_guard/startup_checker/system_parameter_rules.py
new file mode 100644
index 0000000000000000000000000000000000000000..3f11a4dccc39237e0f23bd521c12f6916814d116
--- /dev/null
+++ b/tools/startup_guard/startup_checker/system_parameter_rules.py
@@ -0,0 +1,73 @@
+#!/usr/bin/env python
+#coding=utf-8
+
+#
+# Copyright (c) 2023 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from .base_rule import BaseRule
+
+class SystemParameterRule(BaseRule):
+    """Checks *.para / *.para.dac system parameters against the whitelist."""
+    RULE_NAME = "NO-Config-SystemParameter-In-INIT"
+    # Maximum allowed DAC entries; per the rule README this must stay in
+    # sync with PARAM_WORKSPACE_DAC in startup/init param_osadp.h.
+    CONFIG_DAC_MAX_NUM = 200
+
+    def _check_param_name(self, param_name, empty_flag):
+        # len: (0, 96]
+        # Only allow alphanumeric, plus '.', '-', '@', ':', or '_'/
+        # Don't allow ".." to appear in a param name
+        if len(param_name) > 96 or len(param_name) < 1 or param_name[0] == '.' or '..' in param_name:
+            return False
+
+        # A trailing '.' is only tolerated when the value is empty
+        # (prefix-style whitelist entry).
+        if empty_flag is False:
+            if param_name[-1] == '.':
+                return False
+
+        if param_name == "#":
+            return True
+
+        for char_value in param_name:
+            if char_value in '._-@:':
+                continue
+
+            if char_value.isalnum():
+                continue
+            return False
+        return True
+
+    def _check_Param_in_init(self):
+        """Validate parameter names and the DAC entry budget.
+
+        NOTE(review): an invalid name is reported via error() but does not
+        set 'passed' to False — only the DAC overflow fails this check;
+        confirm whether name violations should also fail the build.
+        """
+        passed = True
+        value_empty_flag = True
+        white_list =self.get_white_lists()
+        parser = self.get_mgr().get_parser_by_name('system_parameter_whitelist')
+        counts = 0
+        for key, item in parser._parameters.items():
+            # Entries with a non-zero dacMode count against the DAC budget.
+            if (item.get("dacMode") != 0):
+                counts += 1
+            # presumably the rendered entry ends with '=' when the value is
+            # empty — TODO confirm against the parser's item representation.
+            if str(item)[-1] == "=":
+                value_empty_flag = True
+            else:
+                value_empty_flag = False
+
+            if not self._check_param_name(key, value_empty_flag):
+                self.error("Invalid param: %s" % key)
+                continue
+            if key in white_list:
+                continue
+            if counts > SystemParameterRule.CONFIG_DAC_MAX_NUM:
+                self.error("DAC overallocated memory")
+                passed = False
+        return passed
+
+    def __check__(self):
+        # Entry point called by check_all_rules().
+        return self._check_Param_in_init()
diff --git a/tools/startup_guard/startup_guard.py b/tools/startup_guard/startup_guard.py
new file mode 100755
index 0000000000000000000000000000000000000000..92e259c8b4c02d3ba33d798077e556aa1d4fd665
--- /dev/null
+++ b/tools/startup_guard/startup_guard.py
@@ -0,0 +1,46 @@
+#!/usr/bin/env python
+#coding=utf-8
+
+#
+# Copyright (c) 2023 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from config_parser_mgr import ConfigParserMgr
+
+def __create_arg_parser():
+    """Build the CLI parser: -i input dir, -r extra rules dirs, -n no-fail."""
+    import argparse
+    parser = argparse.ArgumentParser(description='Check startup architecture information from compiled output files.')
+    parser.add_argument('-i', '--input',
+                        help='input config files base directory example "out/rk3568" ', required=True)
+    parser.add_argument('-r', '--rules', action='append',
+                        help='rules directory', required=False)
+    parser.add_argument('-n', '--no_fail',
+                        help='force to pass all rules', required=False)
+    return parser
+
+def startup_guard(out_path, args=None):
+    """Parse the build output under out_path and run all startup rules.
+
+    out_path: base directory of compiled config files (e.g. "out/rk3568").
+    args:     optional argparse namespace forwarded to the rule checkers.
+    """
+    mgr = ConfigParserMgr()
+    mgr.load_all_parser(out_path)
+
+    # NOTE(review): imported locally rather than at module top — presumably
+    # to defer loading the checker package; confirm no circular import.
+    from startup_checker import check_all_rules
+    passed = check_all_rules(mgr, args)
+    if passed:
+        print("All rules passed")
+    else:
+        print("Please modify according to README.md")
+
+if __name__ == '__main__':
+    # CLI entry point: parse arguments and run the guard on the input dir.
+    parser = __create_arg_parser()
+    args = parser.parse_args()
+    startup_guard(args.input, args)