diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..2f78cf5b66514f2506d9af5f3dadf3dee7aa6d9f
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,2 @@
+*.pyc
+
diff --git a/deps_guard/README.md b/deps_guard/README.md
new file mode 100755
index 0000000000000000000000000000000000000000..1be2b7a20e82b521bbd4b1acf2c9469905da8cde
--- /dev/null
+++ b/deps_guard/README.md
@@ -0,0 +1,14 @@
+# Dependency Guard System
+
+
+The dependency guard system analyzes the architectural dependencies between the ELF files in the build output and blocks dependencies that violate the rules.
+
+The following rules are supported:
+
+| Rule | Description |
+| ---- | ----------- |
+| [NO-Depends-On-NAPI](rules/NO-Depends-On-NAPI/README.md) | NAPI modules installed under /system/lib{64}/module must not be depended on by any other module. |
+| [NO-Depends-On-SA](rules/NO-Depends-On-SA/README.md) | System SA modules must not be depended on by any other module. |
+| [ChipsetSDK](rules/ChipsetSDK/README.md) | System-component Chipset SDK modules that may be depended on by chipset-component modules are managed through a whitelist; depending on system-component modules outside the whitelist is not allowed. |
+
diff --git a/deps_guard/deps_guard.py b/deps_guard/deps_guard.py
new file mode 100755
index 0000000000000000000000000000000000000000..193ebc7edc050cc41377c3a91ae2f24b30621fcc
--- /dev/null
+++ b/deps_guard/deps_guard.py
@@ -0,0 +1,32 @@
+#! /usr/bin/env python
+#coding=utf-8
+
+from elf_file_mgr import ElfFileMgr
+
+def createArgParser():
+    import argparse
+
+    parser = argparse.ArgumentParser(description='Check architecture information from compiled output files.')
+
+    parser.add_argument('-i', '--input',
+                        help='input asset files root directory', required=True)
+
+    parser.add_argument('-r', '--rules',
+                        help='rules directory', required=False)
+
+    parser.add_argument('-n', '--no-fail', action='store_true',
+                        help='force to pass all rules', required=False)
+
+    return parser
+
+if __name__ == '__main__':
+
+    parser = createArgParser()
+    args = parser.parse_args()
+
+    # Scan all ELF files under the product output directory
+    mgr = ElfFileMgr(args.input)
+    mgr.scan_all_files()
+
+    # Run every configured rule against the scanned dependency graph
+    from rules_checker import check_all_rules
+    check_all_rules(mgr, args)
diff --git a/deps_guard/elf_file_mgr/__init__.py b/deps_guard/elf_file_mgr/__init__.py
new file mode 100755
index 0000000000000000000000000000000000000000..fcb0354315e85cd841ec675707be4680b0972338
--- /dev/null
+++ b/deps_guard/elf_file_mgr/__init__.py
@@ -0,0 +1,6 @@
+#! /usr/bin/env python
+#coding=utf-8
+
+from .elf_walker import ELFWalker
+
+from .elf_file_mgr import ElfFileMgr
diff --git a/deps_guard/elf_file_mgr/elf_file.py b/deps_guard/elf_file_mgr/elf_file.py
new file mode 100755
index 0000000000000000000000000000000000000000..c0359f8c170731df4e2200036161e69487c442a7
--- /dev/null
+++ b/deps_guard/elf_file_mgr/elf_file.py
@@ -0,0 +1,72 @@
+#! 
/usr/bin/env python +#coding=utf-8 + +import os +from stat import * + +from .utils import command + +class ElfFile(dict): + def __init__(self, file, prefix): + self._f = file + self._f_safe = "'%s'" % file + + self["name"] = os.path.basename(file) + self["size"] = os.stat(self._f)[ST_SIZE] + if self["name"].find(".so") > 0: + self["type"] = "lib" + else: + self["type"] = "bin" + self["path"] = file[len(prefix):] + #self.__extract_elf_size() + + def __eq__(self, other): + if not isinstance(other, ElfFile): + return NotImplemented + + return self["path"] == other["path"]#and self["name"] == other["name"] + + def __extract_soname(self): + soname_data = command("mklibs-readelf", "--print-soname", self._f_safe) + if soname_data: + return soname_data.pop() + return "" + + def __extract_elf_size(self): + size_data = command("size", self._f_safe) + if not size_data or len(size_data) < 2: + self["text_size"] = 0 + self["data_size"] = 0 + self["bss_size"] = 0 + return 0 + + vals = size_data[1].split() + self["text_size"] = int(vals[0]) + self["data_size"] = int(vals[1]) + self["bss_size"] = int(vals[2]) + return "" + + def is_library(self): + if self["name"].find(".so") > 0: + return True + return False + + def get_file(self): + return self._f + + # Return a set of libraries the passed objects depend on. + def library_depends(self): + if not os.access(self._f, os.F_OK): + raise Exception("Cannot find lib: " + self._f) + return command("mklibs-readelf", "--print-needed", self._f_safe) + +if __name__ == '__main__': + import elf_walker + + cnt = 0 + elfFiles = elf_walker.ELFWalker() + for f in elfFiles.get_elf_files(): + if f.find("libskia_ohos.z.so") < 0: + continue + elf = ElfFile(f, elfFiles.get_product_images_path()) + print(f) diff --git a/deps_guard/elf_file_mgr/elf_file_mgr.py b/deps_guard/elf_file_mgr/elf_file_mgr.py new file mode 100755 index 0000000000000000000000000000000000000000..05ffd8bba862f0e3964316f7f7a4bbb35e6ccced --- /dev/null +++ b/deps_guard/elf_file_mgr/elf_file_mgr.py @@ -0,0 +1,374 @@ +#! 
/usr/bin/env python +#coding=utf-8 + +import string +import sys +import os + +from .elf_file import ElfFile +from .elf_walker import ELFWalker + +class ElfFileWithDepsInfo(ElfFile): + def __init__(self, file, prefix): + super(ElfFileWithDepsInfo, self).__init__(file, prefix) + self["deps"] = [] + self["dependedBy"] = [] + + self["deps_indirect"] = [] + self["dependedBy_indirect"] = [] + self["deps_total"] = 0 + self["dependedBy_total"] = 0 + + self._cached = False + + def __eq__(self, other): + if not isinstance(other, ElfFileWithDepsInfo): + return NotImplemented + + return self["id"] == other["id"]#and self["name"] == other["name"] + + def dependsOn(self, mod): + for dep in self["deps"]: + if dep["callee"] == mod: + return True + return False + + def getAllDependedModules(self): + res = [] + for dep in self["deps"]: + res.append(dep["callee"]) + return res + self["deps_indirect"] + + def __repr__(self): + return self.__str__() + + def __str__(self): + #return "%s deps:%s\n%s deps_indirect:%s" % (self["name"], self.getDepends(), self["name"], self.getIndirectDepends()) + return "%s:%d deps(%d) depsTotal(%d) dependedBy(%d)" % (self["name"], self["id"], len(self["deps"]), len(self["deps"]) + len(self["deps_indirect"]), len(self["dependedBy"])) + +class Dependency(dict): + def __init__(self, idx, caller, callee): + self["id"] = idx + self["caller_id"] = caller["id"] + self["callee_id"] = callee["id"] + self["caller"] = caller + self["callee"] = callee + self["external"] = False + self["calls"] = 0 + + def __eq__(self, other): + if not isinstance(other, Dependency): + return NotImplemented + + return self["id"] == other["id"]#and self["name"] == other["name"] + + def __repr__(self): + return self.__str__() + + def __str__(self): + return "(%s:%s[%d] -%d:%d-> %s:%s[%d])" % (self["caller"]["componentName"], self["caller"]["name"], self["caller"]["id"], int(self["external"]), self["calls"], self["callee"]["componentName"], self["callee"]["name"], self["callee"]["id"]) + +from .module_info import CompileInfoLoader +from .hdi import HdiParser +from .sa import SAParser + +class ElfFileMgr(object): + def __init__(self, product_out_path=None, elfFileClass=None, dependenceClass = None): + self._elfFiles = [] + self._path_dict = {} + self._basename_dict = {} + if elfFileClass: + self._elfFileClass = elfFileClass + else: + self._elfFileClass = ElfFileWithDepsInfo + + self._deps = [] + if dependenceClass: + self._dependenceClass = dependenceClass + else: + self._dependenceClass = Dependency + self._depIdx = 1 + self._elfIdx = 1 + + self._not_found_depened_files = [] + + walker = ELFWalker(product_out_path) + self._prefix = walker.get_product_images_path() + self._product_out_path = walker.get_product_out_path() + self._link_file_map = walker.get_link_file_map() + + def scan_all_files(self): + walker = ELFWalker(self._product_out_path) + + self._scan_all_elf_files(walker) + self._build_deps_tree() + + self._maxDepth = 0 + self._maxTotalDepends = 0 + + print("Build indirect dependence tree for %d ELF files now ..." 
% len(self._elfFiles)) + + for mod in self._elfFiles: + mod["_recursiveFinished"] = False + for mod in self._elfFiles: + self.__update_indirect_deps_recursive(mod) + for mod in self._elfFiles: + mod["_recursiveFinished"] = False + for mod in self._elfFiles: + self.__update_indirect_dependedBy_recursive(mod) + for mod in self._elfFiles: + del mod["_recursiveFinished"] + + print("Load compile information now ...") + CompileInfoLoader.load(self, self._product_out_path) + HdiParser.load(self, self._product_out_path) + SAParser.load(self, self._product_out_path) + + def get_product_images_path(self): + return self._prefix + + def get_product_out_path(self): + return self._product_out_path + + def add_elf_file(self, elf): + # Append to array in order + elf["id"] = self._elfIdx + self._elfIdx = self._elfIdx + 1 + self._elfFiles.append(elf) + + # Add to dictionary with path as key + self._path_dict[elf["path"]] = elf + + # Add to dictionary with basename as key + if elf["name"] in self._basename_dict: + self._basename_dict[elf["name"]].append(elf) + else: + self._basename_dict[elf["name"]] = [ elf ] + + def _scan_all_elf_files(self, walker): + print("Scanning %d ELF files now ..." % len(walker.get_elf_files())) + for f in walker.get_elf_files(): + elf = self._elfFileClass(f, self._prefix) + if elf["path"] in self._path_dict: + print("Warning: duplicate " + elf.get_file() + ' skipped.') + continue + + # Ignore these files + if elf["name"] in [ "ld-musl-aarch64.so.1", "ld-musl-arm.so.1", "hdc_std" ]: + continue + + self.add_elf_file(elf) + + # Reorder libraries with same name as defined by LD_LIBRARY_PATH + for bname, val in self._basename_dict.items(): + if len(val) < 2: + continue + self._basename_dict[bname] = self.__reorder_library(val) + + def __reorder_library(self, val): + orders = [] + idx = 0 + for p in val: + orders.append((self.__get_library_order(p["path"]), idx)) + idx = idx + 1 + orders.sort() + + res = [] + for item in orders: + res.append(val[item[1]]) + + return res + + def __get_library_order(self, path): + if not path.startswith("/"): + path = "/" + path + if path.find("/lib64/") > 0: + pathOrder = "/system/lib64:/vendor/lib64:/vendor/lib64/chipsetsdk:/system/lib64/ndk:/system/lib64/chipset-pub-sdk:/system/lib64/chipset-sdk:/system/lib64/platformsdk:/system/lib64/priv-platformsdk:/system/lib64/priv-module:/system/lib64/module:/system/lib64/module/data:/system/lib64/module/multimedia:/system/lib:/vendor/lib:/system/lib/ndk:/system/lib/chipset-pub-sdk:/system/lib/chipset-sdk:/system/lib/platformsdk:/system/lib/priv-platformsdk:/system/lib/priv-module:/system/lib/module:/system/lib/module/data:/system/lib/module/multimedia:/lib64:/lib:/usr/local/lib:/usr/lib" + else: + pathOrder = "/system/lib:/vendor/lib:/vendor/lib/chipsetsdk:/system/lib/ndk:/system/lib/chipset-pub-sdk:/system/lib/chipset-sdk:/system/lib/platformsdk:/system/lib/priv-platformsdk:/system/lib/priv-module:/system/lib/module:/system/lib/module/data:/system/lib/module/multimedia:/lib:/usr/local/lib:/usr/lib" + + if path.rfind("/") < 0: + return 1000 + + path = path[:path.rfind("/")] + paths = pathOrder.split(':') + idx = 0 + for p in paths: + if p == path: + return idx + idx = idx + 1 + return 1000 + + + def _build_deps_tree(self): + print("Build dependence tree for %d ELF files now ..." 
% len(self._elfFiles)) + for elf in self._elfFiles: + self.__build_deps_tree_for_one_elf(elf) + print(" Got %d dependencies" % self._depIdx) + + def add_dependence(self, caller, callee): + dep = self._dependenceClass(self._depIdx, caller, callee) + caller["deps"].append(dep) + callee["dependedBy"].append(dep) + + self._deps.append(dep) + self._depIdx = self._depIdx + 1 + return dep + + def __build_deps_tree_for_one_elf(self, elf): + for lib in elf.library_depends(): + dep_elf = self.get_elf_by_name(lib) + if not dep_elf: + self._not_found_depened_files.append({"caller": elf["name"], "callee": lib}) + print("Warning: can not find depended library [" + lib + "] for " + elf["name"]) + break + + self.add_dependence(elf, dep_elf) + + def get_elf_by_path(self, path): + if path not in self._path_dict and path.find("/lib64/") > 0: + path = path.replace("/lib64/", "/lib/") + if path in self._path_dict: + return self._path_dict[path] + if path.find("/platformsdk/") > 0: + return None + + if path.startswith("system/lib64/"): + path = path.replace("system/lib64/", "system/lib64/platformsdk/") + elif path.startswith("system/lib/"): + path = path.replace("system/lib/", "system/lib/platformsdk/") + else: + return None + + if path not in self._path_dict and path.find("/lib64/") > 0: + path = path.replace("/lib64/", "/lib/") + if path in self._path_dict: + return self._path_dict[path] + return None + + def get_elf_by_idx(self, idx): + if idx < 1 or idx > len(self._elfFiles): + return None + return self._elfFiles[idx - 1] + + def __get_link_file(self, name): + for src, target in self._link_file_map.items(): + tmp_name = os.path.basename(src) + if name != tmp_name: + continue + tmp_name = os.path.dirname(src) + tmp_name = os.path.join(tmp_name, target) + link_elf = ElfFile(tmp_name, self._prefix) + return self.get_elf_by_path(link_elf["path"]) + + def get_elf_by_name(self, name): + if name in self._basename_dict: + return self._basename_dict[name][0] + + #print("Library [" + name + "] not found, try find by soft links:") + return self.__get_link_file(name) + + def get_all(self): + return self._elfFiles + + def get_all_deps(self): + return self._deps + + def __update_indirect_dependedBy_recursive(self, mod): + # Already finished + if mod["_recursiveFinished"]: + return mod["dependedBy_depth"] + + maxDepth = 0 + for item in mod["dependedBy"]: + # update caller first + caller = item["caller"] + depth = self.__update_indirect_dependedBy_recursive(caller) + if depth > maxDepth: + maxDepth = depth + for dep in caller["dependedBy"]: + grand_caller = dep["caller"] + if grand_caller.dependsOn(mod): + continue + if grand_caller in mod["dependedBy_indirect"]: + continue + mod["dependedBy_indirect"].append(grand_caller) + for dep in caller["dependedBy_indirect"]: + if dep.dependsOn(mod): + continue + if dep in mod["dependedBy_indirect"]: + continue + mod["dependedBy_indirect"].append(dep) + + if len(mod["dependedBy"]) > 0: + maxDepth = maxDepth + 1 + + mod["_recursiveFinished"] = True + mod["dependedBy_depth"] = maxDepth + + if maxDepth > self._maxDepth: + self._maxDepth = maxDepth + depsTotal = len(mod["dependedBy"]) + len(mod["dependedBy_indirect"]) + if depsTotal > self._maxTotalDepends: + self._maxTotalDepends = depsTotal + + mod["dependedBy_total"] = depsTotal + + return maxDepth + + def __update_indirect_deps_recursive(self, mod): + # Already finished + if mod["_recursiveFinished"]: + return mod["depth"] + + maxDepth = 0 + for item in mod["deps"]: + # update child first + child = item["callee"] + depth = 
self.__update_indirect_deps_recursive(child) + if depth > maxDepth: + maxDepth = depth + for dep in child["deps"]: + if mod.dependsOn(dep["callee"]): + continue + if dep["callee"] in mod["deps_indirect"]: + continue + mod["deps_indirect"].append(dep["callee"]) + for dep in child["deps_indirect"]: + if mod.dependsOn(dep): + continue + if dep in mod["deps_indirect"]: + continue + mod["deps_indirect"].append(dep) + + if len(mod["deps"]) > 0: + maxDepth = maxDepth + 1 + + mod["_recursiveFinished"] = True + mod["depth"] = maxDepth + + if maxDepth > self._maxDepth: + self._maxDepth = maxDepth + depsTotal = len(mod["deps"]) + len(mod["deps_indirect"]) + if depsTotal > self._maxTotalDepends: + self._maxTotalDepends = depsTotal + + mod["deps_total"] = depsTotal + + return maxDepth + +if __name__ == '__main__': + mgr = ElfFileMgr("/home/z00325844/demo/archinfo/assets/rk3568/3.2.7.5") + mgr.scan_all_files() + elf = mgr.get_elf_by_path("system/lib/libskia_ohos.z.so") + print("Get skia now ...") + #print(len(elf["deps_indirect"])) + #print(len(elf["dependedBy_indirect"])) + #print(elf["deps_indirect"][0]) + + res = mgr.get_elf_by_path("system/lib/platformsdk/libhmicui18n.z.so") + print(res) + #print(mgr.get_all()) + #print(elf["deps_indirect"]) + #print(elf.matchCalls()) + #print(len(elf["dependedBy"])) diff --git a/deps_guard/elf_file_mgr/elf_walker.py b/deps_guard/elf_file_mgr/elf_walker.py new file mode 100755 index 0000000000000000000000000000000000000000..bf8a7db825ac81704e500091e4f5e0261ec9faf8 --- /dev/null +++ b/deps_guard/elf_file_mgr/elf_walker.py @@ -0,0 +1,64 @@ +#! /usr/bin/env python +#coding=utf-8 + +import string +import json +import sys +import os +import struct + +# find out/rk3568/packages/phone/system/ -type f -print | file -f - | grep ELF | cut -d":" -f1 | wc -l + +class ELFWalker(): + def __init__(self, product_out_path="/home/z00325844/demo/archinfo/assets/rk3568/3.2.7.5"): + self._files = [] + self._links = {} + self._walked = False + self._product_out_path = product_out_path + + def get_product_images_path(self): + return os.path.join(self._product_out_path, "packages/phone/") + + def get_product_out_path(self): + return self._product_out_path + + def __walk_path(self, subdir): + for root, subdirs, files in os.walk(os.path.join(self._product_out_path, subdir)): + for _filename in files: + _assetFile = os.path.join(root, _filename) + if os.path.islink(_assetFile): + if _assetFile.find(".so") > 0: + target = os.readlink(_assetFile) + self._links[_assetFile] = target + continue + if not os.path.isfile(_assetFile): + continue + with open(_assetFile, "rb") as f: + data = f.read(4) + try: + magic = struct.unpack("Bccc", data) + if magic[0] == 0x7F and magic[1] == b'E' and magic[2] == b'L' and magic[3] == b'F': + self._files.append(_assetFile) + except: + pass + + self._walked = True + + def get_link_file_map(self): + if not self._walked: + self.__walk_path("packages/phone/system") + self.__walk_path("packages/phone/vendor") + return self._links + + def get_elf_files(self): + if not self._walked: + self.__walk_path("packages/phone/system") + self.__walk_path("packages/phone/vendor") + return self._files + +if __name__ == '__main__': + elfFiles = ELFWalker() + for f in elfFiles.get_elf_files(): + print(f) + for src, target in elfFiles.get_link_file_map().items(): + print(src + " -> " + target) diff --git a/deps_guard/elf_file_mgr/hdi/__init__.py b/deps_guard/elf_file_mgr/hdi/__init__.py new file mode 100755 index 
0000000000000000000000000000000000000000..2154b7dfca327ef50d3c2a874d145ec953159510 --- /dev/null +++ b/deps_guard/elf_file_mgr/hdi/__init__.py @@ -0,0 +1,4 @@ +#! /usr/bin/env python +#coding=utf-8 + +from .hdi_parser import HdiParser diff --git a/deps_guard/elf_file_mgr/hdi/hdi_parser.py b/deps_guard/elf_file_mgr/hdi/hdi_parser.py new file mode 100755 index 0000000000000000000000000000000000000000..35e2f1beea9bf82934a8d5446323cc5684b836f4 --- /dev/null +++ b/deps_guard/elf_file_mgr/hdi/hdi_parser.py @@ -0,0 +1,56 @@ +#! /usr/bin/env python +#coding=utf-8 + +import os + +class HdiParser(object): + @staticmethod + def load(mgr, product_out_path): + # Decode hcb file to get hcs file + hdi_tool = os.path.join(product_out_path, "obj/drivers/hdf_core/framework/tools/hc-gen/hc-gen") + hcs_file = os.path.join(product_out_path, "packages/phone/vendor/etc/hdfconfig/hdf_default.hcb") + out_file = os.path.join(product_out_path, "device_info.hcs") + os.system('%s -d "%s" -o "%s"' % (hdi_tool, hcs_file, out_file)) + + try: + with open(out_file) as f: + lines = f.readlines() + except: + try: + out_file = os.path.join(product_out_path, "device_info.d.hcs") + with open(out_file) as f: + lines = f.readlines() + except: + return + + modules = [] + for line in lines: + line = line.strip() + if line.find("moduleName") < 0: + continue + parts = line.split("=") + parts = [p.strip() for p in parts] + if len(parts) < 2: + continue + name = parts[1] + if name.endswith(";"): + name = name[:-1] + name=name.strip('"') + name=name.strip("'") + if name == "": + continue + + if not name.endswith(".so"): + name = "lib%s.z.so" % name + modules.append(name) + + if not mgr: + return + + for elf in mgr.get_all(): + if elf["name"] in modules: + elf["hdiType"] = "hdi_service" + +if __name__ == "__main__": + parser = HdiParser() + parser.load(None, "/home/z00325844/ohos/vendor/hihope/rk3568/hdf_config/uhdf/device_info.hcs") diff --git a/deps_guard/elf_file_mgr/module_info/__init__.py b/deps_guard/elf_file_mgr/module_info/__init__.py new file mode 100755 index 0000000000000000000000000000000000000000..5614bdd5879fa925f07e16d2a2d4bf3fe74c36e8 --- /dev/null +++ b/deps_guard/elf_file_mgr/module_info/__init__.py @@ -0,0 +1,4 @@ +#! /usr/bin/env python +#coding=utf-8 + +from .compile_info_loader import CompileInfoLoader diff --git a/deps_guard/elf_file_mgr/module_info/compile_info_loader.py b/deps_guard/elf_file_mgr/module_info/compile_info_loader.py new file mode 100755 index 0000000000000000000000000000000000000000..114254b0cd5ad3ddeced46fbb86818e9da57a843 --- /dev/null +++ b/deps_guard/elf_file_mgr/module_info/compile_info_loader.py @@ -0,0 +1,270 @@ +#! 
/usr/bin/env python +#coding=utf-8 + +import os +import json + +class CompileInfoLoader(object): + @staticmethod + def __load_output_module_info(product_out_path): + try: + with open(os.path.join(product_out_path, "packages/phone/system_module_info.json")) as f: + modules = json.load(f) + except: + print("file info not found.") + return None + + res = [] + for item in modules: + info = {} + info["name"] = item["dest"][0] + if info["name"].startswith("updater/"): + if len(item["dest"]) > 1: + info["name"] = item["dest"][1] + else: + #print("Updater module %s ignored" % info["name"]) + continue + + if "label" in item: + info["labelPath"] = item["label"] + else: + #print("%s has no label" % info["name"]) + info["labelPath"] = "" + pos = info["labelPath"].find("(") + if pos > 0: + info["labelPath"] = info["labelPath"][:pos] + if "subsystem_name" in item: + info["subsystem"] = item["subsystem_name"] + else: + if info["labelPath"].startswith("//build/common"): + info["subsystem"] = "commonlibrary" + else: + info["subsystem"] = "unknown" + if "part_name" in item: + info["componentName"] = item["part_name"] + else: + if info["labelPath"].startswith("//build/common"): + info["componentName"] = "c_utils" + else: + info["componentName"] = "unknown" + if "label_name" in item: + info["moduleName"] = item["label_name"] + else: + info["moduleName"] = "" + info["third_party"] = False + info["chipset"] = False + info["napi"] = False + info["innerapi"] = False + if "shlib_type" in item: + info["shlib_type"] = item["shlib_type"] + if "innerapi" == info["shlib_type"]: + info["innerapi"] = True + if "innerapi_tags" in item: + info["innerapi_tags"] = ",".join(item["innerapi_tags"]) + if "chipsetsdk" in item["innerapi_tags"] or "platformsdk" in item["innerapi_tags"]: + info["innerapi"] = True + info["sa_id"] = 0 + res.append(info) + return res + + @staticmethod + def __load_predefined_module_info(): + cur_file_dir = os.path.dirname(os.path.realpath(__file__)) + with open(os.path.join(cur_file_dir, "modules.json")) as f: + info = json.load(f) + try: + with open(os.path.join(cur_file_dir, "modules-ex.json")) as f: + info_ex = json.load(f) + info = info + info_ex + except: + pass + + return info + + @staticmethod + def load(mgr, product_out_path): + info = CompileInfoLoader.__load_output_module_info(product_out_path) + if not info: + info = CompileInfoLoader.__load_predefined_module_info() + + defaultInfo = { + "subsystem": "unknown", + "componentName": "unknown", + "moduleName": "unknown", + "third_party": False, + "chipset": False, + "napi": False, + "innerapi": False, + "sa_id": 0, + "labelPath": "" + } + + for item in info: + elf = mgr.get_elf_by_path(item["name"]) + if not elf: + continue + for k in defaultInfo.keys(): + elf[k] = item[k] + + unknown_items = [] + for elf in mgr.get_all(): + if "componentName" not in elf: + print("%s does not match in module info file" % (elf["path"])) + unknown = defaultInfo.copy() + unknown["name"] = elf["path"] + unknown["fileName"] = elf["name"] + for k in defaultInfo.keys(): + elf[k] = defaultInfo[k] + unknown_items.append(unknown) + elif elf["componentName"] == "unknown": + print("%s has no componentName info" % (elf["path"])) + unknown = defaultInfo.copy() + unknown["name"] = elf["path"] + for k in defaultInfo.keys(): + defaultInfo[k] = elf[k] + unknown_items.append(unknown) + + if elf["path"].startswith("system/lib64/module/") or elf["path"].startswith("system/lib/module/"): + elf["napi"] = True + + if not elf["path"].startswith("system/"): + elf["chipset"] = True + + # 
Add if not exists + if "shlib_type" not in elf: + elf["shlib_type"] = "" + if "innerapi_tags" not in elf: + elf["innerapi_tags"] = "" + if elf["labelPath"].startswith("//third_party/"): + elf["third_party"] = True + + if len(unknown_items) > 0: + print("%d modules has no component info" % len(unknown_items)) + with open(os.path.join(product_out_path, "unknown.json"), "w") as f: + res = json.dumps(unknown_items, indent=4) + f.write(res) + + # init platformsdk, chipsetsdk, innerapi flags + for elf in mgr.get_all(): + elf["deps_internal"] = [] + elf["deps_external"] = [] + elf["dependedBy_internal"] = [] + elf["dependedBy_external"] = [] + + elf["modGroup"] = "private" + elf["platformsdk"] = False + elf["chipsetsdk"] = False + + elf["hdiType"] = "" + if elf["shlib_type"] == "hdi_proxy": + elf["hdiType"] = "hdi_proxy" # HDI proxy client library + elif elf["shlib_type"] == "hdi_stub": + elf["hdiType"] = "hdi_stub" # HDI proxy client library + + if elf["name"] in ("libc.so", "libc++.so", "libhilog.so"): + elf["innerapi"] = True + + # Highest priority + if elf["napi"]: + elf["modGroup"] = "publicapi" + + if elf["sa_id"] > 0 or elf["type"] == "bin": + elf["modGroup"] = "pentry" + + # for component dependedBy_internal and dependedBy_external + + platformsdks = [] + chipsetsdks = [] + innerapi_ccs = [] + + for dep in mgr.get_all_deps(): + caller = dep["caller"] + callee = dep["callee"] + + dep["platformsdk"] = False + dep["chipsetsdk"] = False + dep["external"] = False + + # For Inner API modules detection + if caller["componentName"] == callee["componentName"]: + caller["deps_internal"].append(dep) + callee["dependedBy_internal"].append(dep) + #if caller["napi"]: + # dep["external"] = True + else: + caller["deps_external"].append(dep) + callee["dependedBy_external"].append(dep) + dep["external"] = True + + callee["modGroup"] = "innerapi_cc" # Cross component + + if caller["napi"]: + caller["modGroup"] = "publicapi" + + # For Platform SDK modules detection + callee["modGroup"] = "innerapi_chc" # Cross high level component + + dep["platformsdk"] = True + callee["platformsdk"] = True + if callee not in platformsdks: + platformsdks.append(callee) + elif caller["chipset"] != callee["chipset"]: + # For Chipset SDK modules detection + if callee["modGroup"] not in ("publicapi", "pentry"): + callee["modGroup"] = "innerapi_chc" # Cross high level component + + dep["chipsetsdk"] = True + callee["chipsetsdk"] = True + if callee not in chipsetsdks: + chipsetsdks.append(callee) + elif dep["external"] == True: + if callee not in innerapi_ccs: + innerapi_ccs.append(callee) + + # Highest priority + if caller["napi"]: + caller["modGroup"] = "publicapi" + if callee["napi"]: + callee["modGroup"] = "publicapi" + + if caller["sa_id"] > 0 or caller["type"] == "bin": + caller["modGroup"] = "pentry" + if callee["sa_id"] > 0 or callee["type"] == "bin": + callee["modGroup"] = "pentry" + + # Set innerapi_chc_indirect modGroup and platformsdk + for mod in platformsdks: + for m in mod.getAllDependedModules(): + if m not in platformsdks and m not in chipsetsdks: + if m["modGroup"] == "private": + m["modGroup"] = "innerapi_chc_indirect" + #elif m["modGroup"] == "innerapi_cc": + # m["modGroup"] = "innerapi_chc" + + # Set innerapi_chc_indirect modGroup and chipsetsdk + for mod in chipsetsdks: + for m in mod.getAllDependedModules(): + if m not in platformsdks and m not in chipsetsdks: + if m["modGroup"] == "private": + m["modGroup"] = "innerapi_chc_indirect" + #elif m["modGroup"] == "innerapi_cc": + # m["modGroup"] = "innerapi_chc" 
+ + # Set innerapi_cc_indirect + for mod in innerapi_ccs: + if mod["modGroup"] != "innerapi_cc": + continue + for m in mod.getAllDependedModules(): + if m not in innerapi_ccs and m not in platformsdks and m not in chipsetsdks: + if m["modGroup"] == "private": + m["modGroup"] = "innerapi_cc_indirect" + +if __name__ == "__main__": + import sqlite3 + import elf_modules + conn = sqlite3.connect("symdb.db") + cursor = conn.cursor() + + mgr = elf_modules.ElfModuleMgr(cursor) + + #CompileInfoLoader.load(mgr, "modules.json") diff --git a/deps_guard/elf_file_mgr/sa/__init__.py b/deps_guard/elf_file_mgr/sa/__init__.py new file mode 100755 index 0000000000000000000000000000000000000000..4a613e318f7268489b9b8442f0c2fe927915f140 --- /dev/null +++ b/deps_guard/elf_file_mgr/sa/__init__.py @@ -0,0 +1,4 @@ +#! /usr/bin/env python +#coding=utf-8 + +from .sa import SAParser diff --git a/deps_guard/elf_file_mgr/sa/sa.py b/deps_guard/elf_file_mgr/sa/sa.py new file mode 100755 index 0000000000000000000000000000000000000000..7c7388ca4fdda39b6a877fd3a7221d6b6efbce06 --- /dev/null +++ b/deps_guard/elf_file_mgr/sa/sa.py @@ -0,0 +1,52 @@ +#! /usr/bin/env python +#coding=utf-8 + +import string +import json +import sys +import os +import xml.etree.ElementTree as ET + +def xml_node_find_by_name(node, name): + for item in node: + if item.tag == name: + return item.text + return None + +class SAParser(object): + @staticmethod + def __parse_sa_profile(all_sa, f): + root = ET.parse(f).getroot() + process = xml_node_find_by_name(root, "process") + for sa in root.findall("systemability"): + libpath = xml_node_find_by_name(sa, "libpath") + sa_key = os.path.basename(libpath) + sa_item = {} + for item in sa: + sa_item[item.tag] = item.text + sa_item["process"] = process + all_sa[sa_key] = sa_item + + @staticmethod + def __add_sa_info(all_sa, mgr): + if not mgr: + return + for mod in mgr.get_all(): + mod["sa_id"] = 0 + if mod["name"] not in all_sa: + continue + mod["sa_id"] = int(all_sa[mod["name"]]["name"]) + + @staticmethod + def load(mgr, out_root_path): + all_sa = {} + path = os.path.join(out_root_path, "packages/phone/system/profile") + for f in os.listdir(path): + full_name = os.path.join(path, f) + if os.path.isfile(full_name) and f.endswith(".xml"): + try: + SAParser.__parse_sa_profile(all_sa, full_name) + except: + pass + + SAParser.__add_sa_info(all_sa, mgr) diff --git a/deps_guard/elf_file_mgr/utils.py b/deps_guard/elf_file_mgr/utils.py new file mode 100755 index 0000000000000000000000000000000000000000..43300d20cc65d566bcfb92afac089e9b0b1ac546 --- /dev/null +++ b/deps_guard/elf_file_mgr/utils.py @@ -0,0 +1,29 @@ +#! 
/usr/bin/env python
+#coding=utf-8
+
+import os
+import sys
+import string
+
+DEBUG_NORMAL = 1
+DEBUG_VERBOSE = 2
+DEBUG_SPAM = 3
+
+debuglevel = DEBUG_NORMAL
+
+def debug(level, *msg):
+    if debuglevel >= level:
+        print(' '.join(msg))
+
+# Run an external command and return its output as a list of non-empty lines
+def command(command, *args):
+    debug(DEBUG_SPAM, "calling", command, ' '.join(args))
+    pipe = os.popen(command + ' ' + ' '.join(args), 'r')
+    output = pipe.read().strip()
+    status = pipe.close()
+    if status is not None and os.WEXITSTATUS(status) != 0:
+        print("Command failed with status", os.WEXITSTATUS(status), ":", \
+              command, ' '.join(args))
+        print("With output:", output)
+        sys.exit(1)
+    return [i for i in output.split('\n') if i]
diff --git a/deps_guard/rules/ChipsetSDK/README.md b/deps_guard/rules/ChipsetSDK/README.md
new file mode 100755
index 0000000000000000000000000000000000000000..ec7a67b941b10804891ea317435871131a0d0608
--- /dev/null
+++ b/deps_guard/rules/ChipsetSDK/README.md
@@ -0,0 +1,57 @@
+# ChipsetSDK Whitelist Rule
+
+
+## 1. Definition of the Chipset SDK
+
+System-component modules: modules installed into system.img, independent of the chip and hardware.
+
+Chipset-component modules: modules installed into chipset.img, strongly tied to the chip or hardware.
+
+The Chipset SDK is the set of system-component shared library modules that chipset-component modules are allowed to depend on.
+
+A single module in the Chipset SDK set is called a ChipsetSDK module.
+
+## 2. Rule description
+
+The Chipset SDK whitelist rule has three implications:
+
+### 2.1 Chipset SDK modules must be tagged in the build template
+
+As shown below, every Chipset SDK module must mark its type in its BUILD.gn by adding the chipsetsdk tag to innerapi_tags:
+
+```go
+ohos_shared_library("sample_sa_module") {
+  ...
+  innerapi_tags = [ "chipsetsdk" ]
+  ...
+}
+```
+
+Likewise, do not add this tag to modules that are not part of the Chipset SDK.
+
+### 2.2 Chipset-component modules must not depend on modules outside the Chipset SDK set
+
+### 2.3 System-component modules must not depend on any chipset-component module
+
+## 3. Violation scenarios and suggested fixes
+
+### 3.1 Check whether the violating module is installed into the correct component
+
+As shown below, the install_images option decides whether a module is installed into the system component or the chipset component. If a system-component module is mistakenly installed into the chipset component, it may violate this rule simply because it depends on system-component modules outside the Chipset SDK.
+
+```go
+ohos_shared_library("sample_module") {
+  ...
+  install_images = [ chipset_base_dir ]
+  ...
+}
+```
+
+Therefore, first check whether the violating module is installed into the wrong component.
+
+### 3.2 Apply for a new Chipset SDK module
+
+If the analysis shows that a new Chipset SDK module really is needed, apply to the architecture SIG to add it to the Chipset SDK whitelist.
diff --git a/deps_guard/rules/ChipsetSDK/whitelist.json b/deps_guard/rules/ChipsetSDK/whitelist.json
new file mode 100755
index 0000000000000000000000000000000000000000..0637a088a01e8ddab3bf3fa98dbe804cbde1a0dc
--- /dev/null
+++ b/deps_guard/rules/ChipsetSDK/whitelist.json
@@ -0,0 +1 @@
+[]
\ No newline at end of file
diff --git a/deps_guard/rules/NO-Depends-On-NAPI/README.md b/deps_guard/rules/NO-Depends-On-NAPI/README.md
new file mode 100755
index 0000000000000000000000000000000000000000..c0310993ebc40e967bb784bc1d7c9f37af79660a
--- /dev/null
+++ b/deps_guard/rules/NO-Depends-On-NAPI/README.md
@@ -0,0 +1,64 @@
+# NO-Depends-On-NAPI Rule
+
+
+## 1. Definition of a NAPI module
+
+A NAPI module is the shared library that implements the interfaces of a TS module imported by an application, for example:
+
+```javascript
+import @ohos.deviceinfo
+```
+
+When an application imports this module, the framework essentially performs dlopen("/system/lib{64}/module/lib**deviceinfo**.z.so"); libdeviceinfo.z.so is the NAPI module here.
+
+All NAPI modules are installed under the /system/lib{64}/module directory.
+
+## 2. Rule description
+
+As its name says, the NO-Depends-On-NAPI rule forbids any module from depending on a NAPI module, for the following reasons:
+
+- The linker will no longer search /system/lib{64}/module for shared libraries; the libraries in this path are only reachable through the framework's dlopen when an application imports the corresponding TS module.
+- The build framework applies global symbol optimization to all NAPI modules, so a NAPI module only exports the symbols listed in the following version script:
+
+```apl
+1.0 {
+    global:
+        _init;
+        _fini;
+        NAPI_*_GetABCCode;
+        NAPI_*_GetJSCode;
+
+    local:
+        *;
+};
+```
+
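+For reference, a NAPI implementation library is usually declared with ohos_shared_library and installed into the module directory. The following is only a minimal sketch; every name in it (target, source file, subsystem and part) is made up for illustration:
+
+```go
+ohos_shared_library("sample_napi_module") {
+  sources = [ "src/sample_napi_module.cpp" ]
+
+  # Installing under /system/lib{64}/module is what makes this a NAPI module
+  relative_install_dir = "module"
+
+  subsystem_name = "sample_subsystem"
+  part_name = "sample_part"
+}
+```
+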
+## 3. Violation scenarios and suggested fixes
+
+### 3.1 A non-NAPI module is installed under /system/lib{64}/module
+
+Some non-NAPI modules (which do not implement the API of any @ohos.xxx TS module) have an incorrectly written BUILD.gn that adds the following field, causing them to be installed into the NAPI module directory:
+
+```go
+relative_install_dir = "module"
+```
+
+**Fix**: remove the relative_install_dir field.
+
+### 3.2 Another module really uses symbols from a NAPI module
+
+In this case, move the used symbols from the NAPI module down into the corresponding Inner API module so callers can use them from there, removing the dependency on the NAPI module.
+
+### 3.3 Unit-test code needs symbols from a NAPI module
+
+In this case, add a static library target for the NAPI module and let the unit-test code list that static target in its deps.
+
+
+## 4. Exceptions
+
+Because the linker will no longer search /system/lib{64}/module, no exception to this rule is allowed.
+
+The current whitelist only records existing modules that still need to be cleaned up; it must be emptied once the cleanup is done.
+
diff --git a/deps_guard/rules/NO-Depends-On-NAPI/whitelist.json b/deps_guard/rules/NO-Depends-On-NAPI/whitelist.json
new file mode 100755
index 0000000000000000000000000000000000000000..0d4f877db018085bf2f45650d2634f3f92a82ff3
--- /dev/null
+++ b/deps_guard/rules/NO-Depends-On-NAPI/whitelist.json
@@ -0,0 +1,31 @@
+[
+    "screenlock_client",
+    "fileio",
+    "downloadsingle",
+    "tel_call_manager_api",
+    "rpc",
+    "color_picker",
+    "accessibility",
+    "stationary",
+    "remote_file_share_native",
+    "screenlock_utils",
+    "window_animation",
+    "cryptoframework_napi",
+    "rdb",
+    "distributeddataobject_impl",
+    "medialibrary",
+    "image",
+    "media_avplayer",
+
+    "rpc",
+    "uri",
+    "url",
+    "xml",
+    "convertxml",
+    "buffer",
+    "medical",
+    "devicestatus",
+    "geolocation",
+    "screenlock",
+    "screenlock_server"
+]
diff --git a/deps_guard/rules/NO-Depends-On-SA/README.md b/deps_guard/rules/NO-Depends-On-SA/README.md
new file mode 100755
index 0000000000000000000000000000000000000000..c4bdb52a7b3c64b329ec4d8644798f40e943b80b
--- /dev/null
+++ b/deps_guard/rules/NO-Depends-On-SA/README.md
@@ -0,0 +1,104 @@
+# NO-Depends-On-SA Rule
+
+
+## 1. Definition of an SA module
+
+An SA module is the shared library behind a System Ability; SA modules are normally described in the profiles under /system/profile/, for example:
+
+```xml
+<process>telephony</process>
+<systemability>
+    <libpath>libtel_cellular_call.z.so</libpath>
+</systemability>
+<systemability>
+    <libpath>libtel_cellular_data.z.so</libpath>
+</systemability>
+<systemability>
+    <libpath>libtel_core_service.z.so</libpath>
+</systemability>
+<systemability>
+    <libpath>libtel_sms_mms.z.so</libpath>
+</systemability>
+...
+```
+
+As shown above, the shared libraries listed in libpath are loaded dynamically by the SA framework via dlopen.
+
+## 2. Rule description
+
+The NO-Depends-On-SA rule has two implications:
+
+### 2.1 SA modules must be tagged in the build template
+
+As shown below, every SA module must mark its type in its BUILD.gn through the shlib_type field:
+
+```go
+ohos_shared_library("sample_sa_module") {
+  ...
+  shlib_type = "sa"
+  ...
+}
+```
+
+Likewise, do not add the shlib_type tag to modules that are not SA modules.
+
+### 2.2 Depending on SA modules is not allowed
+
+The ohos_shared_library template in the build framework applies global symbol optimization to every SA module (shlib_type is "sa"), so by default SA modules expose no symbols; this reduces their size and speeds up their loading.
+
+```go
+# Hide symbols for all sa libraries if not specified by version_script
+if (defined(invoker.shlib_type) && invoker.shlib_type == "sa") {
+  if (!defined(invoker.version_script)) {
+    configs += [ "//build/config/gcc:symbol_visibility_hidden" ]
+  }
+}
+```
+
+## 3. Violation scenarios and suggested fixes
+
+### 3.1 An SA module is missing the shlib_type tag
+
+Fix: add the shlib_type = "sa" tag as described in section 2.1.
+
+### 3.2 Another module really uses symbols from an SA module
+
+**Option 1**: move the used symbols from the SA module down into the corresponding Inner API module so callers can use them from there, removing the dependency on the SA module.
+
+**Option 2**: an SA module that supports plug-in extensions has to provide APIs for its plug-ins. In this case, use version_script to explicitly declare the symbols the SA module exports, for example:
+
+```go
+ohos_shared_library("partly_exported_symbols") {
+  ...
+  version_script = "libbeget_proxy.versionscript"
+  ...
+}
+```
+
+An example version_script file:
+
+```apl
+1.0 {
+    global:
+        AclGetDevUdid;
+        AclGetSerial;
+        ServiceWatchForStatus;
+        SystemWatchParameter;
+        WatchParameter;
+        Remove*Watcher;
+    local:
+        *;
+};
+```
+
+> Tip: if the symbol names follow a pattern, * can be used as a wildcard to keep the version_script short, e.g. Acl*;.
+
+### 3.3 Unit-test code needs symbols from an SA module
+
+In this case, add a static library target for the SA module and let the unit-test code list that static target in its deps (see the sketch below).
+
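+As an illustration of this static-library approach (a sketch only; every target, path and file name here is hypothetical), the SA sources can additionally be packaged into a static target that the test target lists in its deps, so the test links the code directly instead of linking against the SA shared library:
+
+```go
+# Static copy of the SA sources, intended for tests only
+ohos_static_library("sample_sa_static") {
+  sources = [ "src/sample_sa_service.cpp" ]
+  include_dirs = [ "include" ]
+}
+
+ohos_unittest("sample_sa_unittest") {
+  module_out_path = "sample_part/sample_sa"
+  sources = [ "test/sample_sa_test.cpp" ]
+  deps = [ ":sample_sa_static" ]
+}
+```
+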
+## 4. Exceptions
+
+By default SA modules expose no symbols. If symbols must be provided, declare them through a version_script; there is no other exception mechanism.
+
+The current whitelist only records existing modules that still need to be cleaned up; it must be emptied once the cleanup is done.
\ No newline at end of file
diff --git a/deps_guard/rules/NO-Depends-On-SA/whitelist.json b/deps_guard/rules/NO-Depends-On-SA/whitelist.json
new file mode 100755
index 0000000000000000000000000000000000000000..0637a088a01e8ddab3bf3fa98dbe804cbde1a0dc
--- /dev/null
+++ b/deps_guard/rules/NO-Depends-On-SA/whitelist.json
@@ -0,0 +1 @@
+[]
\ No newline at end of file
diff --git a/deps_guard/rules_checker/__init__.py b/deps_guard/rules_checker/__init__.py
new file mode 100755
index 0000000000000000000000000000000000000000..46e312058782cce0480f541e744587ea25354595
--- /dev/null
+++ b/deps_guard/rules_checker/__init__.py
@@ -0,0 +1,15 @@
+#! /usr/bin/env python
+#coding=utf-8
+
+from .napi_rule import NapiRule
+
+def check_all_rules(mgr, args):
+    passed = True
+    napi = NapiRule(mgr, args)
+    if not napi.check():
+        passed = False
+
+    if args.no_fail:
+        return True  # report violations but never fail the build
+
+    return passed
diff --git a/deps_guard/rules_checker/base_rule.py b/deps_guard/rules_checker/base_rule.py
new file mode 100755
index 0000000000000000000000000000000000000000..8005298120f66c29ae4ba1b3d86a3bcd7032a255
--- /dev/null
+++ b/deps_guard/rules_checker/base_rule.py
@@ -0,0 +1,40 @@
+#! /usr/bin/env python
+#coding=utf-8
+
+import os
+import json
+
+class BaseRule(object):
+    RULE_NAME = ""
+
+    def __init__(self, mgr, args):
+        self._mgr = mgr
+        self.__load_white_lists(args)
+
+    def __load_white_lists(self, args):
+        if args and args.rules:
+            rules_path = args.rules
+        else:
+            rules_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "../rules")
+
+        rules_file = os.path.join(rules_path, self.__class__.RULE_NAME, "whitelist.json")
+        with open(rules_file, "rb") as f:
+            self.__white_lists = json.load(f)
+
+    def get_mgr(self):
+        return self._mgr
+
+    def get_white_lists(self):
+        return self.__white_lists
+
+    def log(self, info):
+        print(info)
+
+    def get_help_url(self):
+        return "https://gitee.com/openharmony/developtools_integration_verification/deps_guard/rules/%s/README.md" % self.__class__.RULE_NAME
+
+    # To be overridden by concrete rules
+    def check(self):
+        # Default pass
+        return True
diff --git a/deps_guard/rules_checker/napi_rule.py b/deps_guard/rules_checker/napi_rule.py
new file mode 100755
index 0000000000000000000000000000000000000000..93594bb5ce8e15828bf1412f5cb74a28e4bcdadb
--- /dev/null
+++ b/deps_guard/rules_checker/napi_rule.py
@@ -0,0 +1,42 @@
+#! /usr/bin/env python
+#coding=utf-8
+
+import json
+
+from .base_rule import BaseRule
+
+class NapiRule(BaseRule):
+    RULE_NAME = "NO-Depends-On-NAPI"
+
+    def __check_depends_on_napi(self):
+        lists = self.get_white_lists()
+
+        passed = True
+
+        # Check whether any napi module is depended on by other modules
+        for mod in self.get_mgr().get_all():
+            #print("Check %s now " % mod["path"])
+            if not mod["napi"]:
+                continue
+
+            if len(mod["dependedBy"]) == 0:
+                continue
+
+            targetName = mod["labelPath"][mod["labelPath"].find(":")+1:]
+            if targetName in lists:
+                continue
+
+            self.log("NOT ALLOWED: napi module %s depended by:" % mod["name"])
+            for dep in mod["dependedBy"]:
+                caller = dep["caller"]
+                self.log("   module [%s] defined in [%s]" % (caller["name"], caller["labelPath"]))
+            passed = False
+
+        if not passed:
+            self.log("  Please refer to: %s" % self.get_help_url())
+
+        return passed
+
+    def check(self):
+        self.log("Do %s rule checking now:" % self.__class__.RULE_NAME)
+        return self.__check_depends_on_napi()
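For reference, the other rules listed in the top-level README would plug into rules_checker the same way as NapiRule does. The sketch below is not part of this patch; it only illustrates the extension point, with a hypothetical SaRule class that reads its whitelist from rules/NO-Depends-On-SA/whitelist.json and would be registered in check_all_rules next to NapiRule:

```python
#! /usr/bin/env python
#coding=utf-8

from .base_rule import BaseRule

class SaRule(BaseRule):
    RULE_NAME = "NO-Depends-On-SA"

    def check(self):
        self.log("Do %s rule checking now:" % self.__class__.RULE_NAME)
        whitelist = self.get_white_lists()
        passed = True
        for mod in self.get_mgr().get_all():
            # sa_id is filled in by SAParser; non-SA modules keep sa_id == 0
            if mod.get("sa_id", 0) <= 0:
                continue
            if len(mod["dependedBy"]) == 0 or mod["name"] in whitelist:
                continue
            self.log("NOT ALLOWED: SA module %s is depended on by other modules" % mod["name"])
            passed = False
        return passed
```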