diff --git a/hb/modules/ohos_indep_build_module.py b/hb/modules/ohos_indep_build_module.py index 940956846186b552db203b113c2bc12c960c350c..b29c44e823c4538d5bce3104b000cc7b411a36b7 100644 --- a/hb/modules/ohos_indep_build_module.py +++ b/hb/modules/ohos_indep_build_module.py @@ -26,6 +26,7 @@ from resolver.indep_build_args_resolver import get_part_name from containers.arg import BuildPhase import argparse import sys +import os class OHOSIndepBuildModule(IndepBuildModuleInterface): @@ -97,9 +98,17 @@ class OHOSIndepBuildModule(IndepBuildModuleInterface): LogUtil.hb_info(f'{",".join(get_part_name())} {message}') def _target_compilation(self): + self._rename_buildlog() self._run_prebuilts() self._run_hpm() self._run_indep_build() + + def _rename_buildlog(self): + variant = self.args_dict.get("variant").arg_value + logpath = os.path.join('out', variant, 'build.log') + if os.path.exists(logpath): + mtime = os.stat(logpath).st_mtime + os.rename(logpath, '{}/build.{}.log'.format(os.path.dirname(logpath), mtime)) def _run_prebuilts(self): self._run_phase(BuildPhase.PRE_BUILD) diff --git a/hb/resolver/indep_build_args_resolver.py b/hb/resolver/indep_build_args_resolver.py index 7659b88e9bff9bd0fd473d1d0482de7d80c9f9ed..11c927ccab90e15061e240f17d1b1e60e76aff7f 100644 --- a/hb/resolver/indep_build_args_resolver.py +++ b/hb/resolver/indep_build_args_resolver.py @@ -55,16 +55,14 @@ def _search_bundle_path(part_name: str) -> str: bundle_path = None try: bundle_path = search_bundle_file_from_ccache(part_name) - LogUtil.hb_info(f"Searching bundle.json path in ccache") if not bundle_path: bundle_path = ComponentUtil.search_bundle_file(part_name) - LogUtil.hb_info(f"Searching bundle.json path in source code tree") else: LogUtil.hb_info( "The bundle.json path of component {} is {}, if it's incorrect, please delete {} and try again. 
".format( part_name, bundle_path, COMPONENTS_PATH_DIR)) except Exception as e: - raise OHOSException('Please check the bundle.json file of {} : {}'.format(part_name, e)) + raise OHOSException('Please check the bundle.json files you updated : {}'.format(e)) if not bundle_path: LogUtil.hb_info('ERROR argument "hb build ": Invalid part_name "{}". '.format(part_name)) sys.exit(1) @@ -222,6 +220,8 @@ class IndepBuildArgsResolver(ArgsResolverInterface): @staticmethod def resolve_build_target(target_arg: Arg, indep_build_module: IndepBuildModuleInterface): + if "--build-target" in sys.argv and not target_arg.arg_value: + raise OHOSException("ERROR argument \"--build-target\": no build target. ") indep_build_module.indep_build.regist_flag('build-target', target_arg.arg_value) @staticmethod @@ -293,4 +293,8 @@ class IndepBuildArgsResolver(ArgsResolverInterface): LogUtil.hb_info(f"Failed to execute ccache command: {e}") def resolve_prebuilts_download(self, target_arg: Arg, indep_build_module: IndepBuildModuleInterface): - indep_build_module.prebuilts.regist_flag('skip-prebuilts', target_arg.arg_value) \ No newline at end of file + indep_build_module.prebuilts.regist_flag('skip-prebuilts', target_arg.arg_value) + + def resolve_local_binarys(self, target_arg: Arg, indep_build_module: IndepBuildModuleInterface): + indep_build_module.indep_build.regist_flag('local-binarys', target_arg.arg_value) + indep_build_module.hpm.regist_flag('local-binarys', target_arg.arg_value) \ No newline at end of file diff --git a/hb/resources/args/default/indepbuildargs.json b/hb/resources/args/default/indepbuildargs.json index be351ba5771670f693c4fd73f97220f508f7b827..567716cd874a064081857f04722aa7d300a8b715 100644 --- a/hb/resources/args/default/indepbuildargs.json +++ b/hb/resources/args/default/indepbuildargs.json @@ -171,5 +171,15 @@ "arg_attribute": {}, "resolve_function": "resolve_ccache", "testFunction": "" + }, + "local_binarys": { + "arg_name": "--local-binarys", + "argDefault": "", + 
"arg_help": "Default:''. Help:Set local binarys path, so that you can use local packages instead of downloading from remote", + "arg_phase": ["hpmDownload","indepCompilation"], + "arg_type": "str", + "arg_attribute": {}, + "resolve_function": "resolve_local_binarys", + "testFunction": "" } } \ No newline at end of file diff --git a/hb/services/hpm.py b/hb/services/hpm.py index e621bfc12f3e884694147687db87b74542040eff..c3e772a6ce39706b23de3ac8b4b9895872ba74e9 100644 --- a/hb/services/hpm.py +++ b/hb/services/hpm.py @@ -46,9 +46,9 @@ class Hpm(BuildFileGeneratorInterface): def __init__(self): super().__init__() - self._regist_hpm_path() def run(self): + self._regist_hpm_path() self.execute_hpm_cmd(CMDTYPE.BUILD) @throw_exception @@ -112,17 +112,15 @@ class Hpm(BuildFileGeneratorInterface): @throw_exception def _execute_hpm_build_cmd(self, **kwargs): - if self.flags_dict.get("skip-download") or self.flags_dict.get("fast-rebuild"): + if self._check_skip_download(): + LogUtil.hb_info("Skip download binary dependencies") return else: - self.flags_dict.pop("skip-download") + self._pop_useless_flags() LogUtil.hb_info("Tips: If you want to skip download binary dependencies, please use --skip-download") hpm_build_cmd = [self.exec, "build"] + self._convert_flags() variant = hpm_build_cmd[hpm_build_cmd.index("--variant") + 1] logpath = os.path.join('out', variant, 'build.log') - if os.path.exists(logpath): - mtime = os.stat(logpath).st_mtime - os.rename(logpath, '{}/build.{}.log'.format(os.path.dirname(logpath), mtime)) self._run_hpm_cmd(hpm_build_cmd, log_path=logpath) @throw_exception @@ -146,19 +144,17 @@ class Hpm(BuildFileGeneratorInterface): self._run_hpm_cmd(hpm_update_cmd) def _run_hpm_cmd(self, cmd, log_path): - LogUtil.hb_info(f"Hpm cmd is: {cmd}") - ret_code = SystemUtil.exec_command( + cmd_str = " ".join(cmd) + SystemUtil.exec_command( cmd, log_path=log_path, - pre_msg="start run hpm command", + pre_msg=f"Executing hpm command: {cmd_str}", after_msg="end hpm 
command", custom_line_handle=self._custom_line_handle, ) hpm_info = get_hpm_check_info() if hpm_info: print(hpm_info) - if ret_code != 0: - raise OHOSException(f"ERROR: hpm command failed, cmd: {cmd}", "0001") def _custom_line_handle(self, line): @@ -247,3 +243,20 @@ class Hpm(BuildFileGeneratorInterface): illegal_components.append(component) if illegal_components: raise OHOSException('ERROR argument "--parts": Invalid parts "{}". '.format(illegal_components)) + + def _check_skip_download(self): + if self.flags_dict.get("skip-download"): + return True + if self.flags_dict.get("fast-rebuild"): + return True + if self.flags_dict.get("local-binarys"): + return True + return False + + def _pop_useless_flags(self): + if "skip-download" in self.flags_dict: + self.flags_dict.pop("skip-download") + if "fast-rebuild" in self.flags_dict: + self.flags_dict.pop("fast-rebuild") + if "local-binarys" in self.flags_dict: + self.flags_dict.pop("local-binarys") \ No newline at end of file diff --git a/hb/services/indep_build.py b/hb/services/indep_build.py index 973077cab4b4f31a00ed13d25b4a07a001696173..6695012e87776c7e58f404f3d418ee9576becfd3 100644 --- a/hb/services/indep_build.py +++ b/hb/services/indep_build.py @@ -20,6 +20,9 @@ from services.interface.build_file_generator_interface import BuildFileGenerator from util.system_util import SystemUtil from exceptions.ohos_exception import OHOSException import os +from util.log_util import LogUtil +import json +import shutil class IndepBuild(BuildFileGeneratorInterface): @@ -33,19 +36,17 @@ class IndepBuild(BuildFileGeneratorInterface): cmd.extend(flags_list) variant = self.flags_dict["variant"] logpath = os.path.join('out', variant, 'build.log') - if self.flags_dict.get("skip-download") or self.flags_dict.get("fast-rebuild"): - if os.path.exists(logpath): - mtime = os.stat(logpath).st_mtime - os.rename(logpath, '{}/build.{}.log'.format(os.path.dirname(logpath), mtime)) - - ret_code = SystemUtil.exec_command(cmd, log_path=logpath, 
pre_msg="run indep build", + SystemUtil.exec_command(cmd, log_path=logpath, pre_msg="run indep build", after_msg="indep build end") - if ret_code != 0: - raise OHOSException(f'ERROR: build_indep.sh encountered a problem, please check, cmd: {cmd}', '0001') def _convert_flags(self) -> list: flags_list = [] - flags_list.append(os.path.join(os.path.expanduser("~"), ".hpm/.hpmcache")) + if self.flags_dict["local-binarys"]: + flags_list.append(self.flags_dict["local-binarys"]) + self._generate_dependences_json(self.flags_dict["local-binarys"]) + self.flags_dict.pop("local-binarys") + else: + flags_list.append(os.path.join(os.path.expanduser("~"), ".hpm/.hpmcache")) flags_list.append(self.flags_dict["path"]) build_type = self.flags_dict["buildType"] if build_type == "both": @@ -69,3 +70,58 @@ class IndepBuild(BuildFileGeneratorInterface): flags_list.append(f"--{key}") flags_list.extend(self.flags_dict[key]) return flags_list + + def _generate_dependences_json(self, local_binarys: str): + if not os.path.exists(local_binarys): + raise Exception(f"ERROR: local binarys {local_binarys} does not exist, please check") + + dependences_json = os.path.join(local_binarys, "dependences.json") + dirname = os.path.basename(local_binarys) + if os.path.exists(dependences_json) and dirname == ".hpmcache": + LogUtil.hb_info(f"use dependences.json under .hpmcache, skip generating") + return + + dependences_dict = dict() + binarys_path = os.path.join(local_binarys, "binarys") + flag_path = os.path.join(binarys_path, "binarys_flag") + if os.path.exists(binarys_path): + if os.path.exists(flag_path): + LogUtil.hb_info(f"remove {binarys_path}") + shutil.rmtree(binarys_path) + else: + renamed_path = os.path.join(local_binarys, "renamed_binarys") + if os.path.exists(renamed_path): + LogUtil.hb_info(f"remove {renamed_path}") + shutil.rmtree(renamed_path) + LogUtil.hb_info(f"rename {binarys_path} to {renamed_path}") + shutil.move(binarys_path, renamed_path) + LogUtil.hb_info(f"create 
{binarys_path}") + os.makedirs(binarys_path) + + for item in os.listdir(local_binarys): + if item == "binarys": + continue + item_path = os.path.join(local_binarys, item) + if os.path.isdir(item_path): + os.symlink(item_path, os.path.join(binarys_path, item)) + open(flag_path, "w").close() + + ignore_directories = ['innerapis', 'common', 'binarys'] + for root, dirs, files in os.walk(local_binarys): + dirs[:] = [d for d in dirs if d not in ignore_directories] + for file_name in files: + if file_name == "bundle.json": + self._update_dependences_dict(local_binarys, root, file_name, dependences_dict) + LogUtil.hb_info(f"generating {dependences_json}") + with open(dependences_json, 'w') as f: + json.dump(dependences_dict, f, indent=4) + + def _update_dependences_dict(self, local_binarys: str, root: str, file_name: str, dependences_dict: dict): + bundle_json_path = os.path.join(root, file_name) + with open(bundle_json_path, 'r') as f: + bundle_data = json.load(f) + component_name = bundle_data["component"]["name"] + relative_path = os.path.relpath(root, local_binarys) + dependences_dict[component_name] = { + "installPath": "/" + relative_path + } \ No newline at end of file diff --git a/hb/services/prebuilts.py b/hb/services/prebuilts.py index 20d9c6c48d8f982268bccb3fab18ee824c028201..3347f720ac71ead341c4d58aebc2165e03a3013c 100644 --- a/hb/services/prebuilts.py +++ b/hb/services/prebuilts.py @@ -22,21 +22,29 @@ from services.interface.build_file_generator_interface import ( BuildFileGeneratorInterface, ) from util.log_util import LogUtil +import os +import json +import time class PreuiltsService(BuildFileGeneratorInterface): def __init__(self): + ohos_dir = self.get_ohos_dir() + self.last_update = os.path.join(ohos_dir, "prebuilts/.local_data/last_update.json") super().__init__() def run(self): - if not "--open-prebuilts" in sys.argv: + if not "--enable-prebuilts" in sys.argv: return + part_names = self._get_part_names() + if not 
self.check_whether_need_update(part_names): + LogUtil.hb_info("you have already execute prebuilts download step and no configs changed, skip this step") + return flags_list = self._convert_flags() if "--skip-prebuilts" in flags_list: print("Skip preuilts download") return - part_names = self._get_part_names() try: cmd = ["/bin/bash", "build/prebuilts_config.sh", "--part-names"] cmd.extend(part_names) @@ -48,11 +56,88 @@ class PreuiltsService(BuildFileGeneratorInterface): LogUtil.hb_info(tips) subprocess.run( cmd, check=True, stdout=None, stderr=None # 直接输出到终端 - ) # 直接输出到终端 + ) + self.write_last_update({"last_update_time": time.time(), "parts": part_names}) except subprocess.CalledProcessError as e: print(f"{cmd} execute failed: {e.returncode}") raise e + def check_whether_need_update(self, part_names) -> bool: + last_update = self.read_last_update() + last_update_time = last_update.get("last_update_time", 0) + last_update_parts = last_update.get("parts", []) + # 判断是否有上次下载记录,没有则需要重新执行预下载 + if not last_update_time: + LogUtil.hb_info("No last update record found, will update prebuilts") + return True + else: + # 判断预下载相关的配置文件和脚本是否有变更,若有则需要重新执行预下载 + if self.check_file_changes(): + LogUtil.hb_info("Prebuilts config file has changed, will update prebuilts") + return True + # 判断独立编译的部件是否有变更,若有则需要重新执行预下载 + if part_names and not set(part_names).issubset(set(last_update_parts)): + LogUtil.hb_info("The specified part names have changed, will update prebuilts") + return True + return False + + def read_last_update(self): + if not os.path.exists(self.last_update): + return {} + try: + with open(self.last_update, 'r') as f: + return json.load(f) + except Exception as e: + LogUtil.hb_error(f"Failed to read last update file: {e}") + return {} + + def write_last_update(self, data): + os.makedirs(os.path.dirname(self.last_update), exist_ok=True) + try: + with open(self.last_update, 'w') as f: + json.dump(data, f, indent=4) + except Exception as e: + LogUtil.hb_error(f"Failed to write 
last update file: {e}") + + def get_ohos_dir(self): + cur_dir = os.getcwd() + while cur_dir != "/": + global_var = os.path.join( + cur_dir, 'build', 'hb', 'resources', 'global_var.py') + if os.path.exists(global_var): + return cur_dir + cur_dir = os.path.dirname(cur_dir) + raise Exception("you must run this script in ohos dir") + + def get_preguilt_download_related_files_mtimes(self) -> dict: + dir_path = os.path.join(self.get_ohos_dir(), "build/prebuilts_service") + mtimes = {} + for root, _, files in os.walk(dir_path): + for file in files: + file_path = os.path.join(root, file) + mtimes[file_path] = os.path.getmtime(file_path) + prebuilts_config_json_path = os.path.join(self.get_ohos_dir(), "build/prebuilts_config.json") + prebuilts_config_py_path = os.path.join(self.get_ohos_dir(), "build/prebuilts_config.py") + prebuilts_config_shell_path = os.path.join(self.get_ohos_dir(), "build/prebuilts_config.sh") + mtimes.update({prebuilts_config_json_path: os.path.getmtime(prebuilts_config_json_path)}) + mtimes.update({prebuilts_config_py_path: os.path.getmtime(prebuilts_config_py_path)}) + mtimes.update({prebuilts_config_shell_path: os.path.getmtime(prebuilts_config_shell_path)}) + return mtimes + + def check_file_changes(self) -> bool: + """ + check if the directory has changed by comparing file modification times. 
+ :param dir_path: directory + :param prev_mtimes: last known modification times of files in the directory + :return: if the directory has changed, and the current modification times of files in the directory + """ + last_update = self.read_last_update().get("last_update", 0) + current_mtimes = self.get_preguilt_download_related_files_mtimes() + for _, mtime in current_mtimes.items(): + if mtime > last_update: + return True + return False + def _get_part_names(self): part_name_list = [] if len(sys.argv) > 2 and not sys.argv[2].startswith("-"): @@ -74,4 +159,4 @@ class PreuiltsService(BuildFileGeneratorInterface): if isinstance(self.flags_dict[key], list) and self.flags_dict[key]: flags_list.append(f"--{key}") flags_list.extend(self.flags_dict[key]) - return flags_list + return flags_list \ No newline at end of file diff --git a/hb/util/component_util.py b/hb/util/component_util.py index c6f09cf2cb1f47de07ec3fa043b29f2293e6a825..25d70cc5078c30a196fe5b4de4028fecef53d801 100644 --- a/hb/util/component_util.py +++ b/hb/util/component_util.py @@ -129,8 +129,9 @@ def _recurrent_search_bundle_file(path: str): def get_all_bundle_path(path): bundles_path = {} for root, dirnames, filenames in os.walk(path): - if root == os.path.join(path, "out") or root == os.path.join(path, ".repo"): - continue + if root == path: + # ignore out, .repo, binarys, prebuilts, kernel directory in top level + dirnames[:] = [d for d in dirnames if d not in ["out", ".repo", "binarys", "prebuilts", "kernel"]] for filename in filenames: if filename == "bundle.json": bundle_json = os.path.join(root, filename) diff --git a/hb/util/io_util.py b/hb/util/io_util.py index d9e1349019b81ecc412b012bcad4ba5748ad7735..2b635b20eedb42861a8e9713c62952ce0a363af2 100755 --- a/hb/util/io_util.py +++ b/hb/util/io_util.py @@ -32,10 +32,13 @@ class IoUtil(metaclass=NoInstance): def read_json_file(input_file: str) -> dict: if not os.path.isfile(input_file): raise OHOSException(f'{input_file} not found', '0008') - - with 
open(input_file, 'rb') as input_f: - data = json.load(input_f) - return data + try: + with open(input_file, 'rb') as input_f: + data = json.load(input_f) + return data + except json.JSONDecodeError as e: + print(f'Error reading JSON file {input_file}') + raise e @staticmethod def dump_json_file(dump_file: str, json_data: dict or list): diff --git a/hb/util/system_util.py b/hb/util/system_util.py index d29955675c7bd30a47415e26974bdd898672fa31..024ad19806de15bc67f0813b819facc5f7f339b7 100755 --- a/hb/util/system_util.py +++ b/hb/util/system_util.py @@ -126,8 +126,9 @@ class SystemUtil(metaclass=NoInstance): ret_code = process.returncode if ret_code != 0: + cmd_str = " ".join(cmd) + LogUtil.hb_error(f"command failed: \"{cmd_str}\" , ret_code: {ret_code}") LogUtil.get_failed_log(log_path) - return ret_code @staticmethod def get_current_time(time_type: str = 'default'): diff --git a/indep_configs/build_indep.sh b/indep_configs/build_indep.sh index 27f29303f0804e07dc3ab138db0b8ad67374cf24..3c36c232583803797d504cb0d99fbb30a91d7575 100755 --- a/indep_configs/build_indep.sh +++ b/indep_configs/build_indep.sh @@ -12,6 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. +cleanup(){ + rm -rf .gn + ln -s build/core/gn/dotfile.gn .gn +} + +trap cleanup EXIT + set -e echo $1 $2 $3 TEST_FILTER=$3 @@ -84,10 +91,23 @@ fi if [ -d "binarys/third_party/typescript" ];then echo "typescript directory exists" + typescript_tgz_path="binarys/third_party/typescript/innerapis/build_typescript_etc/libs/ohos-typescript-4.9.5-r4.tgz" + if [ -f $typescript_tgz_path ]; then + echo "ohos-typescript-4.9.5-r4.tgz exists, starting extraction..." + tar -zxvf "$typescript_tgz_path" -C "binarys/third_party/typescript/innerapis/build_typescript_etc/libs/" + if [ $? 
-eq 0 ]; then + echo "Extraction completed successfully" + else + echo "Error: Failed to extract the tgz file" + fi + else + echo "Error: ohos-typescript-4.9.5-r4.tgz does not exists" + fi if [ ! -d "third_party/typescript" ]; then echo "third_party/typescript not exist, copy from binarys." mkdir -p "third_party" cp -r binarys/third_party/typescript third_party + cp -r binarys/third_party/typescript/innerapis/build_typescript_etc/libs/package/* third_party/typescript/ fi fi ${PYTHON3} ${SOURCE_ROOT_DIR}/build/indep_configs/scripts/generate_target_build_gn.py -p $2 -rp ${SOURCE_ROOT_DIR} -t ${TEST_FILTER} @@ -99,9 +119,5 @@ if [ $? -ne 0 ]; then exit 1 fi -rm -rf .gn -ln -s build/core/gn/dotfile.gn .gn - echo -e "\033[0;33myou can use --skip-download to skip download binary dependencies while using hb build command\033[0m" - exit 0 diff --git a/indep_configs/config/binary_package_exclusion_list.json b/indep_configs/config/binary_package_exclusion_list.json index 9dad946525b2ef942a733775fcd250eb82d47dbe..564f0f999bbf1c776b60402b1ab857fce2a9e708 100644 --- a/indep_configs/config/binary_package_exclusion_list.json +++ b/indep_configs/config/binary_package_exclusion_list.json @@ -1,15 +1,6 @@ [ "googletest", "benchmark", - "musl", - "rust", - "developer_test", - "drivers_interface_display", - "skia", - "hilog", - "runtime_core", - "hisysevent", - "drivers_interface_usb", - "rust_cxx", - "rust_bindgen" + "rust_bindgen", + "css-what" ] \ No newline at end of file diff --git a/indep_configs/config/download_part_whitelist.json b/indep_configs/config/download_part_whitelist.json index 1ee7e1309a24634ab25f7ddf0c143bc4606b7401..fb7606101a0e380dfa2759782535a3635cf1aa83 100644 --- a/indep_configs/config/download_part_whitelist.json +++ b/indep_configs/config/download_part_whitelist.json @@ -16,5 +16,7 @@ "file_api": ["typescript"], "asset": ["rust", "rust_cxx", "rust_libc", "rust_syn", "rust_proc_macro2", "rust_quote", "rust_unicode_ident", "rust_bindgen"], "request": ["rust", 
"rust_cxx", "rust_libc", "rust_syn", "rust_proc_macro2", "rust_quote", "rust_unicode_ident", "rust_bindgen"], - "datamgr_service": ["rust", "rust_cxx", "rust_libc", "rust_syn", "rust_proc_macro2", "rust_quote", "rust_unicode_ident", "rust_bindgen", "ylong_runtime"] + "datamgr_service": ["rust", "rust_cxx", "rust_libc", "rust_syn", "rust_proc_macro2", "rust_quote", "rust_unicode_ident", "rust_bindgen", "ylong_runtime"], + "netstack": ["rust", "rust_cxx", "rust_libc", "rust_syn", "rust_proc_macro2", "rust_quote", "rust_unicode_ident", "rust_bindgen", "ylong_runtime"], + "ylong_http": ["rust", "rust_cxx", "rust_libc", "rust_syn", "rust_proc_macro2", "rust_quote", "rust_unicode_ident", "rust_bindgen"] } \ No newline at end of file diff --git a/indep_configs/scripts/generate_components.py b/indep_configs/scripts/generate_components.py index b3aa574d1ae14806bb4c1e2d706d490f19b30959..a955c4a6c8e4a0a466d4e38f9419e65d7fe45068 100755 --- a/indep_configs/scripts/generate_components.py +++ b/indep_configs/scripts/generate_components.py @@ -182,7 +182,7 @@ def _gen_components_info(components_json, bundle_json, part_name, src_build_name else: innerapi_label = "{}:{}".format(os.path.join("//binarys", path, "innerapis", innerapi_name), innerapi_name) innerapi_value_list.append({"name": innerapi_name, "label": innerapi_label}) - if innerapi_name in _part_toolchain_map_dict.keys(): + if innerapi_name in _part_toolchain_map_dict.keys() and part_name not in src_build_name_list: _name = innerapi_name innerapi_name = f"{innerapi_name}({_part_toolchain_map_dict[_name]['toolchain_value']})" innerapi_label = "{}:{}".format(os.path.join("//binarys", path, "innerapis", diff --git a/indep_configs/scripts/generate_target_build_gn.py b/indep_configs/scripts/generate_target_build_gn.py index a09b9dd1a76b9df8a2108c9aa05f66f645a20bac..9ddc27895470c7e78c409db91fed1e3e55dd4a8c 100755 --- a/indep_configs/scripts/generate_target_build_gn.py +++ b/indep_configs/scripts/generate_target_build_gn.py @@ 
-181,11 +181,19 @@ def process_build_target_list(build_target_list, module_to_path, deps_list): if item.startswith("//"): # 如果是全路径,直接添加 add_to_deps_list(deps_list, item) + elif os.sep in item: + if item.startswith('/'): + add_to_deps_list(deps_list, item) + else: + add_to_deps_list(deps_list, "//" + item) else: # 如果不是全路径,查找对应的module fullpath = module_to_path.get(item) if fullpath: add_to_deps_list(deps_list, fullpath) + else: + raise Exception(f"Error: The build target {item} was not found in the build configuration of bundle.json. " + + "If you really want to specify this target for building, please use the full path.") return deps_list diff --git a/indep_configs/scripts/gn_ninja_cmd.py b/indep_configs/scripts/gn_ninja_cmd.py index 85de6ea92bc29da9510c6e599bce94bdf3a6d7e7..4ed78f5e84cec9f3c588ae06c336594b2ed560e1 100644 --- a/indep_configs/scripts/gn_ninja_cmd.py +++ b/indep_configs/scripts/gn_ninja_cmd.py @@ -34,11 +34,17 @@ def _run_cmd(cmd: list): process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, encoding="utf-8" ) - for line in iter(process.stdout.readline, ""): + while True: + line = process.stdout.readline() + if not line and process.poll() is not None: + break print(line, end="") - process_status = process.poll() - if process_status: - sys.exit(process_status) + remaining_output, _ = process.communicate() + if remaining_output: + print(remaining_output, end='', flush=True) + exit_code = process.returncode + if exit_code: + sys.exit(exit_code) def _get_args(): diff --git a/prebuilts_config.json b/prebuilts_config.json index ca2274b4c46dbe496ba55766aca953087151584e..9c9244724de2bcb0704328aa2aa59ec537b9cc69 100644 --- a/prebuilts_config.json +++ b/prebuilts_config.json @@ -3,7 +3,7 @@ "tool_list": [ { "name": "ark_js_prebuilts", - "tag": "base", + "tag": "ark_js", "type": "src, indep", "config": { "linux": { @@ -34,41 +34,41 @@ "config": { "linux": { "arm64": { - "remote_url": 
"/openharmony/compiler/clang/15.0.4-2db1e1/linux_aarch64/clang_linux_aarch64-2db1e1-20250605.tar.gz", + "remote_url": "/openharmony/compiler/clang/15.0.4-115b62/linux_aarch64/clang_linux_aarch64-115b62-20250811.tar.gz", "unzip_dir": "${code_dir}/prebuilts/clang/ohos/linux-aarch64", - "rename_src": "${unzip_dir}/clang_linux_aarch64-2db1e1-20250605", + "rename_src": "${unzip_dir}/clang_linux_aarch64-115b62-20250811", "type": "src" }, "x86_64": [ { - "remote_url": "/openharmony/compiler/clang/15.0.4-2db1e1/ohos_arm64/clang_ohos-arm64-2db1e1-20250605.tar.gz", + "remote_url": "/openharmony/compiler/clang/15.0.4-115b62/ohos_arm64/clang_ohos-arm64-115b62-20250811.tar.gz", "unzip_dir": "${code_dir}/prebuilts/clang/ohos/ohos-arm64", - "rename_src": "${unzip_dir}/clang_ohos-arm64-2db1e1-20250605", + "rename_src": "${unzip_dir}/clang_ohos-arm64-115b62-20250811", "type": "src" }, { - "remote_url": "/openharmony/compiler/clang/15.0.4-2db1e1/windows/clang_windows-x86_64-2db1e1-20250605.tar.gz", + "remote_url": "/openharmony/compiler/clang/15.0.4-115b62/windows/clang_windows-x86_64-115b62-20250811.tar.gz", "unzip_dir": "${code_dir}/prebuilts/clang/ohos/windows-x86_64", - "rename_src": "${unzip_dir}/clang_windows-x86_64-2db1e1-20250605", + "rename_src": "${unzip_dir}/clang_windows-x86_64-115b62-20250811", "type": "src" }, { - "remote_url": "/openharmony/compiler/clang/15.0.4-2db1e1/linux/clang_linux-x86_64-2db1e1-20250605.tar.gz", + "remote_url": "/openharmony/compiler/clang/15.0.4-115b62/linux/clang_linux-x86_64-115b62-20250811.tar.gz", "unzip_dir": "${code_dir}/prebuilts/clang/ohos/linux-x86_64", - "rename_src": "${unzip_dir}/clang_linux-x86_64-2db1e1-20250605" + "rename_src": "${unzip_dir}/clang_linux-x86_64-115b62-20250811" } ] }, "darwin": { "arm64": { - "remote_url": "/openharmony/compiler/clang/15.0.4-2db1e1/darwin_arm64/clang_darwin-arm64-2db1e1-20250605.tar.gz", + "remote_url": 
"/openharmony/compiler/clang/15.0.4-115b62/darwin_arm64/clang_darwin-arm64-115b62-20250811.tar.gz", "unzip_dir": "${code_dir}/prebuilts/clang/ohos/darwin-arm64", - "rename_src": "${unzip_dir}/clang_darwin-arm64-2db1e1-20250605" + "rename_src": "${unzip_dir}/clang_darwin-arm64-115b62-20250811" }, "x86_64": { - "remote_url": "/openharmony/compiler/clang/15.0.4-2db1e1/darwin_x86_64/clang_darwin-x86_64-2db1e1-20250605.tar.gz", + "remote_url": "/openharmony/compiler/clang/15.0.4-115b62/darwin_x86_64/clang_darwin-x86_64-115b62-20250811.tar.gz", "unzip_dir": "${code_dir}/prebuilts/clang/ohos/darwin-x86_64", - "rename_src": "${unzip_dir}/clang_darwin-x86_64-2db1e1-20250605" + "rename_src": "${unzip_dir}/clang_darwin-x86_64-115b62-20250811" } } }, @@ -85,6 +85,60 @@ } ] }, + + { + "name": "cangjie", + "tag": "cangjie", + "type": "src, indep", + "unzip_dir": "${code_dir}/prebuilts/cangjie_sdk", + "config": { + "linux": { + "x86_64": [ + { + "remote_url": "/openharmony/compiler/cangjie/1.0.0/cangjie-linux-x64-1.0.0.zip", + "unzip_dir": "${code_dir}/prebuilts/cangjie_sdk/linux-x64", + "unzip_filename": "cangjie-linux-x64" + }, + { + "remote_url": "/openharmony/compiler/cangjie/1.0.0/cangjie-windows-x64-1.0.0.zip", + "unzip_dir": "${code_dir}/prebuilts/cangjie_sdk/windows-x64", + "unzip_filename": "cangjie-windows-x64" + }, + { + "remote_url": "/openharmony/compiler/cangjie/1.0.0/cangjie-api-tools-1.0.0.zip", + "unzip_dir": "${code_dir}/prebuilts/cangjie_sdk/build-tools", + "unzip_filename": "cangjie-api-tools" + } + ] + }, + "darwin": { + "x86_64": [ + { + "remote_url": "/openharmony/compiler/cangjie/1.0.0/cangjie-mac-x64-1.0.0.zip", + "unzip_dir": "${code_dir}/prebuilts/cangjie_sdk/mac-x64", + "unzip_filename": "cangjie-mac-x64" + }, + { + "remote_url": "/openharmony/compiler/cangjie/1.0.0/cangjie-api-tools-1.0.0.zip", + "unzip_dir": "${code_dir}/prebuilts/cangjie_sdk/build-tools", + "unzip_filename": "cangjie-api-tools" + } + ], + "arm64": [ + { + "remote_url": 
"/openharmony/compiler/cangjie/1.0.0/cangjie-mac-aarch64-1.0.0.zip", + "unzip_dir": "${code_dir}/prebuilts/cangjie_sdk/mac-aarch64", + "unzip_filename": "cangjie-mac-aarch64" + }, + { + "remote_url": "/openharmony/compiler/cangjie/1.0.0/cangjie-api-tools-1.0.0.zip", + "unzip_dir": "${code_dir}/prebuilts/cangjie_sdk/build-tools", + "unzip_filename": "cangjie-api-tools" + } + ] + } + } + }, { "name": "gn", "tag": "base", @@ -93,7 +147,7 @@ "config": { "linux": { "x86_64": { - "remote_url": "/openharmony/compiler/gn/20250620/gn-linux-x86-20250620.tar.gz", + "remote_url": "/openharmony/compiler/gn/20250804/gn-linux-x86-20250804.tar.gz", "unzip_dir": "${code_dir}/prebuilts/build-tools/linux-x86/bin" }, "arm64": { @@ -111,7 +165,7 @@ }, { "name": "hc-gen", - "tag": "base", + "tag": "hc-gen", "type": "src, indep", "config": { "linux": { @@ -125,7 +179,7 @@ }, { "name": "OpenHarmonyApplication.pem", - "tag": "base", + "tag": "app", "type": "indep", "config": { "linux": { @@ -223,7 +277,7 @@ }, { "name": "packing_tool", - "tag": "base", + "tag": "packing_tool", "type": "src, indep", "config": { "linux, darwin": { @@ -247,17 +301,17 @@ "unzip_filename": "linux-arm64" }, "x86_64": { - "remote_url": "/openharmony/compiler/python/3.11.4/linux/python-linux-x86-GLIBC2.27-3.11.4_20250219.tar.gz", + "remote_url": "/openharmony/compiler/python/3.11.4/linux/python-linux-x86-GLIBC2.27-3.11.4_20250807.tar.gz", "unzip_filename": "linux-x86" } }, "darwin": { "x86_64": { - "remote_url": "/openharmony/compiler/python/3.11.4/darwin/python-darwin-x86-3.11.4_20250228.tar.gz", + "remote_url": "/openharmony/compiler/python/3.11.4/darwin/python-darwin-x86-3.11.4_20250804.tar.gz", "unzip_filename": "darwin-x86" }, "arm64": { - "remote_url": "/openharmony/compiler/python/3.11.4/darwin/python-darwin-arm64-3.11.4_20250228.tar.gz", + "remote_url": "/openharmony/compiler/python/3.11.4/darwin/python-darwin-arm64-3.11.4_20250804.tar.gz", "unzip_filename": "darwin-arm64" } } @@ -308,7 +362,7 @@ }, { 
"name": "rustc", - "tag": "base", + "tag": "rust", "type": "src, indep", "unzip_dir": "${code_dir}/prebuilts/rustc", "config": { @@ -396,9 +450,18 @@ ] }, { - "name": "init_ohpm", - "tag": "base", + "name": "hvigor", + "tag": "hvigor", "type": "src, indep", + "config": { + "linux": { + "x86_64": { + "unzip_dir": "prebuilts/tool/command-line-tools", + "remote_url": "/harmonyos/compiler/hvigor/5.0.3.906/command-line-tools.tar", + "unzip_filename": "hvigor" + } + } + }, "handle": [ { "type": "shell", @@ -418,28 +481,28 @@ "linux": { "arm64": { "unzip_dir": "${code_dir}/prebuilts/clang/ohos/linux-aarch64", - "remote_url": "/openharmony/compiler/clang/15.0.4-2db1e1/linux_aarch64/libcxx-ndk_linux-aarch64-2db1e1-20250605.tar.gz", + "remote_url": "/openharmony/compiler/clang/15.0.4-115b62/linux_aarch64/libcxx-ndk_linux-aarch64-115b62-20250811.tar.gz", "unzip_filename": "libcxx-ndk", - "rename_src": "${unzip_dir}/libcxx-ndk_linux-aarch64-2db1e1-20250605" + "rename_src": "${unzip_dir}/libcxx-ndk_linux-aarch64-115b62-20250811" }, "x86_64": [ { - "remote_url": "/openharmony/compiler/clang/15.0.4-2db1e1/linux/libcxx-ndk_linux-x86_64-2db1e1-20250605.tar.gz", + "remote_url": "/openharmony/compiler/clang/15.0.4-115b62/linux/libcxx-ndk_linux-x86_64-115b62-20250811.tar.gz", "unzip_dir": "${code_dir}/prebuilts/clang/ohos/linux-x86_64", - "rename_src": "${unzip_dir}/libcxx-ndk_linux-x86_64-2db1e1-20250605" + "rename_src": "${unzip_dir}/libcxx-ndk_linux-x86_64-115b62-20250811" } ] }, "darwin": { "arm64": { - "remote_url": "/openharmony/compiler/clang/15.0.4-2db1e1/darwin_arm64/libcxx-ndk_darwin-arm64-2db1e1-20250605.tar.gz", + "remote_url": "/openharmony/compiler/clang/15.0.4-115b62/darwin_arm64/libcxx-ndk_darwin-arm64-115b62-20250811.tar.gz", "unzip_dir": "${code_dir}/prebuilts/clang/ohos/darwin-arm64", - "rename_src": "${unzip_dir}/libcxx-ndk_darwin-arm64-2db1e1-20250605" + "rename_src": "${unzip_dir}/libcxx-ndk_darwin-arm64-115b62-20250811" }, "x86_64": { - "remote_url": 
"/openharmony/compiler/clang/15.0.4-2db1e1/darwin_x86_64/libcxx-ndk_darwin-x86_64-2db1e1-20250605.tar.gz", + "remote_url": "/openharmony/compiler/clang/15.0.4-115b62/darwin_x86_64/libcxx-ndk_darwin-x86_64-115b62-20250811.tar.gz", "unzip_dir": "${code_dir}/prebuilts/clang/ohos/darwin-x86_64", - "rename_src": "${unzip_dir}/libcxx-ndk_darwin-x86_64-2db1e1-20250605" + "rename_src": "${unzip_dir}/libcxx-ndk_darwin-x86_64-115b62-20250811" } } }, @@ -453,7 +516,7 @@ }, { "name": "npm_install", - "tag": "base", + "tag": "npm_install", "type": "indep", "handle": [ { @@ -466,14 +529,20 @@ "${code_dir}/third_party/weex-loader", "${code_dir}/arkcompiler/ets_frontend/legacy_bin/api8", "${code_dir}/interface/sdk-js/build-tools", - "${code_dir}/arkcompiler/ets_frontend/arkguard" + "${code_dir}/arkcompiler/ets_frontend/arkguard", + "${code_dir}/arkcompiler/ets_frontend/ets2panda/driver/build_system", + "${code_dir}/arkcompiler/ets_frontend/ets2panda/linter", + "${code_dir}/arkcompiler/ets_frontend/ets2panda/bindings", + "${code_dir}/arkcompiler/runtime_core/static_core/plugins/ets/tools/declgen_ts2sts", + "${code_dir}/developtools/ace_ets2bundle/koala-wrapper", + "${code_dir}/developtools/ace_ets2bundle/arkui-plugins" ] } ] }, { "name": "node_modules_copy", - "tag": "base", + "tag": "npm_install", "type": "indep", "handle": [ { @@ -515,7 +584,7 @@ }, { "name": "AGPBinaryCompile", - "tag": "base", + "tag": "AGP", "type": "indep", "unzip_filename": "AGPBinaryCompile", "config": { @@ -529,19 +598,31 @@ }, { "name": "taihe", - "tag": "base", + "tag": "taihe", "type": "indep", "unzip_filename": "taihe", "config": { "linux": { "x86_64, arm64": [ { - "remote_url": "/openharmony/compiler/Taihe/0.11/taihe-v0.11-20250410.tar.gz", + "remote_url": "/openharmony/compiler/Taihe/0.47/taihe-linux-x86_64-0.47.0-20250904.tar.gz", "unzip_dir": "${code_dir}/prebuilts/taihe/ohos/linux-x86_64" } ] } } + }, + { + "name": "sdk", + "tag": "sdk", + "type": "indep", + "handle": [ + { + "type": 
"download_sdk", + "sdk_name": "ohos-sdk-full-linux", + "version": 20 + } + ] } ] -} \ No newline at end of file +} diff --git a/prebuilts_config.py b/prebuilts_config.py index 2df03e2a7fce709983bcd7952bf06e35cbfe44eb..6dd97e54e1627ac5d37e585fd09206a4b9bbdace 100644 --- a/prebuilts_config.py +++ b/prebuilts_config.py @@ -56,10 +56,10 @@ def main(): global_args._create_default_https_context = ssl._create_unverified_context global_args.code_dir = get_code_dir() - config_file = os.path.join(global_args.code_dir,"build", "prebuilts_config.json") + config_file = os.path.join(global_args.code_dir, "build", "prebuilts_config.json") if global_args.config_file: config_file = global_args.config_file - + print(f"start parse config file {config_file}") config_parser = ConfigParser(config_file, global_args) download_operate, other_operate = config_parser.get_operate(global_args.part_names) prebuilts_path = os.path.join(global_args.code_dir, "prebuilts") @@ -67,9 +67,12 @@ def main(): os.makedirs(prebuilts_path) # 使用线程池下载 + print(f"start download prebuilts, tool list is:") + for item in download_operate: + print(item.get("remote_url")) pool_downloader = PoolDownloader(download_operate, global_args) - unchanged = pool_downloader.start() - + unchanged = pool_downloader.start() + print(f"start handle other operate") OperateHanlder.run(other_operate, global_args, unchanged) diff --git a/prebuilts_config.sh b/prebuilts_config.sh index 01b3fcc5405954e65881c54fb0cbd838e049c7dd..4216853974837f11ccae72071c475a905bee04a7 100644 --- a/prebuilts_config.sh +++ b/prebuilts_config.sh @@ -168,8 +168,10 @@ if [[ -v args["--download-sdk"] ]]; then DOWNLOAD_SDK=YES fi -if [[ "$DOWNLOAD_SDK" == "YES" ]] && [[ ! -d "${code_dir}/prebuilts/ohos-sdk/linux" ]]; then - $PYTHON_PATH/python3 ${code_dir}/build/scripts/download_sdk.py --branch master --product-name ohos-sdk-full-linux --api-version 20 +if ! [[ -v args["--part-names"] ]]; then + if [[ "$DOWNLOAD_SDK" == "YES" ]] && [[ ! 
-d "${code_dir}/prebuilts/ohos-sdk/linux" ]]; then + $PYTHON_PATH/python3 ${code_dir}/build/scripts/download_sdk.py --branch master --product-name ohos-sdk-full-linux --api-version 20 + fi fi diff --git a/prebuilts_service/README_zh.md b/prebuilts_service/README_zh.md index 64615a0223cda6f68db90a9fd1132202a2bb6a33..fecf9bbdb82c1e046d029053f82735cbed7ba869 100644 --- a/prebuilts_service/README_zh.md +++ b/prebuilts_service/README_zh.md @@ -3,8 +3,8 @@ 1. [核心配置说明](#section-download-core-01) 2. [基础配置示例](#section-download-basic-demo) 3. [高级配置示例](#section-download-advanced-demo) -- [后续处理配置](#advanced-process) -- [变量查找规则](#value-search) +- [处理配置](#advanced-process) +- [变量处理](#value-search) ## 工具下载配置 下载配置用于配置下载和解压参数 @@ -12,8 +12,8 @@ |参数|描述| |--|--| -remote_url|远程包下载地址(HTTP/HTTPS)| -unzip_dir|解压目标路径(绝对或相对路径)| +remote_url|远程包下载地址| +unzip_dir|解压目标路径| unzip_filename|解压后的顶层目录名(用于版本管理和旧文件清理)| ### 基础配置示例 @@ -38,7 +38,7 @@ unzip_filename|解压后的顶层目录名(用于版本管理和旧文件清 #### 场景2:CPU架构无关配置 -若工具包不依赖CPU架构(如纯脚本工具),可省略架构标识 +若工具包不依赖CPU架构(如纯脚本工具),可做如下配置: ``` json { "name": "ark_js_prebuilts", @@ -46,25 +46,34 @@ unzip_filename|解压后的顶层目录名(用于版本管理和旧文件清 "type": "src, indep", "config": { "linux": { - "remote_url": "/openharmony/compiler/llvm_prebuilt_libs/ark_js_prebuilts_20230713.tar.gz", - "unzip_dir": "${code_dir}/prebuilts/ark_tools", - "unzip_filename": "ark_js_prebuilts" + "all_cpu": { + "remote_url": "/openharmony/compiler/llvm_prebuilt_libs/ark_js_prebuilts_20230713.tar.gz", + "unzip_dir": "${code_dir}/prebuilts/ark_tools", + "unzip_filename": "ark_js_prebuilts" + } + } } } ``` -#### 场景3:跨平台配置 -若工具包同时兼容多操作系统和CPU架构,配置进一步简化: +#### 场景3:平台无关配置 +若工具包和平台无关,配置进一步简化: ```json { "name": "ark_js_prebuilts", "tag": "base", "type": "src, indep", - "remote_url": "/openharmony/compiler/llvm_prebuilt_libs/ark_js_prebuilts_20230713.tar.gz", - "unzip_dir": "${code_dir}/prebuilts/ark_tools", - "unzip_filename": "ark_js_prebuilts" + "config": { + "all_os":{ + "all_cpu": { + "remote_url": 
"/openharmony/compiler/llvm_prebuilt_libs/ark_js_prebuilts_20230713.tar.gz", + "unzip_dir": "${code_dir}/prebuilts/ark_tools", + "unzip_filename": "ark_js_prebuilts" + } + } + } } ``` @@ -81,17 +90,17 @@ unzip_filename|解压后的顶层目录名(用于版本管理和旧文件清 "x86_64": [ { "remote_url": "/openharmony/compiler/clang/15.0.4-3cec00/ohos_arm64/clang_ohos-arm64-3cec00-20250320.tar.gz", - "unzip_dir": "${code_dir}/prebuilts/clang/ohos/ohos-arm64", + "unzip_dir": "${code_dir}/prebuilts/clang/ohos/ohos-arm64", "unzip_filename": "llvm", }, { "remote_url": "/openharmony/compiler/clang/15.0.4-3cec00/windows/clang_windows-x86_64-3cec00-20250320.tar.gz", - "unzip_dir": "${code_dir}/prebuilts/clang/ohos/windows-x86_64", + "unzip_dir": "${code_dir}/prebuilts/clang/ohos/windows-x86_64", "unzip_filename": "llvm", }, { "remote_url": "/openharmony/compiler/clang/15.0.4-3cec00/linux/clang_linux-x86_64-3cec00-20250320.tar.gz", - "unzip_dir": "${code_dir}/prebuilts/clang/ohos/linux-x86_64", + "unzip_dir": "${code_dir}/prebuilts/clang/ohos/linux-x86_64", "unzip_filename": "llvm", } ] @@ -102,8 +111,8 @@ unzip_filename|解压后的顶层目录名(用于版本管理和旧文件清 -#### 使用公共变量 -当配置中存在值相同的配置项时,可提取公共变量避免冗余:
+#### 使用公共配置 +当配置中存在值相同的配置项时,可提取公共配置避免冗余:
**原始冗余配置** ```json { @@ -156,21 +165,24 @@ unzip_filename|解压后的顶层目录名(用于版本管理和旧文件清 #### 配置继承规则 - 工具配置会继承全局配置 - 平台配置会继承工具配置 -- 内部配置优于继承配置 - -## 后续处理配置 -工具下载解压完成后可能需要进行后续处理,该部分在handle中配置,handle是一个列表,其中的每一项都代表一个操作 +- 存在相同配置项时,内部配置会覆盖继承的配置 +#### 说明 +- 全局配置在工具配置的外层定义 +- 平台配置在config里面定义 +- 除config和handle,都属于工具配置 + +## 处理配置 +部分工具在下载解压完成后需要进行额外的处理,这些处理操作可以在handle中定义,handle会在下载解压完成后执行,若没有下载解压操作,handle则会直接执行。handle是一个列表,其中的每一项都代表一个操作 ### handle配置特点 -- 顺序执行:操作按配置顺序依次执行。 -- 变量继承:操作中可引用config和外部的配置参数 -- 灵活控制:可通过handle_index指定执行的操作序号。 -- 容错机制:若操作中的变量解析失败,跳过当前操作。 +- 顺序执行:操作项按配置顺序依次执行 +- 使用变量:操作中可使用外部变量 +- 灵活控制:平台配置中可通过指定handle_index,定制操作序列 +- 容错机制:若操作中的变量解析失败,跳过当前操作 ### 公共操作列表 |操作类型|参数|用途| |-|-|-| -|download| remote_url: 远程下载地
unzip_dir: 本地解压目
unzip_filename: 用于哈希校验和清理
**注:该操作通常而言无需显示声明,脚本会根据平台配置的remote_url自动生成对应的下载作 **| 下载和解压 | |symlink| src: 链接源
dest: 目的链接地址| 生成符号链接 |copy | src: 源
dest: 目的| 复制文件或文件夹 | |remove | path:要删除的路径, 可以是字符串,也可以是一个列表 | 删除文件或文件夹 | @@ -207,9 +219,9 @@ unzip_filename|解压后的顶层目录名(用于版本管理和旧文件清 ``` -## 变量查找规则 +## 变量处理 - 变量只能使用${var_name}的方式指定 -- 工具配置可以使用自身以及全局配置中的变量 -- 平台配置可以使用自身、工具以及全局配置中的变量 -- handle可以使用自身、平台、工具以及全局配置中的变量 -- 变量只会解析一次,采取就近解析原则 +- 工具配置可以使用自身内部以及全局配置中的变量 +- 平台配置可以使用自身内部、工具以及全局配置中的变量 +- handl中的操作项可以使用自身内部、平台、工具以及全局配置中的变量 +- 变量解析优先级为:自身内部配置 > 平台配置 > 工具配置 > 全局配置 diff --git a/prebuilts_service/common_utils.py b/prebuilts_service/common_utils.py index acb566c456f546b14d9c081ac2931be7fe1adf9a..b5d963c279529e27a5e6d3d282709e2238c09945 100644 --- a/prebuilts_service/common_utils.py +++ b/prebuilts_service/common_utils.py @@ -20,6 +20,7 @@ import pathlib import time import json import importlib +import re def get_code_dir(): @@ -52,6 +53,12 @@ def import_rich_module(): return progress +def save_data(file_path: str, data): + os.makedirs(os.path.dirname(file_path), exist_ok=True) + with open(file_path, "w") as f: + json.dump(data, f, indent=4) + + def load_config(config_file: str): with open(config_file, "r", encoding="utf-8") as r: config = json.load(r) @@ -86,33 +93,6 @@ def symlink_src2dest(src_dir: str, dest_dir: str): print("symlink {} ---> {}".format(src_dir, dest_dir)) -def run_cmd_live(cmd: list): - cmd_str = " ".join(cmd) - print(f"run command: {cmd_str}\n") - try: - process = subprocess.Popen( - cmd, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - text=True - ) - - while True: - output = process.stdout.readline() - if output == '' and process.poll() is not None: - break - if output: - print(output.strip()) - - return_code = process.poll() - if return_code != 0: - print(f"命令执行失败,返回码: {return_code}") - return return_code, "" - except Exception as e: - print(f"执行命令时出错: {e}") - return 1, "" - - def run_cmd_directly(cmd: list): cmd_str = " ".join(cmd) print(f"run command: {cmd_str}\n") @@ -129,7 +109,7 @@ def run_cmd(cmd: list) -> tuple: res = subprocess.Popen( cmd, stdout=subprocess.PIPE, 
stderr=subprocess.PIPE ) - sout, serr = res.communicate() + sout, serr = res.communicate(timeout=300) return sout.rstrip().decode("utf-8"), serr, res.returncode @@ -145,6 +125,33 @@ def is_system_component() -> bool: ) +def check_hpm_version(hpm_path: str, npm_path: str) -> bool: + if not os.path.exists(hpm_path): + print(f"hpm not found at {hpm_path}, now install.") + return False + local_hpm_version = subprocess.run([hpm_path, "-V"], capture_output=True, text=True).stdout.strip() + cmd = npm_path + " search hpm-cli --registry https://registry.npmjs.org/" + cmd = cmd.split() + proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True) + try: + out, _ = proc.communicate(timeout=10) + except subprocess.TimeoutExpired: + proc.kill() + latest_hpm_version = "" + if proc.returncode == 0: + pattern = r'^@ohos/hpm-cli\s*\|(?:[^|]*\|){3}([^|]*)' + for line in out.splitlines(): + match = re.match(pattern, line) + if match: + latest_hpm_version = match.group(1).strip() + break + if latest_hpm_version and latest_hpm_version == local_hpm_version: + print(f"local hpm version: {local_hpm_version}, remote latest hpm version: {latest_hpm_version}") + return True + print(f"local hpm version: {local_hpm_version}, remote latest hpm version: {latest_hpm_version}") + return False + + def install_hpm_in_other_platform(name: str, operate: dict): download_dir = operate.get("download_dir") package_path = operate.get("package_path") diff --git a/prebuilts_service/config_parser.py b/prebuilts_service/config_parser.py index 38a488786f33a5921f859e7edb13e7edc1808632..2343c068e8d08dc243f180dfd4683cfa0a69c515 100644 --- a/prebuilts_service/config_parser.py +++ b/prebuilts_service/config_parser.py @@ -30,67 +30,102 @@ class ConfigParser: "code_dir": global_args.code_dir, "download_root": self.data["download_root"] } - VarParser.parse_vars(self.global_config, []) - download_root = self.global_config["download_root"] - self.global_config["download_root"] = 
os.path.abspath(os.path.expanduser(download_root)) + self._parse_global_config() def get_operate(self, part_names=None) -> tuple: download_op = [] other_op = [] tool_list = self.data["tool_list"] + # 独立编译按需下载 parts_configured_tags = get_parts_tag_config(part_names) if part_names else None if parts_configured_tags: self.input_tag = parts_configured_tags + # 获取下载操作和其他操作 for tool in tool_list: - _download, _other = self._get_tool_operate(tool) + tool_basic_config = self._parse_tool_basic_config(tool) + tool_basic_config = self._merge_configs(self.global_config, tool_basic_config) + if not self._apply_filters([tool_basic_config]): + continue + _download, _other = self._get_tool_operate(tool_basic_config, tool.get("config"), tool.get("handle", [])) download_op.extend(_download) other_op.extend(_other) return download_op, other_op + + def _parse_global_config(self): + # 解析全局配置中的变量 + VarParser.parse_vars(self.global_config, self.global_config) + download_root = self.global_config["download_root"] + self.global_config["download_root"] = os.path.abspath(os.path.expanduser(download_root)) - def _get_tool_operate(self, tool) -> tuple: - tool_matched, unified_tool_basic_config = self._is_tool_matched(tool) - if not tool_matched: - return [], [] - - matched_platform_configs = Filter.filter_platform(self.current_os, self.current_cpu, tool.get("config")) - for config in matched_platform_configs: - VarParser.parse_vars(config, [unified_tool_basic_config, self.global_config]) - unified_platform_configs = [] + def _get_tool_operate(self, tool_basic_config: dict, platform_config: dict, handle_config: list) -> tuple: + matched_platform_configs = self._match_platform(self.current_os, self.current_cpu, platform_config) + self._parse_platform_config(matched_platform_configs, tool_basic_config) + platform_configs = [] for conf in matched_platform_configs: - unified_platform_configs.append(self._unify_config(self.global_config, unified_tool_basic_config, conf)) - unified_platform_configs = 
Filter(unified_platform_configs).apply_filters(self.input_tag, self.input_type) - - handle = tool.get("handle", []) - - if unified_platform_configs: - # 有平台配置则只使用平台配置 - download_operate, other_operate = self._generate_tool_operate(unified_platform_configs, handle) - else: - # 没有平台配置则使用工具配置 - download_operate, other_operate = self._generate_tool_operate([unified_tool_basic_config], handle) - + config = self._merge_configs(tool_basic_config, conf) + platform_configs.append(config) + platform_configs = self._apply_filters(platform_configs) + handle = handle_config + download_operate, other_operate = self._generate_tool_operate(tool_basic_config, platform_configs, handle) # 删除存在未知变量的配置 return VarParser.remove_undefined(download_operate), VarParser.remove_undefined(other_operate) - def _is_tool_matched(self, tool): + def _parse_tool_basic_config(self, tool): tool_basic_config = {key: tool[key] for key in tool if key not in {"config", "handle"}} - VarParser.parse_vars(tool_basic_config, [self.global_config]) - unified_tool_basic_config = self._unify_config(self.global_config, tool_basic_config) - if not Filter([unified_tool_basic_config]).apply_filters(self.input_tag, self.input_type): - return False, [] - else: - return True, unified_tool_basic_config + VarParser.parse_vars(tool_basic_config, tool_basic_config) + VarParser.parse_vars(tool_basic_config, self.global_config) + return tool_basic_config + + def _parse_platform_config(self, matched_platform_configs: list, tool_basic_config: dict): + for config in matched_platform_configs: + VarParser.parse_vars(config, config) + VarParser.parse_vars(config, tool_basic_config) + + def _apply_filters(self, configs: list): + return Filter(configs).apply_filters(self.input_tag, self.input_type) + + def _match_platform(self, input_os: str, input_cpu: str, config: dict) -> list: + """获取匹配当前操作系统的配置""" + if not config: + return [] + filtered = [] - def _generate_tool_operate(self, outer_configs: list, handles: list) -> tuple: - if 
not outer_configs: - return [], [] + matched_os = self._match_os(input_os, config) + for os_item in matched_os: + cpu_config = config[os_item] + matched_cpu = self._match_cpu(input_cpu, cpu_config) + for cpu_item in matched_cpu: + platform_configs = cpu_config[cpu_item] + # 配置内部可以是一个配置,也可以是一个配置列表 + if not isinstance(platform_configs, list): + platform_configs = [platform_configs] + filtered.extend(platform_configs) + return filtered + def _match_os(self, input_os: str, os_config: dict) -> list: + matched_os = [] + for os_key in os_config: + # 逗号分割操作系统名 + configured_os_list = [o.strip() for o in os_key.split(",")] + if input_os in configured_os_list or configured_os_list == ["all_os"]: + matched_os.append(os_key) + return matched_os + + def _match_cpu(self, input_cpu: str, cpu_config: dict) -> list: + matched_cpu = [] + for cpu_str in cpu_config: + configured_cpu_list = [c.strip() for c in cpu_str.split(",")] + if input_cpu in configured_cpu_list or configured_cpu_list == ["all_cpu"]: + matched_cpu.append(cpu_str) + return matched_cpu + + def _generate_tool_operate(self, tool_basic_config: dict, platform_configs: list, handles: list) -> tuple: download_operate = [] other_operate = [] - # 根据配置,自动生成下载操作 - for config in outer_configs: - if config.get("remote_url"): + # 根据平台配置生成下载操作 + for config in platform_configs: + if config.get("remote_url") and config.get("unzip_dir") and config.get("unzip_filename"): download_config = self._generate_download_config(config) download_operate.append(download_config) @@ -98,12 +133,13 @@ class ConfigParser: if not handles: return download_operate, [] - operates = self._generate_handles(outer_configs, handles) - # 区分下载操作和其他操作 + configs = platform_configs if platform_configs else [tool_basic_config] + operates = self._generate_handles(configs, handles) + # handle中不允许配置下载操作 other_operate = [] for operate in operates: if operate["type"] == "download": - download_operate.append(operate) + pass else: other_operate.append(operate) @@ -127,7 
+163,8 @@ class ConfigParser: # 不能改变原来的handle new_handle = copy.deepcopy(handle) # 解析handle中的变量 - VarParser.parse_vars(new_handle, [config]) + VarParser.parse_vars(new_handle, new_handle) + VarParser.parse_vars(new_handle, config) # 生成操作id new_handle["tool_name"] = config.get("name") new_handle["step_id"] = step_id @@ -148,7 +185,7 @@ class ConfigParser: print(f"error config: {config}") raise e - def _unify_config(self, *additional_configs) -> dict: + def _merge_configs(self, *additional_configs) -> dict: unified_config = dict() for config in additional_configs: unified_config.update(config) @@ -156,46 +193,12 @@ class ConfigParser: class Filter: - def __init__(self, configs=[]): + def __init__(self, configs): + if configs is None: + self.input_configs = [] + return self.input_configs = copy.deepcopy(configs) - @classmethod - def filter_platform(cls, current_os: str, current_cpu: str, config: dict) -> list: - """获取匹配当前操作系统的配置""" - if not config: - return [] - - filtered = [] - - for os_key, os_config in config.items(): - # 逗号分割操作系统名 - configured_os_list = [o.strip() for o in os_key.split(",")] - if current_os in configured_os_list: - # 不配cpu场景 - if isinstance(os_config, list): - filtered.extend(os_config) - continue - # 不配cpu, 仅有一个配置项场景 - if isinstance(os_config, dict) and "remote_url" in os_config: - filtered.extend(os_config) - continue - # 配cpu场景 - filtered.extend(cls.filter_cpu(current_cpu, os_config)) - return filtered - - @classmethod - def filter_cpu(cls, current_cpu: str, os_config: dict) -> list: - filtered = [] - for cpu_str in os_config: - configured_cpu_list = [c.strip() for c in cpu_str.split(",")] - if current_cpu in configured_cpu_list: - cpu_config = os_config[cpu_str] - # cpu配置内部可以是一个配置,也可以是一个配置列表 - if not isinstance(cpu_config, list): - cpu_config = [cpu_config] - filtered.extend(cpu_config) - return filtered - def apply_filters(self, input_tag: str, input_type: str): return self.filter_tag(input_tag).filter_type(input_type).result() @@ -262,31 
+265,24 @@ class VarParser: return False @classmethod - def parse_vars(cls, data: dict, dictionarys: list): + def parse_vars(cls, data: any, dictionary: dict) -> any: """ - 解析config中的变量, 先自解析, 再按顺序查字典 - :param config: 需要进行变量解析的配置 - :param dictionarys: 字典列表 + 用dictionary字典中的值替换data中的变量,data可以为列表、字典、字符串等类型, 变量使用${var_name}形式 + 若data是字符串, 则返回新值, 否则, 更改原值 + return: 更改之后的值 """ - cls.replace_vars_in_data(data, data) - for dic in dictionarys: - cls.replace_vars_in_data(data, dic) - - @classmethod - def replace_vars_in_data(cls, data: any, dictionary: dict) -> any: - """用dictionary字典中的值替换data中的变量,data可以为列表、字典、字符串等类型, 变量使用${var_name}形式""" if isinstance(data, str): return cls.replace_vars_in_string(data, dictionary) elif isinstance(data, dict): for k in list(data.keys()): original_value = data[k] - new_value = cls.replace_vars_in_data(original_value, dictionary) + new_value = cls.parse_vars(original_value, dictionary) if new_value is not original_value: # 仅当original_value为字符串时成立 data[k] = new_value elif isinstance(data, list): for i in range(len(data)): original_value = data[i] - new_value = cls.replace_vars_in_data(original_value, dictionary) + new_value = cls.parse_vars(original_value, dictionary) if new_value is not original_value: data[i] = new_value else: @@ -297,13 +293,11 @@ class VarParser: def replace_vars_in_string(cls, s: str, dictionary: dict) -> str: """用dictionary字典中的值替换字符串s中的变量, 变量使用${var_name}形式""" - - replaced_var_names = set() # 避免循环依赖 - + ref_dict = dict() while True: try: replaced = cls.var_pattern.sub( - lambda matched_var: cls._replace_var_with_dict_value(matched_var, dictionary, replaced_var_names), + lambda matched_var: cls._replace_var_with_dict_value(matched_var, dictionary, ref_dict), s) if replaced == s: break @@ -314,10 +308,28 @@ class VarParser: return s @classmethod - def _replace_var_with_dict_value(cls, matched_var, dictionary, replaced_var_names): + def _replace_var_with_dict_value(cls, matched_var, dictionary, ref_dict): var_name = 
matched_var.group()[2:-1] - if var_name in replaced_var_names: - raise ValueError(f"Variable \"{var_name}\" is being replaced again.") if dictionary.get(var_name): - replaced_var_names.add(var_name) - return dictionary.get(var_name, matched_var.group()) # 找得到就替换,找不到就保留原始值 \ No newline at end of file + cls._update_ref_dict(ref_dict, var_name, dictionary.get(var_name)) + return dictionary.get(var_name) # 找得到就替换 + else: + return matched_var.group() # 找不到就保留原始值 + + @classmethod + def _update_ref_dict(cls, ref_dict, var_name, var_value): + if var_name not in ref_dict: + ref_dict[var_name] = [] + ref_vars = cls.var_pattern.findall(var_value) + for var in ref_vars: + name = var[2:-1] + ref_dict[var_name].append(name) + # 检测循环依赖 + cls._check_cycle_rely(ref_dict, var_name) + + @classmethod + def _check_cycle_rely(cls, ref_dict, var_name): + ref_list = ref_dict.get(var_name, []) + for ref_var in ref_list: + if var_name in ref_dict.get(ref_var, []): + raise ValueError(f"Cycle dependency exists between {var_name} and {ref_var}") \ No newline at end of file diff --git a/prebuilts_service/download_util.py b/prebuilts_service/download_util.py index 2fd4b58f761ec60cf68b070d1be3a2b345378b87..3b83cdefa74c212c3ea84ad902be2e5b1cbddf60 100644 --- a/prebuilts_service/download_util.py +++ b/prebuilts_service/download_util.py @@ -18,6 +18,7 @@ import os from common_utils import run_cmd import threading import hashlib +import time remote_sha256_cache = dict() @@ -82,6 +83,7 @@ def get_remote_sha256(remote_url: str) -> str: """ 从远程.sha256文件中获取哈希值 """ + start_time = time.time() with _cache_lock: # 加锁检查缓存 if remote_url in remote_sha256_cache: return remote_sha256_cache[remote_url] @@ -92,6 +94,10 @@ def get_remote_sha256(remote_url: str) -> str: with _cache_lock: # 加锁更新缓存 remote_sha256_cache[remote_url] = remote_sha256 + endtime = time.time() + cost_time = endtime - start_time + remote_file_name = os.path.basename(remote_url) + print(f"get remote sha256 for {remote_file_name} end, cost time: 
{cost_time}") return remote_sha256 diff --git a/prebuilts_service/operater.py b/prebuilts_service/operater.py index 5e11411ae2b903c5b0713452413ec31d0019e77e..f27787efd0e2f0a69a13515c97c6ced2c4eb1b62 100644 --- a/prebuilts_service/operater.py +++ b/prebuilts_service/operater.py @@ -25,38 +25,73 @@ from common_utils import ( install_hpm_in_other_platform, npm_install, is_system_component, + get_code_dir, + check_hpm_version, + save_data, + load_config, ) import re +import platform +from collections import OrderedDict class OperateHanlder: global_args = None @staticmethod - def run(operate_list: list, global_args, unchanged_list: tuple = ()): - ignore_list = [] - OperateHanlder.global_args = global_args - pre_process_tool = "" - for operate in operate_list: + def process_step(process_item: str, step_list: list, unchanged_list: list, processed_dict: dict): + process_result_file = os.path.join(OperateHanlder.global_args.code_dir, "prebuilts/.local_data/processed.json") + for step in step_list: try: - current_tool = re.match(r"(.*)_\d$", operate.get("step_id")).group(1) - shot_name = re.sub(r"(\.[A-Za-z]+)+$", "", current_tool).strip("_") + getattr(OperateHanlder, "_" + step.get("type"))(step) + except Exception as e: + # if the process item is already being processed, but not being recorded(that means prebuilts/processed.json is not exist), + # in this situation, we just check if the process item is in unchanged_list, + # if it is, then we don't need to process it again, we can just mark it as processed. 
+ if process_item in unchanged_list: + processed_dict[process_item] = True + break + # If an error occurs, save the processed status + processed_dict[process_item] = False + save_data(process_result_file, processed_dict) + raise e - if current_tool != pre_process_tool: - print(f"\n==> process {shot_name}") - pre_process_tool = current_tool + @staticmethod + def run(operate_list: list, global_args, unchanged_list: tuple = ()): + OperateHanlder.global_args = global_args + # read and reset processed record + process_result_file = os.path.join(global_args.code_dir, "prebuilts/.local_data/processed.json") + if os.path.exists(process_result_file): + processed_dict = load_config(process_result_file) + else: + processed_dict = dict() + for key in processed_dict.keys(): + if key not in unchanged_list: + processed_dict[key] = False - if current_tool in ignore_list: - continue + # group operate_list by process item + item_steps_dict = OrderedDict() + for current_operate in operate_list: + current_process_item = re.match(r"(.*)_\d$", current_operate.get("step_id")).group(1) + if current_process_item not in item_steps_dict: + item_steps_dict[current_process_item] = [current_operate] + else: + item_steps_dict[current_process_item].append(current_operate) - getattr(OperateHanlder, "_" + operate.get("type"))(operate) - except Exception as e: - if current_tool in unchanged_list: - ignore_list.append(current_tool) - print(f"<== ignore process {shot_name}") + # process each item + for process_item, step_list in item_steps_dict.items(): + process_item_without_suffix = re.sub(r"(\.[A-Za-z]+)+$", "", process_item).strip("_") + # If the process item is in unchanged_list and has been processed, skip it + if process_item in unchanged_list: + if process_item in processed_dict and processed_dict[process_item]: + print(f"==> {process_item_without_suffix} is unchanged, skip") continue - else: - raise e + print(f"\n==> process {process_item_without_suffix}") + processed_dict[process_item] = 
False + OperateHanlder.process_step(process_item, step_list, unchanged_list, processed_dict) + processed_dict[process_item] = True + # save the processed status of each item + save_data(process_result_file, processed_dict) @staticmethod def _symlink(operate: dict): @@ -109,9 +144,13 @@ class OperateHanlder: @staticmethod def _hpm_download(operate: dict): + hpm_path = os.path.join(OperateHanlder.global_args.code_dir, "prebuilts/hpm/node_modules/.bin/hpm") + npm_tool_path = os.path.join(OperateHanlder.global_args.code_dir, "prebuilts/build-tools/common/nodejs/current/bin/npm") + if check_hpm_version(hpm_path, npm_tool_path): + print("hpm version is ok, skip hpm download") + return name = operate.get("name") download_dir = operate.get("download_dir") - npm_tool_path = os.path.join(OperateHanlder.global_args.code_dir, "prebuilts/build-tools/common/nodejs/current/bin/npm") symlink_dest = operate.get("symlink") if "@ohos/hpm-cli" == name: install_hpm(npm_tool_path, download_dir) @@ -185,3 +224,42 @@ class OperateHanlder: shutil.copytree(src_dir, dest_dir, symlinks=True) print(f"copy {src_dir} ---> dest: {dest_dir}") + @staticmethod + def _download_sdk(operate: dict): + # 获取操作系统信息 + system = platform.system() + if system == "Linux": + host_platform = "linux" + elif system == "Darwin": + host_platform = "darwin" + else: + print(f"Unsupported host platform: {system}") + exit(1) + + # 获取 CPU 架构信息 + machine = platform.machine() + if machine == "arm64": + host_cpu_prefix = "arm64" + elif machine == "aarch64": + host_cpu_prefix = "aarch64" + else: + host_cpu_prefix = "x86" + + # 假设 code_dir 是当前目录,可根据实际情况修改 + code_dir = get_code_dir() + prebuilts_python_dir = os.path.join(code_dir, "prebuilts", "python", f"{host_platform}-{host_cpu_prefix}") + python_dirs = [os.path.join(prebuilts_python_dir, d) for d in os.listdir(prebuilts_python_dir) if os.path.isdir(os.path.join(prebuilts_python_dir, d))] + python_dirs.sort(reverse=True) + if python_dirs: + python_path = 
os.path.join(python_dirs[0], "bin") + else: + raise Exception("python path not exist") + ohos_sdk_linux_dir = os.path.join(code_dir, "prebuilts", "ohos-sdk", "linux") + if not os.path.isdir(ohos_sdk_linux_dir): + python_executable = os.path.join(python_path, "python3") + script_path = os.path.join(code_dir, "build", "scripts", "download_sdk.py") + try: + subprocess.run([python_executable, script_path, "--branch", "master", "--product-name", operate.get("sdk_name"), "--api-version", str(operate.get("version"))], check=True) + + except subprocess.CalledProcessError as e: + print(f"Error running download_sdk.py: {e}") \ No newline at end of file diff --git a/prebuilts_service/part_prebuilts_config.json b/prebuilts_service/part_prebuilts_config.json index 92d7f7734c43688fd8b1c1d7ece1ddd1e435dab6..30e5070a6fac87f45604eb94aefb1ef1e3097f52 100644 --- a/prebuilts_service/part_prebuilts_config.json +++ b/prebuilts_service/part_prebuilts_config.json @@ -1,5 +1,220 @@ { + "crypto_framework":[ + "taihe" + ], + "ylong_runtime":[ + "rust" + ], + "ylong_json":[ + "rust" + ], + "dlp_manager":[ + "sdk", + "app", + "hvigor" + ], + "permission_manager":[ + "sdk", + "app", + "hvigor" + ], + "app_domain_verify":[ + "taihe" + ], + "distributed_bundle_framework":[ + "app" + ], "syscap_codec":[ - "base" + "taihe" + ], + "device_usage_statistics":[ + "app", + "sdk" + ], + "resource_schedule_service":[ + "app", + "sdk" + ], + "background_task_mgr":[ + "app", + "sdk" + ], + "work_scheduler":[ + "app", + "sdk" + ], + "bundle_tool":[ + "app", + "hvigor" + ], + "bundle_framework":[ + "app", + "sdk" + ], + "device_manager":[ + "app", + "sdk" + ], + "wallpaper_mgr":[ + "app" + ], + "power_manager":[ + "app", + "hvigor", + "sdk", + "taihe" + ], + "ability_runtime":[ + "app", + "hvigor", + "sdk" + ], + "form_fwk":[ + "app", + "sdk" + ], + "pasteboard":[ + "app", + "sdk" + ], + "distributed_notification_service":[ + "app", + "sdk" + ], + "init":[ + "taihe" + ], + "usb_manager":[ + "app", + "sdk", + 
"taihe" + ], + "image_framework":[ + "taihe" + ], + "bluetooth":[ + "taihe" + ], + "hiprofiler":[ + "sdk", + "app" + ], + "jsframework":[ + "npm_install" + ], + "auth_widget":[ + "sdk", + "app" + ], + "request":[ + "rust" + ], + "imf":[ + "sdk", + "app" + ], + "safwk":[ + "rust" + ], + "samgr":[ + "rust" + ], + "datamgr_service":[ + "rust" + ], + "location":[ + "sdk", + "taihe", + "app" + ], + "updater":[ + "rust" + ], + "display_manager":[ + "taihe" + ], + "core_service":[ + "app" + ], + "media_library":[ + "sdk", + "app" + ], + "ringtone_library":[ + "sdk", + "app" + ], + "system_resources":[ + "sdk", + "app" + ], + "wifi":[ + "sdk", + "app", + "hvigor", + "taihe" + ], + "netstack":[ + "rust" + ], + "telephony_data":[ + "sdk", + "app" + ], + "ipc":[ + "rust" + ], + "rust_libc":[ + "rust" + ], + "user_certificate_manager":[ + "sdk" + ], + "graphic_3d":[ + "AGP" + ], + "user_file_service":[ + "sdk", + "app" + ], + "user_auth_framework":[ + "taihe" + ], + "distributed_hardware_fwk":[ + "app", + "sdk" + ], + "file_api":[ + "rust" + ], + "jsvm":[ + "ark_js" + ], + "packing_tool":[ + "packing_tool", + "app" + ], + "hilog":[ + "rust" + ], + "hitrace":[ + "rust", + "sdk" + ], + "hicollie":[ + "rust" + ], + "hisysevent":[ + "rust" + ], + "c_utils":[ + "rust" + ], + "input":[ + "rust" + ], + "faultloggerd":[ + "rust", + "sdk" ] } \ No newline at end of file diff --git a/prebuilts_service/part_prebuilts_config.py b/prebuilts_service/part_prebuilts_config.py index 585595414a74dc6ecce98f8ae0810f15fad0136f..135afd96046d1a24e5df6e81907cb0c912820fa3 100644 --- a/prebuilts_service/part_prebuilts_config.py +++ b/prebuilts_service/part_prebuilts_config.py @@ -24,6 +24,7 @@ def get_parts_tag_config(part_names: list) -> set: all_required_tags = set() for part in part_names: all_required_tags.update(_get_tags_by_part(config_data, part)) + all_required_tags.add("base") print( "Required tags for parts {}: {}".format( ",".join(part_names), sorted(all_required_tags) diff --git 
a/prebuilts_service/pool_downloader.py b/prebuilts_service/pool_downloader.py index 454c864e78eed280777e357e66d4b6aa269099b8..72d4882f41c6364beb7086e445dca05afa809ca9 100644 --- a/prebuilts_service/pool_downloader.py +++ b/prebuilts_service/pool_downloader.py @@ -29,8 +29,7 @@ import traceback import threading from concurrent.futures import ThreadPoolExecutor, as_completed from multiprocessing import cpu_count -from urllib.request import urlopen -from functools import partial +import requests class PoolDownloader: @@ -94,7 +93,7 @@ class PoolDownloader: unzip_dir = operate.get("unzip_dir") unzip_filename = operate.get("unzip_filename") local_path = get_local_path(download_root, remote_url) - + self._adaptive_print(f"start deal {remote_url}") mark_file_exist, mark_file_path = check_sha256_by_mark(remote_url, unzip_dir, unzip_filename) # 检查解压的文件是否和远程一致 if mark_file_exist: @@ -128,6 +127,7 @@ class PoolDownloader: extract_compress_files_and_gen_mark(local_path, unzip_dir, mark_file_path) self._adaptive_print(f"{local_path} extracted to {unzip_dir}") + def _try_download(self, remote_url: str, local_path: str): max_retry_times = 3 # 创建下载目录 @@ -161,19 +161,24 @@ class PoolDownloader: def _download_remote_file(self, remote_url: str, local_path: str, progress_task_id): buffer_size = 32768 progress = self.progress - with urlopen(remote_url) as response: - total_size = int(response.info().get("Content-Length", 0)) - + # 使用requests库进行下载 + with requests.get(remote_url, stream=True, timeout=(30, 600)) as response: + response.raise_for_status() # 检查HTTP错误 + + total_size = int(response.headers.get("Content-Length", 0)) if progress: progress.update(progress_task_id, total=total_size) progress.start_task(progress_task_id) - - with open(local_path, "wb") as dest_file: - for data in iter(partial(response.read, buffer_size), b""): - dest_file.write(data) - self._update_progress(progress_task_id, len(data)) + self._save_to_local(response, local_path, buffer_size, progress_task_id) 
self._adaptive_print(f"Downloaded {local_path}") + def _save_to_local(self, response: requests.Response, local_path: str, buffer_size: int, progress_task_id): + with os.fdopen(os.open(local_path, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, mode=0o640), 'wb') as dest_file: + for chunk in response.iter_content(chunk_size=buffer_size): + if chunk: # 过滤掉保持连接的chunk + dest_file.write(chunk) + self._update_progress(progress_task_id, len(chunk)) + def _update_progress(self, task_id, advance): if self.progress: self.progress.update(task_id, advance=advance) diff --git a/prebuilts_service/shell/init_ohpm.sh b/prebuilts_service/shell/init_ohpm.sh index c976ed6c4428cab903b6dbaa67542f1988b01480..64a049e92ec9c91a01e470cca2ba02fa3819723f 100644 --- a/prebuilts_service/shell/init_ohpm.sh +++ b/prebuilts_service/shell/init_ohpm.sh @@ -64,12 +64,7 @@ cat $HOME/.npmrc | grep 'lockfile=false' > /dev/null || echo 'lockfile=false' >> function init_ohpm() { TOOLS_INSTALL_DIR="${code_dir}/prebuilts/build-tools/common" pushd ${TOOLS_INSTALL_DIR} > /dev/null - if [[ ! -f "${TOOLS_INSTALL_DIR}/oh-command-line-tools/ohpm/bin/ohpm" ]]; then - echo "[OHOS INFO] download oh-command-line-tools" - wget https://repo.huaweicloud.com/harmonyos/ohpm/5.0.2/oh-command-line-tools-20240715.zip -O ohcommandline-tools-linux.zip - unzip ohcommandline-tools-linux.zip - fi - OHPM_HOME=${TOOLS_INSTALL_DIR}/oh-command-line-tools/ohpm/bin + OHPM_HOME=${TOOLS_INSTALL_DIR}/../../tool/command-line-tools/ohpm/bin chmod +x ${OHPM_HOME}/ohpm export PATH=${OHPM_HOME}:$PATH chmod +x ${OHPM_HOME}/init @@ -88,6 +83,8 @@ function init_ohpm() { echo "[OHOS INFO] installing pnpm..." 
def process_variants_default(part_data, parts_path_info, part_name, subsystem_name, components_json):
    """Package the 'variants_default' pseudo-component for HPM.

    Copies the preloader/variant configuration files (rk3568 preloader output
    plus the shared default_deps.json) into
    out/component_package/variants/variants_default/config, writes the
    bundle.json describing the package, adds placeholder LICENSE/README.md
    files and finishes the component build.

    Args:
        part_data: dict with at least 'root_path' and 'out_path'.
        parts_path_info, part_name, subsystem_name, components_json: accepted
            only for signature compatibility with the function_map dispatch
            table; unused here.

    Raises:
        FileNotFoundError: if any expected preloader output file is missing.
    """
    preloader_path = os.path.join(part_data.get('root_path'), 'out', 'preloader', 'rk3568')
    # Files that make up the variants_default package.
    variants_default_source_files = [
        os.path.join(preloader_path, 'build_config.json'),
        os.path.join(part_data.get('root_path'), 'build', 'indep_configs', 'variants', 'common', 'default_deps.json'),
        os.path.join(preloader_path, 'features.json'),
        os.path.join(preloader_path, 'parts_config.json'),
        os.path.join(preloader_path, 'system', 'etc', 'syscap.json'),
        os.path.join(preloader_path, 'system', 'etc', 'param', 'syscap.para'),
        os.path.join(preloader_path, 'system', 'etc', 'SystemCapability.json'),
    ]

    variants_root = os.path.join(part_data.get('out_path'), 'component_package', 'variants', 'variants_default')
    variants_component_path = os.path.join(variants_root, 'config')
    try:
        os.makedirs(variants_component_path, exist_ok=True)
        for source_file in variants_default_source_files:
            if not os.path.exists(source_file):
                raise FileNotFoundError(f"Source file not found: {source_file}")
            shutil.copy2(source_file, variants_component_path)
        # BUG FIX: message typo "confiauration" -> "configuration".
        print("All configuration files copied successfully")

        bundle_content = generate_variants_default_bundle_info()
        bundle_path = os.path.join(variants_root, 'bundle.json')
        _create_bundle_json(bundle_path, bundle_content)

        # HPM expects LICENSE/README.md to exist; placeholder content suffices.
        with open(os.path.join(variants_root, 'LICENSE'), 'w') as file:
            file.write("license")
        with open(os.path.join(variants_root, 'README.md'), 'w') as file:
            file.write("readme")

        _finish_component_build(part_data)
    except Exception as e:
        print(f"Error processing variants_default: {str(e)}")
        raise


def generate_variants_default_bundle_info():
    """Return the bundle.json payload published for the variants_default package."""
    return {
        "name": "@ohos/variants_default",
        "description": "",
        "version": "3.1.0-snapshot",
        "license": "Apache License 2.0",
        "publishAs": "binary",
        "segment": {
            "destPath": "variants/variants_default"
        },
        "dirs": {
            "config": [
                "config/*"
            ]
        },
        "scripts": {},
        "component": {
            "name": "variants_default",
            "subsystem": "build",
            "syscap": [],
            "features": [],
            "adapted_system_type": [],
            "rom": "",
            "ram": "",
            "deps": {
                "components": [
                    "musl",
                    "linux",
                    "googletest"
                ],
                "third_party": []
            },
            "build": {
                "sub_component": [],
                "inner_kits": [],
                "test": []
            }
        },
        "os": "linux",
        "buildArch": "x86",
        "dependencies": {}
    }
_generate_runtime_core_build_gn(): - gn_path = os.path.join(args.get("out_path"), "component_package", args.get("part_path"), - "innerapis", module, "BUILD.gn") - fd = os.open(gn_path, os.O_WRONLY | os.O_CREAT, mode=0o640) - fp = os.fdopen(fd, 'w') - _generate_import(fp) - _generate_configs(fp, module) - _generate_prebuilt_shared_library(fp, json_data.get('type'), module) - _generate_public_configs(fp, module) - _list = _generate_public_deps(fp, module, deps, components_json, public_deps_list) - _generate_other(fp, args, json_data, module) - _generate_end(fp) - print("_generate_build_gn has done ") - fp.close() - return _list - - def _handle_module_runtime_core(args, components_json, module): public_deps_list = [] if _is_innerkit(components_json, args.get("part_name"), module) == False: @@ -864,102 +932,13 @@ def process_runtime_core(part_data, parts_path_info, part_name, subsystem_name, _finish_component_build(part_data) -def process_drivers_interface_display(part_data, parts_path_info, part_name, subsystem_name, components_json): - part_path = _get_parts_path(parts_path_info, part_name) - if part_path is None: - return - part_data.update({"subsystem_name": subsystem_name, "part_name": part_name, - "part_path": part_path}) - modules = _parse_module_list(part_data) - print('modules', modules) - if len(modules) == 0: - return - is_component_build = False - _public_deps_list = [] - for module in modules: - module_deps_list = _handle_module(part_data, components_json, module) - if module_deps_list: - _public_deps_list.extend(module_deps_list) - is_component_build = True - lib_out_dir = os.path.join(part_data.get("out_path"), "component_package", - part_data.get("part_path"), "innerapis", "display_commontype_idl_headers", "libs") - if not os.path.exists(lib_out_dir): - os.makedirs(lib_out_dir) - file_path = os.path.join(lib_out_dir, 'libdisplay_commontype_idl_headers') - with open(file_path, 'wb') as file: - pass - if is_component_build: - 
_copy_required_docs(part_data, _public_deps_list) - _finish_component_build(part_data) - - -def process_drivers_interface_usb(part_data, parts_path_info, part_name, subsystem_name, components_json): - part_path = _get_parts_path(parts_path_info, part_name) - if part_path is None: - return - part_data.update({"subsystem_name": subsystem_name, "part_name": part_name, - "part_path": part_path}) - modules = _parse_module_list(part_data) - print('modules', modules) - if len(modules) == 0: - return - is_component_build = False - _public_deps_list = [] - for module in modules: - module_deps_list = _handle_module(part_data, components_json, module) - if module_deps_list: - _public_deps_list.extend(module_deps_list) - is_component_build = True - lib_out_dir = os.path.join(part_data.get("out_path"), "component_package", - part_data.get("part_path"), "innerapis", "usb_idl_headers_1.1", "libs") - if not os.path.exists(lib_out_dir): - os.makedirs(lib_out_dir) - file_path = os.path.join(lib_out_dir, 'libusb_idl_headers_1.1') - with open(file_path, 'wb') as file: - pass - if is_component_build: - _copy_required_docs(part_data, _public_deps_list) - _finish_component_build(part_data) - - -def process_drivers_interface_ril(part_data, parts_path_info, part_name, subsystem_name, components_json): - part_path = _get_parts_path(parts_path_info, part_name) - if part_path is None: - return - part_data.update({"subsystem_name": subsystem_name, "part_name": part_name, - "part_path": part_path}) - modules = _parse_module_list(part_data) - print('modules', modules) - if len(modules) == 0: - return - is_component_build = False - _public_deps_list = [] - for module in modules: - module_deps_list = _handle_module(part_data, components_json, module) - if module_deps_list: - _public_deps_list.extend(module_deps_list) - is_component_build = True - lib_out_dir = os.path.join(part_data.get("out_path"), "component_package", - part_data.get("part_path"), "innerapis", "ril_idl_headers", "libs") - if not 
def _do_copy_static_deps_file(args, out_path, lib_path, toolchain):
    """Copy one transitive static library into the part's common deps dir.

    Destination is <out>/component_package/<part_path>/common/<toolchain>/deps,
    shared between all innerapis of the part (per toolchain).

    Returns:
        True if the copy succeeded, False otherwise.
    """
    static_lib_path = os.path.join(out_path, lib_path)
    lib_out_dir = os.path.join(out_path, "component_package",
                               args.get("part_path"), "common", toolchain, "deps")
    lib_status = _copy_file(static_lib_path, lib_out_dir) or False
    return lib_status


def read_deps_from_ninja_file(ninja_file, prefix):
    """Return the tokens of the first line in *ninja_file* starting with *prefix*.

    The matched line is a ninja build statement, so the whitespace-split token
    list contains the leading "build" keyword followed by output/input paths.
    Returns [] when no line matches.
    """
    print("ninja file: ", ninja_file)
    with open(ninja_file, 'r') as f:
        for line in f:
            stripped = line.strip()
            if stripped.startswith(prefix):
                return stripped.split(' ')
    return []


def copy_static_deps_file(args, label, module, so_path):
    """Copy the transitive .a dependencies of *so_path* for every toolchain.

    For each toolchain (including the default "" toolchain whose ninja files
    sit at the out root), locates the target's .ninja file, extracts the
    static libraries from its build statement, copies each deduplicated,
    non-basic .a into the part's common deps directory and records the names
    in args["static_deps"] under the key "<toolchain>_<module>".

    Returns:
        True if at least one library was copied.
    """
    toolchains = set(args.get("toolchain_info").keys())
    toolchains.add("")  # "" == default toolchain
    lib_status = False
    out_path = args.get("out_path")
    for toolchain in toolchains:
        gn_dir = (label.split(':')[0]).split('//')[1]
        target_name = label.split(':')[1]
        ninja_file = os.path.join(out_path, toolchain, "obj", gn_dir, target_name + ".ninja")
        if not os.path.exists(ninja_file):
            continue
        prefix = "build " + os.path.join(toolchain, so_path)
        deps_libs = read_deps_from_ninja_file(ninja_file, prefix)
        static_deps = []
        toolchain_module = toolchain + "_" + module
        for lib in deps_libs:
            lib_name = os.path.basename(lib)
            if lib_name in static_deps:
                print("lib_name: {} already in static_deps".format(lib_name))
                continue
            if lib.endswith(".a") and lib != so_path and is_not_basic_lib(lib):
                static_deps.append(lib_name)
                lib_status = _do_copy_static_deps_file(args, out_path, lib, toolchain) or lib_status
        args.get("static_deps")[toolchain_module] = static_deps
        print("copy static deps: ", static_deps)
    return lib_status
os.path.join('includes', include_dir) + fp.write(' "{}",\n'.format(include_dir)) + fp.write(' ]\n') + + def _generate_prebuilt_target(fp, target_type, module, is_ohos_ets_copy=False): if target_type == 'static_library': fp.write('ohos_prebuilt_static_library("' + module + '") {\n') + elif target_type == 'group': + fp.write('\nohos_shared_headers("' + module + '") {\n') elif target_type == 'executable': fp.write('ohos_prebuilt_executable("' + module + '") {\n') elif module != 'ipc_core' and (target_type == 'etc' or target_type == 'copy'): @@ -1542,11 +1555,6 @@ def _generate_public_configs(fp, module): # 目前特殊处理的依赖关系映射 _DEPENDENCIES_MAP = { - ('samgr', 'samgr_proxy'): ["ipc:ipc_core"], - ('napi', 'ace_napi'): ["ets_runtime:libark_jsruntime"], - ('ability_runtime', 'abilitykit_native'): ["ipc:ipc_napi"], - ('ipc', 'ipc_core'): ["c_utils:utils"], - ('input', 'libmmi-client'): ["eventhandler:libeventhandler"], ('ets_runtime', 'libark_jsruntime'): ["runtime_core:libarkfile_static"], } @@ -1557,7 +1565,7 @@ def _public_deps_special_handler(module, args): return _DEPENDENCIES_MAP.get((_part_name, module), []) -def _generate_public_deps(fp, module, deps: list, components_json, public_deps_list: list, args): +def _generate_public_external_deps(fp, module, deps: list, components_json, public_deps_list: list, args): fp.write(' public_external_deps = [\n') for dep in deps: public_external_deps = _get_public_external_deps(components_json, dep) @@ -1600,7 +1608,8 @@ def _generate_other(fp, args, json_data, module, is_ohos_ets_copy=False): so_name = output.split('/')[-1] if json_data.get('type') == 'copy' and module != 'ipc_core': fp.write(' copy_linkable_file = true \n') - fp.write(' source = "libs/' + so_name + '"\n') + if json_data.get('type') != 'group': + fp.write(' source = "libs/' + so_name + '"\n') fp.write(' part_name = "' + args.get("part_name") + '"\n') fp.write(' subsystem_name = "' + args.get("subsystem_name") + '"\n') @@ -1650,64 +1659,100 @@ def 
_copy_rust_crate_info(fp, json_data): fp.write(f' rust_crate_type = \"{json_data.get("rust_crate_type")}\"\n') -def _generate_static_deps(fp, deps: list): +def _get_static_deps(args, module, toolchain): + default_toolchain_module = toolchain + "_" + module + return args.get("static_deps").get(default_toolchain_module, []) + + +def _generate_static_public_deps_string(args, deps: list, toolchain: str): + public_deps_str = "" if not deps: - return - fp.write(' public_deps = [\n') + return "" + public_deps_str += ' public_deps = [\n' for dep in deps: - fp.write(f""" ":{dep}", \n""") - fp.write(' ]\n') + public_deps_str += f""" ":{dep}", \n""" + public_deps_str += ' ]\n' + return public_deps_str -def _get_deps_static_lib(args, module): - files = [] - static_deps_path = os.path.join(args.get("out_path"), "component_package", args.get("part_path"), - "innerapis", module, "libs", "deps") - if os.path.exists(static_deps_path): - files = [f for f in os.listdir(static_deps_path) if os.path.isfile(os.path.join(static_deps_path, f))] - return files +def _generate_static_deps_target_string(args, deps: list, toolchain: str): + # target_path: part_name/innerapis/${innerapi_name}/${toolchain}/BUILD.gn + # static_lib_path: part_name/common/${toolchain}/deps + if toolchain: + source_prefix = os.path.join("../../../common", toolchain, "deps/") + else: + source_prefix = os.path.join("../../common", toolchain, "deps/") + output_prefix = os.path.join("common", toolchain, "deps/") + target_string = "" + for dep in deps: + target_string += '\n' + target_string += 'ohos_prebuilt_static_library("' + dep + '") {\n' + target_string += ' source = "' + source_prefix + dep + '"\n' + target_string += ' output = "' + output_prefix + dep + '"\n' + target_string += ' part_name = "' + args.get("part_name") + '"\n' + target_string += ' subsystem_name = "' + args.get("subsystem_name") + '"\n' + target_string += '}' + return target_string -def _generate_inner_static_deps(fp, args, deps: list): - for dep 
in deps: - fp.write('\n') - fp.write('ohos_prebuilt_static_library("' + dep + '") {\n') - fp.write(' source = "libs/deps/' + dep + '"\n') - fp.write(' part_name = "' + args.get("part_name") + '"\n') - fp.write(' subsystem_name = "' + args.get("subsystem_name") + '"\n') - fp.write('}') +def _generate_static_deps_target(fp, args, deps: list, toolchain): + target_string = _generate_static_deps_target_string(args, deps, toolchain) + fp.write(target_string) + + +def _generate_static_public_deps(fp, args, deps: list, toolchain): + public_deps_string = _generate_static_public_deps_string(args, deps, toolchain) + fp.write(public_deps_string) def _generate_build_gn(args, module, json_data, deps: list, components_json, public_deps_list, is_ohos_ets_copy=False): gn_path = os.path.join(args.get("out_path"), "component_package", args.get("part_path"), "innerapis", module, "BUILD.gn") - static_deps_files = _get_deps_static_lib(args, module) + static_deps_files = _get_static_deps(args, module, "") # 处理静态库依赖 fd = os.open(gn_path, os.O_WRONLY | os.O_CREAT, mode=0o640) fp = os.fdopen(fd, 'w') - _generate_import(fp, is_ohos_ets_copy) - _generate_configs(fp, module, json_data, args.get('part_name')) _target_type = json_data.get('type') + _generate_import(fp, is_ohos_ets_copy) + if _target_type != "group": + _generate_configs(fp, module, json_data, args.get('part_name')) _generate_prebuilt_target(fp, _target_type, module, is_ohos_ets_copy) - _generate_public_configs(fp, module) - _list = _generate_public_deps(fp, module, deps, components_json, public_deps_list, args) - _generate_static_deps(fp, static_deps_files) + if _target_type == "group": + _generate_group_configs(fp, module, json_data, args.get('part_name')) + else: + _generate_public_configs(fp, module) # 写入public_configs的,group就不写入 + if _target_type != "group": + _list = _generate_public_external_deps(fp, module, deps, components_json, public_deps_list, args) + else: + _list = public_deps_list + _generate_static_public_deps(fp, 
args, static_deps_files, "") # 处理静态库依赖 if _target_type == "rust_library" or _target_type == "rust_proc_macro": _copy_rust_crate_info(fp, json_data) _generate_rust_deps(fp, json_data, components_json) _generate_other(fp, args, json_data, module, is_ohos_ets_copy) _generate_end(fp) - _generate_inner_static_deps(fp, args, static_deps_files) + _generate_static_deps_target(fp, args, static_deps_files, "") # 处理静态库依赖 print(f"{module}_generate_build_gn has done ") fp.close() return _list -def _toolchain_gn_modify(gn_path, file_name, toolchain_gn_file): - if os.path.isfile(gn_path) and file_name: +def _toolchain_gn_modify(args, module, toolchain_name, gn_path, so_name, toolchain_gn_file): + if os.path.isfile(gn_path) and so_name: with open(gn_path, 'r') as f: _gn = f.read() pattern = r"libs/(.*.)" - toolchain_gn = re.sub(pattern, 'libs/' + file_name + '\"', _gn) + toolchain_gn = re.sub(pattern, 'libs/' + so_name + '\"', _gn) + # 处理静态库依赖传递 + static_deps = _get_static_deps(args, module, toolchain_name) + public_deps_str = _generate_static_public_deps_string(args, static_deps, toolchain_name) + static_deps_target_str = _generate_static_deps_target_string(args, static_deps, toolchain_name) + public_deps_pattern = r" public_deps\s*=\s*\[\s*([^]]*)\s*\]" + toolchain_gn = re.sub(public_deps_pattern, public_deps_str, toolchain_gn, re.DOTALL) + static_deps_target_pattern = re.compile(r'ohos_prebuilt_static_library\("([^"]+\.a)"\)\s*\{[^}]*\}', re.DOTALL) + toolchain_gn = static_deps_target_pattern.sub("", toolchain_gn) + # toolchain_gn = re.sub(r"[\n]{2,}", "\n", toolchain_gn, re.DOTALL) # 删除多余换行符 + toolchain_gn += "\n" + toolchain_gn += static_deps_target_str fd = os.open(toolchain_gn_file, os.O_WRONLY | os.O_CREAT, mode=0o640) fp = os.fdopen(fd, 'w') fp.write(toolchain_gn) @@ -1731,14 +1776,14 @@ def _toolchain_gn_copy(args, module, out_name): for i in args.get("toolchain_info").keys(): lib_out_dir = os.path.join(args.get("out_path"), "component_package", args.get("part_path"), 
"innerapis", module, i, "libs") - file_name = _get_toolchain_gn_file(lib_out_dir, out_name) - if not file_name: + so_name = _get_toolchain_gn_file(lib_out_dir, out_name) + if not so_name: continue toolchain_gn_file = os.path.join(args.get("out_path"), "component_package", args.get("part_path"), "innerapis", module, i, "BUILD.gn") if not os.path.exists(toolchain_gn_file): os.mknod(toolchain_gn_file) - _toolchain_gn_modify(gn_path, file_name, toolchain_gn_file) + _toolchain_gn_modify(args, module, i, gn_path, so_name, toolchain_gn_file) def _parse_module_list(args): @@ -1920,20 +1965,33 @@ def _get_component_check(local_test) -> list: return check_list +def generate_made_in_mark_file(args): + from datetime import datetime + import pytz + str_time = datetime.now(pytz.timezone('Asia/Shanghai')).strftime("%Y_%m_%d_%H_%M_%S") + build_origin = args.get("build_origin", "") + if not build_origin: + return + mark_file = os.path.join(args.get("out_path"), "component_package", args.get("part_path"), f"made_in_{build_origin}") + basic_dir = os.path.dirname(mark_file) + os.makedirs(basic_dir, exist_ok=True) + with open(f"{mark_file}_{str_time}", 'w') as f: + f.write(f"the hpm package is made in {build_origin}, {str_time}") + + def _package_interface(args, parts_path_info, part_name, subsystem_name, components_json): part_path = _get_parts_path(parts_path_info, part_name) if part_path is None: return args.update({"subsystem_name": subsystem_name, "part_name": part_name, "part_path": part_path}) + generate_made_in_mark_file(args) if part_name in [ "musl", # 从obj/third_party/musl/usr 下提取到includes和libs "developer_test", # 同rust - "drivers_interface_display", # 驱动的, 新建一个libs目录/ innerapi同名文件 "runtime_core", # 编译参数, 所有下面的innerapi的cflags都不 - "drivers_interface_usb", # 同驱动 - "drivers_interface_ril", # 同驱动 "skia", + "variants_default", ]: _process_part(args, parts_path_info, part_name, subsystem_name, components_json) else: @@ -1954,8 +2012,30 @@ def _get_exclusion_list(root_path): return 
def additional_comoponents_json():
    """Hand-maintained component entries merged into the generated components.json.

    Covers pseudo-components the generated file does not describe: rust
    (third_party), developer_test (testfwk) and the variants_default package.
    (Spelling of the function name is kept as-is — "comoponents" — because
    callers reference it by this exact name.)
    """
    def _entry(path, subsystem):
        # Every extra component shares the same empty-innerapi shape.
        return {
            "innerapis": [],
            "path": path,
            "subsystem": subsystem,
            "variants": [],
        }

    return {
        "rust": _entry("third_party/rust", "thirdparty"),
        "developer_test": _entry("test/testfwk/developer_test", "testfwk"),
        "variants_default": _entry("variants/variants_default", "build"),
    }
build_origin } for key, value in part_subsystem.items(): part_name = key @@ -2036,7 +2108,8 @@ def main(): organization_name=py_args.organization_name, os_arg=py_args.os_arg, build_arch_arg=py_args.build_arch, - local_test=py_args.local_test) + local_test=py_args.local_test, + build_origin=py_args.build_origin) if __name__ == '__main__':