#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2024 Huawei Device Co., Ltd.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
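"""Package built OpenHarmony components into HPM binary packages.

For each innerapi module of a part, this script collects the prebuilt
library, its public headers and a generated BUILD.gn, plus bundle.json,
LICENSE and README, under <out_path>/component_package.  It can then run
`hpm pack` and gather the resulting .tgz files under <root_path>/out/hpm_packages.
"""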
import subprocess
import sys
import stat
import os
import argparse
import shutil
import json
import time
import re
import urllib.request


def _get_args():
    parser = argparse.ArgumentParser(add_help=True)
    parser.add_argument("-op", "--out_path", default=r"./", type=str,
                        help="path of out. default: ./", )
    parser.add_argument("-rp", "--root_path", default=r"./", type=str,
                        help="path of root. default: ./", )
    parser.add_argument("-cl", "--components_list", default="", type=str,
                        help="components list: pass in the component names "
                             "separated by commas, e.g. A,B,C. "
                             "default: none", )
    parser.add_argument("-bt", "--build_type", default=0, type=int,
                        help="build type. default: 0", )
    parser.add_argument("-on", "--organization_name", default='ohos', type=str,
                        help="organization name. default: 'ohos'", )
    parser.add_argument("-os", "--os_arg", default=r"linux", type=str,
                        help="target os. default: linux", )
    parser.add_argument("-ba", "--build_arch", default=r"x86", type=str,
                        help="build arch. default: x86", )
    parser.add_argument("-lt", "--local_test", default=0, type=int,
                        help="local test. default: 0 (not local)", )
    args = parser.parse_args()
    return args


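# Map a GN `public_deps` label onto the "component:innerapi" form used by
# `public_external_deps`, based on the innerapis recorded in components.json.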
def _check_label(public_deps, value):
    innerapis = value["innerapis"]
    for _innerapi in innerapis:
        if _innerapi:
            label = _innerapi.get("label")
            if public_deps == label:
                return label.split(':')[-1]
    return ""


def _get_public_external_deps(data, public_deps):
    if not isinstance(data, dict):
        return ""
    for key, value in data.items():
        if not isinstance(value, dict):
            continue
        _data = _check_label(public_deps, value)
        if _data:
            return f"{key}:{_data}"
    return ""


def _is_innerkit(data, part, module):
    if not isinstance(data, dict):
        return False

    part_data = data.get(part)
    if not isinstance(part_data, dict):
        return False
    module_list = []
    for i in part_data["innerapis"]:
        if i:
            module_list.append(i["name"])
    if module in module_list:
        return True
    return False


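# Load <out_path>/build_configs/parts_info/components.json, which describes
# every component (part), its subsystem, source path and innerapi labels.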
def _get_components_json(out_path):
    jsondata = ""
    json_path = os.path.join(out_path, "build_configs", "parts_info", "components.json")
    with os.fdopen(os.open(json_path, os.O_RDWR | os.O_CREAT, stat.S_IWUSR | stat.S_IRUSR),
                   'r', encoding='utf-8') as f:
        try:
            jsondata = json.load(f)
        except Exception as e:
            print('--_get_components_json parse json error--')
    return jsondata


def _handle_one_layer_json(json_key, json_data, desc_list):
    data_list = json_data.get(json_key)
    if isinstance(data_list, list) and len(data_list) >= 1:
        desc_list.extend(data_list)
    else:
        desc_list.append(data_list)


def _handle_two_layer_json(json_key, json_data, desc_list):
    for item in json_data.get(json_key):
        _include_dirs = item.get('include_dirs')
        if _include_dirs:
            desc_list.extend(_include_dirs)


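# Read the publicinfo JSON that the build emits for one innerapi module
# (<out_path>/<subsystem>/<part>/publicinfo/<module>.json).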
def _get_json_data(args, module):
    jsondata = ""
    json_path = os.path.join(args.get("out_path"),
                             args.get("subsystem_name"), args.get("part_name"), "publicinfo", module + ".json")
    with os.fdopen(os.open(json_path, os.O_RDWR | os.O_CREAT, stat.S_IWUSR | stat.S_IRUSR),
                   'r', encoding='utf-8') as f:
        try:
            jsondata = json.load(f)
        except Exception as e:
            print(json_path)
            print('--_get_json_data parse json error--')
    return jsondata


def _handle_deps_data(json_data):
    dep_list = []
    if json_data.get('public_deps'):
        _handle_one_layer_json('public_deps', json_data, dep_list)
    return dep_list


def _handle_includes_data(json_data):
    include_list = []
    if json_data.get('public_configs'):
        _handle_two_layer_json('public_configs', json_data, include_list)
    if json_data.get('all_dependent_configs'):
        _handle_two_layer_json('all_dependent_configs', json_data, include_list)
    return include_list


def _get_static_lib_path(args, json_data):
    label = json_data.get('label')
    split_label = label.split("//")[1].split(":")[0]
    real_static_lib_path = os.path.join(args.get("out_path"), "obj",
                                        split_label, json_data.get('out_name'))
    return real_static_lib_path


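# Recursively copy header-like files (.h/.hpp/.in/.inc/.inl) from src_path
# to target_path, skipping hidden directories.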
def _copy_dir(src_path, target_path):
    if not os.path.isdir(src_path):
        return False
    filelist_src = os.listdir(src_path)
    suffix_list = [".h", ".hpp", ".in", ".inc", ".inl"]
    for file in filelist_src:
        path = os.path.join(os.path.abspath(src_path), file)
        if os.path.isdir(path):
            if file.startswith("."):
                continue
            path1 = os.path.join(target_path, file)
            _copy_dir(path, path1)
        else:
            if os.path.splitext(path)[-1] not in suffix_list:
                continue
            with open(path, 'rb') as read_stream:
                contents = read_stream.read()
            if not os.path.exists(target_path):
                os.makedirs(target_path)
            path1 = os.path.join(target_path, file)
            with os.fdopen(os.open(path1, os.O_WRONLY | os.O_CREAT, mode=0o640), "wb") as write_stream:
                write_stream.write(contents)
    return True


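# Copy the exported include directories of a module into
# component_package/<part_path>/innerapis/<module>[/<toolchain>]/includes.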
def _copy_includes(args, module, includes: list):
    if module == 'ipc_single':
        includes = [
            "//foundation/communication/ipc/interfaces/innerkits/ipc_core/include",
            "//foundation/communication/ipc/ipc/native/src/core/include",
            "//foundation/communication/ipc/ipc/native/src/mock/include",
        ]
    includes_out_dir = os.path.join(args.get("out_path"), "component_package",
                                    args.get("part_path"), "innerapis", module, "includes")
    for i in args.get("toolchain_info").keys():
        toolchain_includes_out_dir = os.path.join(args.get("out_path"), "component_package",
                                                  args.get("part_path"), "innerapis", module, i, "includes")
        toolchain_lib_out_dir = os.path.join(args.get("out_path"), "component_package",
                                             args.get("part_path"), "innerapis", module, i, "libs")
        # only copy toolchain includes when the toolchain libs exist and the
        # include directory has not been created yet
        if not os.path.exists(toolchain_includes_out_dir) and os.path.exists(toolchain_lib_out_dir):
            os.makedirs(toolchain_includes_out_dir)
        else:
            continue
        for include in includes:
            part_path = args.get("part_path")
            _sub_include = include.split(f"{part_path}/")[-1]
            split_include = include.split("//")[1]
            real_include_path = os.path.join(args.get("root_path"), split_include)
            if args.get('part_name') == 'libunwind':
                _out_dir = os.path.join(toolchain_includes_out_dir, _sub_include)
                _copy_dir(real_include_path, _out_dir)
                continue
            _copy_dir(real_include_path, toolchain_includes_out_dir)
    if not os.path.exists(includes_out_dir):
        os.makedirs(includes_out_dir)
    for include in includes:
        part_path = args.get("part_path")
        _sub_include = include.split(f"{part_path}/")[-1]
        split_include = include.split("//")[1]
        real_include_path = os.path.join(args.get("root_path"), split_include)
        if args.get('part_name') == 'libunwind':
            _out_dir = os.path.join(includes_out_dir, _sub_include)
            _copy_dir(real_include_path, _out_dir)
            continue
        _copy_dir(real_include_path, includes_out_dir)
    print("_copy_includes done")


def _copy_toolchain_lib(file_name, root, _name, lib_out_dir):
    if not file_name.startswith('.') and file_name.startswith(_name):
        if not os.path.exists(lib_out_dir):
            os.makedirs(lib_out_dir)
        file = os.path.join(root, file_name)
        shutil.copy(file, lib_out_dir)


def _toolchain_lib_handler(args, toolchain_path, _name, module, toolchain_name):
    for root, dirs, files in os.walk(toolchain_path):
        for file_name in files:
            lib_out_dir = os.path.join(args.get("out_path"), "component_package",
                                       args.get("part_path"), "innerapis", module, toolchain_name, "libs")
            _copy_toolchain_lib(file_name, root, _name, lib_out_dir)


def _toolchain_static_file_path_mapping(subsystem_name, args, i):
    if subsystem_name == "thirdparty":
        subsystem_name = "third_party"
    toolchain_path = os.path.join(args.get("out_path"), i, 'obj', subsystem_name,
                                  args.get("part_name"))
    return toolchain_path


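# Copy the prebuilt library of a module (and, if present, its per-toolchain
# variants) into component_package/<part_path>/innerapis/<module>/.../libs.
# Returns True when a library (or toolchain variant) was processed.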
def _copy_lib(args, json_data, module):
    so_path = ""
    lib_status = False
    subsystem_name = args.get("subsystem_name")
    if json_data.get('type') == 'static_library':
        so_path = _get_static_lib_path(args, json_data)
    elif json_data.get('type') == 'shared_library':
        so_path = os.path.join(args.get("out_path"), subsystem_name,
                               args.get("part_name"), json_data.get('out_name'))
    elif json_data.get('type') == 'copy' and module == 'ipc_core':
        so_path = os.path.join(args.get("out_path"), subsystem_name,
                               args.get("part_name"), 'libipc_single.z.so')
    if args.get("toolchain_info").keys():
        for i in args.get("toolchain_info").keys():
            toolchain_path = os.path.join(args.get("out_path"), i, subsystem_name,
                                          args.get("part_name"))
            _name = json_data.get('out_name').split('.')[0]
            if json_data.get('type') == 'static_library':
                _name = json_data.get('out_name')
                toolchain_path = _toolchain_static_file_path_mapping(subsystem_name, args, i)
            _toolchain_lib_handler(args, toolchain_path, _name, module, i)
            lib_status = True
    if os.path.isfile(so_path):
        lib_out_dir = os.path.join(args.get("out_path"), "component_package",
                                   args.get("part_path"), "innerapis", module, "libs")
        if not os.path.exists(lib_out_dir):
            os.makedirs(lib_out_dir)
        shutil.copy(so_path, lib_out_dir)
        lib_status = True
    return lib_status


def _dirs_handler(bundlejson_out):
    dirs = dict()
    dirs['./'] = []
    directory = bundlejson_out
    for filename in os.listdir(directory):
        filepath = os.path.join(directory, filename)
        if os.path.isfile(filepath):
            dirs['./'].append(filename)
        else:
            dirs[filename] = [f"{filename}/*"]
    delete_list = ['LICENSE', 'README.md', 'README_zh.md', 'README_en.md', 'bundle.json']
    for delete_txt in delete_list:
        if delete_txt in dirs['./']:
            dirs['./'].remove(delete_txt)
    if not dirs['./']:
        del dirs['./']
    return dirs


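# Rewrite the part's bundle.json for binary publishing: set publishAs, os,
# buildArch and dirs, normalise the version to a.b.c[-suffix], update the
# organization in the package name and fill in the collected dependencies.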
def _copy_bundlejson(args, public_deps_list):
    bundlejson_out = os.path.join(args.get("out_path"), "component_package", args.get("part_path"))
    print("bundlejson_out : ", bundlejson_out)
    if not os.path.exists(bundlejson_out):
        os.makedirs(bundlejson_out)
    bundlejson = os.path.join(args.get("root_path"), args.get("part_path"), "bundle.json")
    dependencies_dict = {}
    for public_deps in public_deps_list:
        _public_dep_part_name = public_deps.split(':')[0]
        if _public_dep_part_name != args.get("part_name"):
            _public_dep = f"@{args.get('organization_name')}/{_public_dep_part_name}"
            dependencies_dict.update({_public_dep: "*"})
    if os.path.isfile(bundlejson):
        with open(bundlejson, 'r') as f:
            bundle_data = json.load(f)
            bundle_data['publishAs'] = 'binary'
            bundle_data.update({'os': args.get('os')})
            bundle_data.update({'buildArch': args.get('buildArch')})
            dirs = _dirs_handler(bundlejson_out)
            bundle_data['dirs'] = dirs
            bundle_data['version'] = str(bundle_data['version'])
            if bundle_data['version'] == '':
                bundle_data['version'] = '1.0.0'
            pattern = r'^(\d+)\.(\d+)(-[a-zA-Z]+)?$'  # matches version strings of the form a.b[-suffix]
            match = re.match(pattern, bundle_data['version'])
            if match:
                a = match.group(1)
                b = match.group(2)
                suffix = match.group(3) if match.group(3) else ""
                bundle_data['version'] = f"{a}.{b}.0{suffix}"
            if args.get('build_type') in [0, 1]:
                bundle_data['version'] += '-snapshot'
            if args.get('organization_name'):
                _name_pattern = r'@(.*.)/'
                bundle_data['name'] = re.sub(_name_pattern, '@' + args.get('organization_name') + '/',
                                             bundle_data['name'])
            if bundle_data.get('scripts'):
                bundle_data.update({'scripts': {}})
            if bundle_data.get('licensePath'):
                del bundle_data['licensePath']
            if bundle_data.get('readmePath'):
                del bundle_data['readmePath']
            bundle_data['dependencies'] = dependencies_dict
            if os.path.isfile(os.path.join(bundlejson_out, "bundle.json")):
                os.remove(os.path.join(bundlejson_out, "bundle.json"))
            with os.fdopen(os.open(os.path.join(bundlejson_out, "bundle.json"), os.O_WRONLY | os.O_CREAT, mode=0o640),
                           "w",
                           encoding='utf-8') as fd:
                json.dump(bundle_data, fd, indent=4, ensure_ascii=False)


def _copy_license(args):
    license_out = os.path.join(args.get("out_path"), "component_package", args.get("part_path"))
    print("license_out : ", license_out)
    if not os.path.exists(license_out):
        os.makedirs(license_out)
    license_file = os.path.join(args.get("root_path"), args.get("part_path"), "LICENSE")
    if os.path.isfile(license_file):
        shutil.copy(license_file, license_out)
    else:
        license_default = os.path.join(args.get("root_path"), "build", "LICENSE")
        shutil.copy(license_default, license_out)
        bundlejson_out = os.path.join(args.get("out_path"), "component_package", args.get("part_path"), 'bundle.json')
        with open(bundlejson_out, 'r') as f:
            bundle_data = json.load(f)
            bundle_data.update({"license": "Apache License 2.0"})
        if os.path.isfile(bundlejson_out):
            os.remove(bundlejson_out)
        with os.fdopen(os.open(bundlejson_out, os.O_WRONLY | os.O_CREAT, mode=0o640), "w",
                       encoding='utf-8') as fd:
            json.dump(bundle_data, fd, indent=4, ensure_ascii=False)


def _copy_readme(args):
    readme_out = os.path.join(args.get("out_path"), "component_package", args.get("part_path"))
    print("readme_out : ", readme_out)
    if not os.path.exists(readme_out):
        os.makedirs(readme_out)
    readme = os.path.join(args.get("root_path"), args.get("part_path"), "README.md")
    readme_zh = os.path.join(args.get("root_path"), args.get("part_path"), "README_zh.md")
    readme_en = os.path.join(args.get("root_path"), args.get("part_path"), "README_en.md")
    readme_out_file = os.path.join(readme_out, "README.md")
    if os.path.isfile(readme):
        shutil.copy(readme, readme_out)
    elif os.path.isfile(readme_zh):
        shutil.copy(readme_zh, readme_out_file)
    elif os.path.isfile(readme_en):
        shutil.copy(readme_en, readme_out_file)
    else:
        try:
            with os.fdopen(os.open(readme_out_file, os.O_WRONLY | os.O_CREAT, mode=0o640), 'w') as fp:
                fp.write('READ.ME')
        except FileExistsError:
            pass


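# Helpers that emit the prebuilt BUILD.gn for a packaged innerapi module:
# the import line, a config() with the include_dirs, the ohos_prebuilt_*
# target, public_configs, public_external_deps and the source/part/subsystem
# fields.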
def _generate_import(fp):
    fp.write('import("//build/ohos.gni")\n')


def _generate_configs(fp, module):
    fp.write('\nconfig("' + module + '_configs") {\n')
    fp.write('  visibility = [ ":*" ]\n')
    fp.write('  include_dirs = [\n')
    fp.write('    "includes",\n')
    if module == 'libunwind':
        fp.write('    "includes/libunwind/src",\n')
        fp.write('    "includes/libunwind/include",\n')
        fp.write('    "includes/libunwind/include/tdep-arm",\n')
    if module == 'ability_runtime':
        fp.write('    "includes/context",\n')
        fp.write('    "includes/app",\n')
    fp.write('  ]\n')
    if module == 'libunwind':
        fp.write('  cflags = [\n')
        fp.write("""    "-D_GNU_SOURCE",
    "-DHAVE_CONFIG_H",
    "-DNDEBUG",
    "-DCC_IS_CLANG",
    "-fcommon",
    "-Werror",
    "-Wno-absolute-value",
    "-Wno-header-guard",
    "-Wno-unused-parameter",
    "-Wno-unused-variable",
    "-Wno-int-to-pointer-cast",
    "-Wno-pointer-to-int-cast",
    "-Wno-inline-asm",
    "-Wno-shift-count-overflow",
    "-Wno-tautological-constant-out-of-range-compare",
    "-Wno-unused-function",\n""")
        fp.write('  ]\n')
    fp.write('  }\n')


def _generate_prebuilt_shared_library(fp, lib_type, module):
    if lib_type == 'static_library':
        fp.write('ohos_prebuilt_static_library("' + module + '") {\n')
    elif lib_type == 'executable':
        fp.write('ohos_prebuilt_executable("' + module + '") {\n')
    elif lib_type == 'etc':
        fp.write('ohos_prebuilt_etc("' + module + '") {\n')
    else:
        fp.write('ohos_prebuilt_shared_library("' + module + '") {\n')


def _generate_public_configs(fp, module):
    fp.write(f'  public_configs = [":{module}_configs"]\n')


def _public_deps_special_handler(module):
    if module == 'appexecfwk_core':
        return ["ability_base:want"]
    return []


def _generate_public_deps(fp, module, deps: list, components_json, public_deps_list: list):
    if not deps:
        return public_deps_list
    fp.write('  public_external_deps = [\n')
    for dep in deps:
        public_external_deps = _get_public_external_deps(components_json, dep)
        if len(public_external_deps) > 0:
            fp.write(f"""    "{public_external_deps}",\n""")
            public_deps_list.append(public_external_deps)
    for _public_external_deps in _public_deps_special_handler(module):
        fp.write(f"""    "{_public_external_deps}",\n""")
        public_deps_list.append(_public_external_deps)
    fp.write('  ]\n')

    return public_deps_list


def _generate_other(fp, args, json_data, module):
    so_name = json_data.get('out_name')
    if json_data.get('type') == 'copy' and module == 'ipc_core':
        so_name = 'libipc_single.z.so'
    fp.write('  source = "libs/' + so_name + '"\n')
    fp.write('  part_name = "' + args.get("part_name") + '"\n')
    fp.write('  subsystem_name = "' + args.get("subsystem_name") + '"\n')


def _generate_end(fp):
    fp.write('}')


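# _generate_build_gn stitches the helpers above into one BUILD.gn.  For a
# hypothetical shared-library module "foo" of part "bar" in subsystem "baz"
# with one external dependency, the generated file looks roughly like:
#
#   import("//build/ohos.gni")
#
#   config("foo_configs") {
#     visibility = [ ":*" ]
#     include_dirs = [
#       "includes",
#     ]
#     }
#   ohos_prebuilt_shared_library("foo") {
#     public_configs = [":foo_configs"]
#     public_external_deps = [
#       "some_part:some_innerapi",
#     ]
#     source = "libs/libfoo.z.so"
#     part_name = "bar"
#     subsystem_name = "baz"
#   }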
def _generate_build_gn(args, module, json_data, deps: list, components_json, public_deps_list):
    gn_path = os.path.join(args.get("out_path"), "component_package", args.get("part_path"),
                           "innerapis", module, "BUILD.gn")
    fd = os.open(gn_path, os.O_WRONLY | os.O_CREAT, mode=0o640)
    fp = os.fdopen(fd, 'w')
    _generate_import(fp)
    _generate_configs(fp, module)
    _generate_prebuilt_shared_library(fp, json_data.get('type'), module)
    _generate_public_configs(fp, module)
    _list = _generate_public_deps(fp, module, deps, components_json, public_deps_list)
    _generate_other(fp, args, json_data, module)
    _generate_end(fp)
    print("_generate_build_gn done")
    fp.close()
    return _list


def _toolchain_gn_modify(gn_path, file_name, toolchain_gn_file):
    if os.path.isfile(gn_path) and file_name:
        with open(gn_path, 'r') as f:
            _gn = f.read()
            pattern = r"libs/(.*.)"
            toolchain_gn = re.sub(pattern, 'libs/' + file_name + '\"', _gn)
        fd = os.open(toolchain_gn_file, os.O_WRONLY | os.O_CREAT, mode=0o640)
        fp = os.fdopen(fd, 'w')
        fp.write(toolchain_gn)
        fp.close()


def _get_toolchain_gn_file(lib_out_dir):
    file_name = ''
    try:
        file_list = os.scandir(lib_out_dir)
    except FileNotFoundError:
        return file_name
    for file in file_list:
        if not file.name.startswith('.') and file.is_file():
            file_name = file.name
    return file_name


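# For every extra toolchain, derive a per-toolchain BUILD.gn from the main
# one by pointing `source` at the library copied into <toolchain>/libs.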
def _toolchain_gn_copy(args, module):
    gn_path = os.path.join(args.get("out_path"), "component_package", args.get("part_path"),
                           "innerapis", module, "BUILD.gn")
    for i in args.get("toolchain_info").keys():
        lib_out_dir = os.path.join(args.get("out_path"), "component_package",
                                   args.get("part_path"), "innerapis", module, i, "libs")
        file_name = _get_toolchain_gn_file(lib_out_dir)
        if not file_name:
            continue
        toolchain_gn_file = os.path.join(args.get("out_path"), "component_package",
                                         args.get("part_path"), "innerapis", module, i, "BUILD.gn")
        if not os.path.exists(toolchain_gn_file):
            os.mknod(toolchain_gn_file)
        _toolchain_gn_modify(gn_path, file_name, toolchain_gn_file)


def _parse_module_list(args):
    module_list = []
    publicinfo_path = os.path.join(args.get("out_path"),
                                   args.get("subsystem_name"), args.get("part_name"), "publicinfo")
    print('publicinfo_path', publicinfo_path)
    if not os.path.exists(publicinfo_path):
        return module_list
    publicinfo_dir = os.listdir(publicinfo_path)
    for filename in publicinfo_dir:
        if filename.endswith(".json"):
            module_name = filename.split(".json")[0]
            module_list.append(module_name)
            print('filename', filename)
    print('module_list', module_list)
    return module_list


def _lib_special_handler(part_name, module, args):
    if part_name == 'mksh':
        mksh_file_path = os.path.join(args.get('out_path'), 'startup', 'init', 'sh')
        sh_out = os.path.join(args.get("out_path"), "thirdparty", "mksh")
        if os.path.isfile(mksh_file_path):
            shutil.copy(mksh_file_path, sh_out)
    if module == 'blkid':
        blkid_file_path = os.path.join(args.get('out_path'), 'filemanagement', 'storage_service', 'blkid')
        blkid_out = os.path.join(args.get("out_path"), "thirdparty", "e2fsprogs")
        if os.path.isfile(blkid_file_path):
            shutil.copy(blkid_file_path, blkid_out)
    if module == 'grpc_cpp_plugin':
        grpc_file_path = os.path.join(args.get('out_path'), 'clang_x64', 'thirdparty', 'grpc', 'grpc_cpp_plugin')
        grpc_out = os.path.join(args.get("out_path"), "thirdparty", "grpc")
        if os.path.isfile(grpc_file_path):
            shutil.copy(grpc_file_path, grpc_out)


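# Per-part packaging flow: for every innerapi module, copy its library and
# headers and generate a BUILD.gn; if anything was packaged, also write
# bundle.json, LICENSE and README and (for build_type 0/1) run `hpm pack`.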
def _generate_component_package(args, components_json):
    part_name = args.get("part_name")
    modules = _parse_module_list(args)
    print('modules', modules)
    if len(modules) == 0:
        return
    is_component_build = False
    _public_deps_list = []
    for module in modules:
        public_deps_list = []
        if not _is_innerkit(components_json, args.get("part_name"), module):
            continue
        json_data = _get_json_data(args, module)
        _lib_special_handler(part_name, module, args)
        lib_exists = _copy_lib(args, json_data, module)
        if not lib_exists:
            continue
        is_component_build = True
        includes = _handle_includes_data(json_data)
        deps = _handle_deps_data(json_data)
        _copy_includes(args, module, includes)
        _list = _generate_build_gn(args, module, json_data, deps, components_json, public_deps_list)
        if _list:
            _public_deps_list.extend(_list)
        _toolchain_gn_copy(args, module)
    if is_component_build:
        _copy_bundlejson(args, _public_deps_list)
        _copy_license(args)
        _copy_readme(args)
        if args.get("build_type") in [0, 1]:
            _hpm_status = _hpm_pack(args)
            if _hpm_status:
                _copy_hpm_pack(args)


def _get_part_subsystem(components_json: dict):
    jsondata = dict()
    try:
        for component, v in components_json.items():
            jsondata[component] = v.get('subsystem')
    except Exception as e:
        print('--_get_part_subsystem parse json error--')
    return jsondata


def _get_parts_path_info(components_json):
    jsondata = dict()
    try:
        for component, v in components_json.items():
            jsondata[component] = v.get('path')
    except Exception as e:
        print('--_get_parts_path_info parse json error--')
    return jsondata


def _get_toolchain_info(root_path):
    jsondata = ""
    json_path = os.path.join(root_path, "build", "indep_configs", "variants", "common", "toolchain.json")
    with os.fdopen(os.open(json_path, os.O_RDWR | os.O_CREAT, stat.S_IWUSR | stat.S_IRUSR),
                   'r', encoding='utf-8') as f:
        try:
            jsondata = json.load(f)
        except Exception as e:
            print('--_get_toolchain_info parse json error--')
    return jsondata


def _get_parts_path(json_data, part_name):
    parts_path = None
    if json_data.get(part_name) is not None:
        parts_path = json_data[part_name]
    return parts_path


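# Run `hpm pack` inside the generated package directory; returns 1 on
# success and 0 when the command could not be started.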
def _hpm_pack(args):
    part_path = os.path.join(args.get("out_path"), "component_package", args.get("part_path"))
    cmd = ['hpm', 'pack']
    try:
        subprocess.run(cmd, shell=False, cwd=part_path)
    except Exception as e:
        print("{} pack fail".format(args.get("part_name")))
        return 0
    print("{} pack succ".format(args.get("part_name")))
    return 1


def _copy_hpm_pack(args):
    hpm_packages_path = args.get('hpm_packages_path')
    part_path = os.path.join(args.get("out_path"), "component_package", args.get("part_path"))
    dirs = os.listdir(part_path)
    tgz_file_name = ''
    for file in dirs:
        if file.endswith(".tgz"):
            tgz_file_name = file
    tgz_file_out = os.path.join(part_path, tgz_file_name)
    if tgz_file_name:
        shutil.copy(tgz_file_out, hpm_packages_path)


def _make_hpm_packages_dir(root_path):
    _out_path = os.path.join(root_path, 'out')
    hpm_packages_path = os.path.join(_out_path, 'hpm_packages')
    os.makedirs(hpm_packages_path, exist_ok=True)
    return hpm_packages_path


def _del_exist_component_package(out_path):
    _component_package_path = os.path.join(out_path, 'component_package')
    if os.path.isdir(_component_package_path):
        try:
            print('del dir component_package start..')
            shutil.rmtree(_component_package_path)
            print('del dir component_package end..')
        except Exception as e:
            print('del dir component_package FAILED')


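# Fetch the list of components allowed to be packaged from the OpenHarmony
# CI daily-build check API; for local tests the list is chosen locally and
# the API is skipped.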
def _get_component_check(local_test) -> list:
    check_list = []
    if local_test == 0:
        contents = urllib.request.urlopen(
            "https://ci.openharmony.cn/api/daily_build/component/check/list").read().decode(
            encoding="utf-8")
        _check_json = json.loads(contents)
        try:
            check_list.extend(_check_json["data"]["dep_list"])
            check_list.extend(_check_json["data"]["indep_list"])
        except Exception as e:
            print("component check API returned unexpected data, please check the API response..")
    check_list = list(set(check_list))
    check_list = sorted(check_list)
    return check_list


def _package_interface(args, parts_path_info, part_name, subsystem_name, components_json):
    part_path = _get_parts_path(parts_path_info, part_name)
    if part_path is None:
        return
    args.update({"subsystem_name": subsystem_name, "part_name": part_name,
                 "part_path": part_path})
    _generate_component_package(args, components_json)


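# Typical use (illustrative; paths depend on the local checkout and product):
#
#   generate_component_package("out/rk3568", "/path/to/oh",
#                              components_list="c_utils,hilog",
#                              build_type=0, organization_name="ohos")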
def generate_component_package(out_path, root_path, components_list=None, build_type=0, organization_name='ohos',
                               os_arg='linux', build_arch_arg='x86', local_test=0):
    """

    Args:
        out_path: output path of the build, default: out/rk3568
        root_path: root path of the source code, default: oh/
        components_list: comma-separated names of the components to be packaged
        build_type: build type
            0: default, pack and do not change organization_name
            1: pack and change organization_name
            2: do not pack, do not change organization_name
        organization_name: default 'ohos'; replaced in the package name if different
        os_arg: target os, default: linux
        build_arch_arg: build arch, default: x86
        local_test: 1 to enable local test, 0 to disable, 2 to pack init and its deps
    Returns:

    """
    start_time = time.time()
    _check_list = _get_component_check(local_test)
    if local_test == 1 and not components_list:
        components_list = []
    elif local_test == 1 and components_list:
        components_list = components_list.split(",")
    elif local_test == 2:
        components_list = ["init", "appspawn", "safwk", "c_utils",
                           "napi", "ipc", "config_policy", "hilog", "hilog_lite", "samgr", "access_token", "common",
                           "dsoftbus", "hvb", "hisysevent", "hiprofiler", "bounds_checking_function",
                           "bundle_framework", "selinux", "selinux_adapter", "storage_service",
                           "mbedtls", "zlib", "libuv", "cJSON", "mksh", "libunwind", "toybox",
                           "e2fsprogs", "f2fs-tools"
                           ]
    elif components_list:
        components_list = [component for component in components_list.split(",") if component in _check_list]
        if not components_list:
            sys.exit("stop for no target to pack..")
    else:
        components_list = _check_list
        if not components_list:
            sys.exit("stop for no target to pack..")
    print('components_list', components_list)
    components_json = _get_components_json(out_path)
    part_subsystem = _get_part_subsystem(components_json)
    parts_path_info = _get_parts_path_info(components_json)
    hpm_packages_path = _make_hpm_packages_dir(root_path)
    toolchain_info = _get_toolchain_info(root_path)
    # delete any existing component_package output
    _del_exist_component_package(out_path)
    args = {"out_path": out_path, "root_path": root_path,
            "os": os_arg, "buildArch": build_arch_arg, "hpm_packages_path": hpm_packages_path,
            "build_type": build_type, "organization_name": organization_name,
            "toolchain_info": toolchain_info
            }
    for key, value in part_subsystem.items():
        part_name = key
        subsystem_name = value
        # package everything when components_list is empty, otherwise only the
        # parts named in components_list
        if not components_list:
            _package_interface(args, parts_path_info, part_name, subsystem_name, components_json)
        for component in components_list:
            if part_name == component:
                _package_interface(args, parts_path_info, part_name, subsystem_name, components_json)

    end_time = time.time()
    run_time = end_time - start_time
    print("generate_component_package out_path", out_path)
    print(f"Generating binary product package takes time: {run_time}")


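# Command-line entry point.  Example invocation (illustrative; the script
# name depends on where this file lives in the build tree):
#
#   python3 generate_component_package.py -op out/rk3568 -rp /path/to/oh -cl c_utils,hilog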
def main():
    py_args = _get_args()
    generate_component_package(py_args.out_path,
                               py_args.root_path,
                               components_list=py_args.components_list,
                               build_type=py_args.build_type,
                               organization_name=py_args.organization_name,
                               os_arg=py_args.os_arg,
                               build_arch_arg=py_args.build_arch,
                               local_test=py_args.local_test)


if __name__ == '__main__':
    main()