diff --git a/ci/builders/linux_fuchsia.json b/ci/builders/linux_fuchsia.json index d003edb6d054e..947226c7f5c56 100644 --- a/ci/builders/linux_fuchsia.json +++ b/ci/builders/linux_fuchsia.json @@ -230,13 +230,14 @@ "parameters": [ "--engine-version", "${REVISION}", + "--skip-build", "--upload" ], "script": "flutter/tools/fuchsia/build_fuchsia_artifacts.py", "language": "python3" }, { - "name": "Upload debug symbols to CIPD for arch: arm64", + "name": "Upload to CIPD for arch: arm64", "parameters": [ "--engine-version", "${REVISION}", @@ -254,7 +255,7 @@ "language": "python3" }, { - "name": "Upload debug symbols to CIPD for arch: x64", + "name": "Upload to CIPD for arch: x64", "parameters": [ "--engine-version", "${REVISION}", diff --git a/shell/platform/fuchsia/dart_runner/BUILD.gn b/shell/platform/fuchsia/dart_runner/BUILD.gn index 1a626e1f7af47..e36ce92743002 100644 --- a/shell/platform/fuchsia/dart_runner/BUILD.gn +++ b/shell/platform/fuchsia/dart_runner/BUILD.gn @@ -190,6 +190,12 @@ template("aot_runner_package") { "//flutter/shell/platform/fuchsia/runtime/dart/profiler_symbols:dart_aot_runner", "target_gen_dir") + "/dart_aot_runner.dartprofilersymbols") + inputs = [ + vmservice_snapshot, + observatory_archive_file, + dart_profiler_symbols, + ] + resources += [ { path = vmservice_snapshot diff --git a/tools/fuchsia/build_fuchsia_artifacts.py b/tools/fuchsia/build_fuchsia_artifacts.py index 3f74d19640d0f..5258466145387 100755 --- a/tools/fuchsia/build_fuchsia_artifacts.py +++ b/tools/fuchsia/build_fuchsia_artifacts.py @@ -17,29 +17,15 @@ import sys import tempfile +from gather_flutter_runner_artifacts import CreateMetaPackage, CopyPath +from gen_package import CreateFarPackage + _script_dir = os.path.abspath(os.path.join(os.path.realpath(__file__), '..')) _src_root_dir = os.path.join(_script_dir, '..', '..', '..') _out_dir = os.path.join(_src_root_dir, 'out', 'ci') _bucket_directory = os.path.join(_out_dir, 'fuchsia_bucket') -def EnsureParentExists(path): - dir_name, _ = os.path.split(path) - if not os.path.exists(dir_name): - os.makedirs(dir_name) - - -def CopyPath(src, dst): - try: - EnsureParentExists(dst) - shutil.copytree(src, dst) - except OSError as exc: - if exc.errno == errno.ENOTDIR: - shutil.copy(src, dst) - else: - raise - - def IsLinux(): return platform.system() == 'Linux' @@ -62,6 +48,40 @@ def GetFuchsiaSDKPath(): return os.path.join(_src_root_dir, 'fuchsia', 'sdk', host_os) +def GetHostArchFromPlatform(): + host_arch = platform.machine() + # platform.machine() returns AMD64 on 64-bit Windows. 
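+  # The returned names match the per-architecture tool directories in the
+  # Fuchsia SDK (see GetPMBinPath below).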
+ if host_arch in ['x86_64', 'AMD64']: + return 'x64' + elif host_arch == 'aarch64': + return 'arm64' + raise Exception('Unsupported host architecture: %s' % host_arch) + + +def GetPMBinPath(): + return os.path.join(GetFuchsiaSDKPath(), 'tools', GetHostArchFromPlatform(), 'pm') + + +def RunExecutable(command): + subprocess.check_call(command, cwd=_src_root_dir) + + +def RunGN(variant_dir, flags): + print('Running gn for variant "%s" with flags: %s' % (variant_dir, ','.join(flags))) + RunExecutable([ + os.path.join('flutter', 'tools', 'gn'), + ] + flags) + + assert os.path.exists(os.path.join(_out_dir, variant_dir)) + + +def BuildNinjaTargets(variant_dir, targets): + assert os.path.exists(os.path.join(_out_dir, variant_dir)) + + print('Running autoninja for targets: %s' % targets) + RunExecutable(['autoninja', '-C', os.path.join(_out_dir, variant_dir)] + targets) + + def RemoveDirectoryIfExists(path): if not os.path.exists(path): return @@ -128,18 +148,22 @@ def CopyZirconFFILibIfExists(source, destination): def CopyToBucketWithMode(source, destination, aot, product, runner_type, api_level): mode = 'aot' if aot else 'jit' + product_suff = '_product' if product else '' + runner_name = '%s_%s%s_runner' % (runner_type, mode, product_suff) + far_dir_name = '%s_far' % runner_name source_root = os.path.join(_out_dir, source) - destination = os.path.join(_bucket_directory, destination, mode) - - far_file = '%s_%s%s_runner' % (runner_type, mode, '_product' if product else '') - CopyPath('%s/%s-0.far' % (source_root, far_file), '%s/%s.far' % (destination, far_file)) + far_base = os.path.join(source_root, far_dir_name) + CreateMetaPackage(far_base, runner_name) + pm_bin = GetPMBinPath() + key_path = os.path.join(_script_dir, 'development.key') + destination = os.path.join(_bucket_directory, destination, mode) + CreateFarPackage(pm_bin, far_base, key_path, destination, api_level) patched_sdk_dirname = '%s_runner_patched_sdk' % runner_type patched_sdk_dir = os.path.join(source_root, patched_sdk_dirname) dest_sdk_path = os.path.join(destination, patched_sdk_dirname) if not os.path.exists(dest_sdk_path): CopyPath(patched_sdk_dir, dest_sdk_path) - CopyGenSnapshotIfExists(source_root, destination) CopyFlutterTesterBinIfExists(source_root, destination) CopyZirconFFILibIfExists(source_root, destination) @@ -274,6 +298,39 @@ def ProcessCIPDPackage(upload, engine_version): ]) +def BuildTarget( + runtime_mode, arch, optimized, enable_lto, enable_legacy, asan, dart_version_git_info, + prebuilt_dart_sdk, build_targets +): + unopt = "_unopt" if not optimized else "" + out_dir = 'fuchsia_%s%s_%s' % (runtime_mode, unopt, arch) + flags = [ + '--fuchsia', + '--fuchsia-cpu', + arch, + '--runtime-mode', + runtime_mode, + ] + + if not optimized: + flags.append('--unoptimized') + if not enable_lto: + flags.append('--no-lto') + if not enable_legacy: + flags.append('--no-fuchsia-legacy') + if asan: + flags.append('--asan') + if not dart_version_git_info: + flags.append('--no-dart-version-git-info') + if not prebuilt_dart_sdk: + flags.append('--no-prebuilt-dart-sdk') + + RunGN(out_dir, flags) + BuildNinjaTargets(out_dir, build_targets) + + return + + def main(): parser = argparse.ArgumentParser() @@ -324,6 +381,13 @@ def main(): help='If set, disables legacy code for the build.' ) + parser.add_argument( + '--skip-build', + action='store_true', + default=False, + help='If set, skips building and just creates packages.' 
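+      # ci/builders/linux_fuchsia.json (changed above) passes this flag so the
+      # CIPD upload step only packages and uploads artifacts, without rebuilding.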
+ ) + parser.add_argument( '--targets', default='', @@ -383,6 +447,12 @@ def main(): runtime_mode = runtime_modes[i] product = product_modes[i] if build_mode == 'all' or runtime_mode == build_mode: + if not args.skip_build: + BuildTarget( + runtime_mode, arch, optimized, enable_lto, enable_legacy, args.asan, + not args.no_dart_version_git_info, not args.no_prebuilt_dart_sdk, + args.targets.split(",") if args.targets else ['flutter'] + ) CopyBuildToBucket(runtime_mode, arch, optimized, product) # This is a hack. The recipe for building and uploading Fuchsia to CIPD diff --git a/tools/fuchsia/copy_path.py b/tools/fuchsia/copy_path.py new file mode 100755 index 0000000000000..7f46cb0918057 --- /dev/null +++ b/tools/fuchsia/copy_path.py @@ -0,0 +1,68 @@ +#!/usr/bin/env python3 +# +# Copyright 2013 The Flutter Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +""" Copies paths, creates if they do not exist. +""" + +import argparse +import errno +import json +import os +import platform +import shutil +import subprocess +import sys + + +def EnsureParentExists(path): + dir_name, _ = os.path.split(path) + if not os.path.exists(dir_name): + os.makedirs(dir_name) + + +def SameStat(s1, s2): + return s1.st_ino == s2.st_ino and s1.st_dev == s2.st_dev + + +def SameFile(f1, f2): + if not os.path.exists(f2): + return False + s1 = os.stat(f1) + s2 = os.stat(f2) + return SameStat(s1, s2) + + +def CopyPath(src, dst): + try: + EnsureParentExists(dst) + shutil.copytree(src, dst) + except OSError as exc: + if exc.errno == errno.ENOTDIR: + if not SameFile(src, dst): + shutil.copyfile(src, dst) + else: + raise + + +def main(): + parser = argparse.ArgumentParser() + + parser.add_argument('--file-list', dest='file_list', action='store', required=True) + + args = parser.parse_args() + + files = open(args.file_list, 'r') + files_to_copy = files.read().split() + num_files = len(files_to_copy) // 2 + + for i in range(num_files): + CopyPath(files_to_copy[i], files_to_copy[num_files + i]) + + return 0 + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/tools/fuchsia/development.key b/tools/fuchsia/development.key new file mode 100644 index 0000000000000..844d15087194e --- /dev/null +++ b/tools/fuchsia/development.key @@ -0,0 +1 @@ +.="õ¬ù;iµëÄiÊK‰°õ œéñ3®¦÷¯–ÛkÀѨKizeîË5¯b4•a¶&ìѪímÜqÂ)ƶƒ \ No newline at end of file diff --git a/tools/fuchsia/fuchsia_archive.gni b/tools/fuchsia/fuchsia_archive.gni index 911c30ac3da03..d18a4c9ec8043 100644 --- a/tools/fuchsia/fuchsia_archive.gni +++ b/tools/fuchsia/fuchsia_archive.gni @@ -5,135 +5,244 @@ import("//flutter/tools/fuchsia/fuchsia_debug_symbols.gni") import("//flutter/tools/fuchsia/fuchsia_libs.gni") import("//flutter/tools/fuchsia/gn-sdk/src/cmc.gni") -import("//flutter/tools/fuchsia/gn-sdk/src/component.gni") import("//flutter/tools/fuchsia/gn-sdk/src/gn_configs.gni") -import("//flutter/tools/fuchsia/gn-sdk/src/package.gni") -# Creates a Fuchsia archive (.far) file from the Fuchsia SDK and gn-sdk. -# -# An archive combines an ELF binary and a component manifest to create -# a packaged Fuchsia program that can be deployed to a Fuchsia device. +# Alias of cmc_compile in gn-sdk/src/cmc.gni +template("_compile_cml") { + assert(defined(invoker.manifest), "_compile_cml must define manifest") + + # Create an empty depfile, it's not used in flutter. 
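+  # (The path matches the depfile that cmc_compile is presumably expected to
+  # produce; nothing in this build consumes it.)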
+ write_file("${target_gen_dir}/${target_name}/${target_name}.d", + [], + "list lines") + + cmc_compile(target_name) { + forward_variables_from(invoker, + [ + "deps", + "manifest", + "testonly", + ]) + output_file = invoker.output + } +} + +# TODO(zijiehe): May use fuchsia_package in gn-sdk if possible. - http://crbug.com/40935282 + +# Creates a Fuchsia archive (.far) file using PM from the Fuchsia SDK. # -# binary (optional): -# The ELF binary for the archive's program, or target_name if unspecified. +# binary (required): +# The ELF binary for the archive's program. # deps (optional): # The code dependencies for the archive. +# inputs (optional): +# When these files are changed, the package should be regenerated. # libraries (optional): # Paths to .so libraries that should be dynamically linked to the binary. -# manifest (optional): -# The component manifest cml file, or meta/binary.cml if unspecified. # resources (optional): # Files that should be placed into the `data/` directory of the archive. # testonly (optional): # Set this to true for archives that are only used for tests. -template("fuchsia_archive") { - if (defined(invoker.binary)) { - _binary = invoker.binary - } else { - _binary = target_name - } +template("_fuchsia_archive") { + assert(defined(invoker.binary), "package must define binary") - if (defined(invoker.manifest)) { - _manifest = invoker.manifest - } else { - _manifest = rebase_path("meta/${_binary}.cml") - } - - _component_target = target_name + "__component" - fuchsia_component(_component_target) { + pkg_testonly = defined(invoker.testonly) && invoker.testonly + pkg_target_name = target_name + pkg = { + package_version = "0" # placeholder forward_variables_from(invoker, [ + "binary", "deps", - "testonly", + "resources", + "libraries", ]) - manifest = _manifest + if (!defined(package_name)) { + package_name = pkg_target_name + } + if (!defined(deps)) { + deps = [] + } + if (!defined(resources)) { + resources = [] + } + if (!defined(libraries)) { + libraries = [] + } + } - resources = [ - { - path = "$root_out_dir/$_binary" - dest = "bin/app" - }, - ] - if (defined(invoker.resources)) { - foreach(resource, invoker.resources) { - resources += [ - { - path = resource.path - dest = "data/${resource.dest}" - }, - ] - } + far_base_dir = "$root_out_dir/${pkg_target_name}_far" + + copy_sources = [ "$root_out_dir/${invoker.binary}" ] + copy_outputs = [ "$far_base_dir/bin/app" ] + + foreach(res, pkg.resources) { + copy_sources += [ res.path ] + copy_outputs += [ "$far_base_dir/data/${res.dest}" ] + } + + foreach(lib, pkg.libraries) { + output_path = "" + + if (defined(lib.output_path)) { + output_path = lib.output_path } - _libs = common_libs - if (defined(invoker.libraries)) { - _libs += invoker.libraries + copy_sources += [ "${lib.path}/${lib.name}" ] + copy_outputs += [ "$far_base_dir/lib/${output_path}${lib.name}" ] + } + + pkg_dir_deps = pkg.deps + + write_file("${far_base_dir}/meta/package", + { + name = pkg.package_name + version = pkg.package_version + }, + "json") + + _dbg_symbols_target = "${target_name}_dbg_symbols" + fuchsia_debug_symbols(_dbg_symbols_target) { + deps = pkg.deps + testonly = pkg_testonly + binary = invoker.binary + } + + action("${target_name}_dir") { + script = "//flutter/tools/fuchsia/copy_path.py" + sources = copy_sources + response_file_contents = rebase_path(copy_sources + copy_outputs) + deps = pkg_dir_deps + args = [ "--file-list={{response_file_name}}" ] + outputs = copy_outputs + testonly = pkg_testonly + } + + manifest_json_file = 
"${root_out_dir}/${target_name}_package_manifest.json" + action(target_name) { + script = "//flutter/tools/fuchsia/gen_package.py" + deps = pkg_dir_deps + [ + ":${target_name}_dir", + ":${_dbg_symbols_target}", + ] + + sources = copy_outputs + + inputs = [] + if (defined(invoker.inputs)) { + inputs += invoker.inputs } - foreach(lib, _libs) { - output_path = "" - if (defined(lib.output_path)) { - output_path = lib.output_path - } - resources += [ - { - path = "${lib.path}/${lib.name}" - dest = "lib/${output_path}${lib.name}" - }, + + args = [ + "--pm-bin", + rebase_path("$fuchsia_tool_dir/pm"), + "--package-dir", + rebase_path(far_base_dir), + "--far-name", + target_name, + "--manifest-json-file", + rebase_path(manifest_json_file, root_build_dir), + ] + + assert(fuchsia_target_api_level != -1, + "Must set a target api level when creating an archive") + if (fuchsia_target_api_level != -1) { + args += [ + "--api-level", + "${fuchsia_target_api_level}", ] } + + outputs = [ + manifest_json_file, + "${far_base_dir}.manifest", + "$root_out_dir/${target_name}-0.far", + ] + testonly = pkg_testonly + } +} + +# Creates a Fuchsia archive. +# +# An archive combines an ELF binary and a component manifest to create +# a packaged Fuchsia program that can be deployed to a Fuchsia device. +# +# binary (optional): +# The ELF binary for the archive's program, or target_name if unspecified. +# deps (optional): +# The code dependencies for the archive. +# inputs (optional): +# When these files are changed, the package should be regenerated. +# libraries (optional): +# Paths to .so libraries that should be dynamically linked to the binary. +# resources (optional): +# Files that should be placed into the `data/` directory of the archive. +# testonly (optional): +# Set this to true for archives that are only used for tests. +template("fuchsia_archive") { + if (!defined(invoker.binary)) { + _binary = target_name + } else { + _binary = invoker.binary } + _deps = [] + if (defined(invoker.deps)) { + _deps += invoker.deps + } + + _cml_file = rebase_path("meta/${_binary}.cml") + _far_base_dir = "$root_out_dir/${target_name}_far" + _cml_file_name = get_path_info(_cml_file, "name") + _compile_cml_target = "${target_name}_${_cml_file_name}_compile_cml" - _package_target = target_name + "__package" - _package_name = target_name - fuchsia_package(_package_target) { + _compile_cml(_compile_cml_target) { forward_variables_from(invoker, [ "testonly" ]) - package_name = _package_name - deps = [ ":$_component_target" ] + + manifest = _cml_file + output = "$_far_base_dir/meta/${_cml_file_name}.cm" } + _deps += [ ":$_compile_cml_target" ] - # TODO(zijiehe): http://crbug.com/368608542, prefer using - # gn-sdk/build_id_dir.gni - _build_id_target = target_name + "__build_id" - fuchsia_debug_symbols(_build_id_target) { + _fuchsia_archive(target_name) { + deps = _deps + binary = _binary forward_variables_from(invoker, [ - "deps", + "inputs", + "libraries", + "resources", "testonly", ]) - binary = _binary - } - - # TODO(zijiehe): http://crbug.com/368608542, copying the far files is not very - # necessary, try to remove the -0.far copy. 
- copy(target_name) { - forward_variables_from(invoker, [ "testonly" ]) - package_output_dir = get_label_info(":$_package_target", "target_gen_dir") - sources = [ "$package_output_dir/$_package_name/${_package_name}.far" ] - outputs = [ "$root_out_dir/${_package_name}-0.far" ] - deps = [ - ":$_build_id_target", - ":$_package_target", - ] } } # Creates a Fuchsia archive (.far) file containing a generated test root -# component and test driver component. +# component and test driver component, using PM from the Fuchsia SDK. # -# binary: -# Forward to fuchsia_archive +# binary (optional): +# The binary for the test, or target_name if unspecified. # deps (required): # Dependencies for the test archive. # gen_cml_file (optional): # If is defined and true, an interpolate cml file will be generated. -# libraries: -# Forward to fuchsia_archive -# resources: -# Forward to fuchsia_archive +# libraries (optional): +# Paths to .so libraries that should be dynamically linked to the binary. +# resources (optional): +# Files that should be placed into the `data/` directory of the archive. template("fuchsia_test_archive") { assert(defined(invoker.deps), "package must define deps") - _deps = invoker.deps - if (defined(invoker.gen_cml_file) && invoker.gen_cml_file) { + if (!defined(invoker.binary)) { + _binary = target_name + } else { + _binary = invoker.binary + } + _deps = [] + if (defined(invoker.deps)) { + _deps += invoker.deps + } + + _generated_cml = defined(invoker.gen_cml_file) && invoker.gen_cml_file + if (_generated_cml) { _cml_file = "$root_out_dir/${target_name}.cml" _interpolate_cml_target = "${target_name}_interpolate_cml" action(_interpolate_cml_target) { @@ -150,20 +259,36 @@ template("fuchsia_test_archive") { ] outputs = [ _cml_file ] } - _deps += [ ":$_interpolate_cml_target" ] + } else { + _cml_file = rebase_path("meta/${_binary}.cml") } - fuchsia_archive(target_name) { - forward_variables_from(invoker, - [ - "binary", - "libraries", - "resources", - ]) + _far_base_dir = "$root_out_dir/${target_name}_far" + _cml_file_name = get_path_info(_cml_file, "name") + _compile_cml_target = "${target_name}_${_cml_file_name}_compile_cml" + + _compile_cml(_compile_cml_target) { + testonly = true + + manifest = _cml_file + output = "$_far_base_dir/meta/${_cml_file_name}.cm" + + if (_generated_cml) { + deps = [ ":$_interpolate_cml_target" ] + } + } + _deps += [ ":$_compile_cml_target" ] + + _fuchsia_archive(target_name) { testonly = true - if (defined(_cml_file)) { - manifest = _cml_file + binary = _binary + forward_variables_from(invoker, [ "resources" ]) + + libraries = common_libs + if (defined(invoker.libraries)) { + libraries += invoker.libraries } + deps = _deps } } diff --git a/tools/fuchsia/gather_flutter_runner_artifacts.py b/tools/fuchsia/gather_flutter_runner_artifacts.py new file mode 100755 index 0000000000000..1b92a67214fa1 --- /dev/null +++ b/tools/fuchsia/gather_flutter_runner_artifacts.py @@ -0,0 +1,89 @@ +#!/usr/bin/env python3 +# +# Copyright 2013 The Flutter Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +""" Gather all the fuchsia artifacts to a destination directory. 
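+
+CreateMetaPackage and CopyPath are also imported by build_fuchsia_artifacts.py,
+and CreateMetaPackage by gen_package.py.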
+""" + +import argparse +import errno +import json +import os +import platform +import shutil +import subprocess +import sys + +_ARTIFACT_PATH_TO_DST = { + 'flutter_jit_runner': 'flutter_jit_runner', 'icudtl.dat': 'data/icudtl.dat', + 'dart_runner': 'dart_runner', 'flutter_patched_sdk': 'flutter_patched_sdk' +} + + +def EnsureParentExists(path): + dir_name, _ = os.path.split(path) + if not os.path.exists(dir_name): + os.makedirs(dir_name) + + +def CopyPath(src, dst): + try: + EnsureParentExists(dst) + shutil.copytree(src, dst) + except OSError as exc: + if exc.errno == errno.ENOTDIR: + shutil.copy(src, dst) + else: + raise + + +def CreateMetaPackage(dst_root, far_name): + meta = os.path.join(dst_root, 'meta') + if not os.path.isdir(meta): + os.makedirs(meta) + content = {} + content['name'] = far_name + content['version'] = '0' + package = os.path.join(meta, 'package') + with open(package, 'w') as out_file: + json.dump(content, out_file) + + +def GatherArtifacts(src_root, dst_root, create_meta_package=True): + if not os.path.exists(dst_root): + os.makedirs(dst_root) + else: + shutil.rmtree(dst_root) + + for src_rel, dst_rel in _ARTIFACT_PATH_TO_DST.items(): + src_full = os.path.join(src_root, src_rel) + dst_full = os.path.join(dst_root, dst_rel) + if not os.path.exists(src_full): + print('Unable to find artifact: ', str(src_full)) + sys.exit(1) + CopyPath(src_full, dst_full) + + if create_meta_package: + CreateMetaPackage(dst_root, 'flutter_runner') + + +def main(): + parser = argparse.ArgumentParser() + + parser.add_argument('--artifacts-root', dest='artifacts_root', action='store', required=True) + parser.add_argument('--dest-dir', dest='dst_dir', action='store', required=True) + + args = parser.parse_args() + + assert os.path.exists(args.artifacts_root) + dst_parent = os.path.abspath(os.path.join(args.dst_dir, os.pardir)) + assert os.path.exists(dst_parent) + + GatherArtifacts(args.artifacts_root, args.dst_dir) + return 0 + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/tools/fuchsia/gen_package.py b/tools/fuchsia/gen_package.py new file mode 100755 index 0000000000000..5ca757d845121 --- /dev/null +++ b/tools/fuchsia/gen_package.py @@ -0,0 +1,132 @@ +#!/usr/bin/env python3 +# +# Copyright 2013 The Flutter Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +""" Generate a Fuchsia FAR Archive from an asset manifest. +""" + +import argparse +import collections +import json +import os +import subprocess +import sys + +from gather_flutter_runner_artifacts import CreateMetaPackage + + +# Generates the manifest and returns the file. 
+def GenerateManifest(package_dir): + full_paths = [] + for root, dirs, files in os.walk(package_dir): + for f in files: + common_prefix = os.path.commonprefix([root, package_dir]) + rel_path = os.path.relpath(os.path.join(root, f), common_prefix) + from_package = os.path.abspath(os.path.join(package_dir, rel_path)) + assert from_package, 'Failed to create from_package for %s' % os.path.join(root, f) + full_paths.append('%s=%s' % (rel_path, from_package)) + + parent_dir = os.path.abspath(os.path.join(package_dir, os.pardir)) + manifest_file_name = os.path.basename(package_dir) + '.manifest' + manifest_path = os.path.join(parent_dir, manifest_file_name) + with open(manifest_path, 'w') as f: + for item in full_paths: + f.write("%s\n" % item) + return manifest_path + + +def CreateFarPackage(pm_bin, package_dir, signing_key, dst_dir, api_level): + manifest_path = GenerateManifest(package_dir) + + pm_command_base = [ + pm_bin, '-m', manifest_path, '-k', signing_key, '-o', dst_dir, '--api-level', api_level + ] + + # Build the package + subprocess.check_output(pm_command_base + ['build']) + + # Archive the package + subprocess.check_output(pm_command_base + ['archive']) + + return 0 + + +def main(): + parser = argparse.ArgumentParser() + + parser.add_argument('--pm-bin', dest='pm_bin', action='store', required=True) + parser.add_argument('--package-dir', dest='package_dir', action='store', required=True) + parser.add_argument('--manifest-file', dest='manifest_file', action='store', required=False) + parser.add_argument( + '--manifest-json-file', dest='manifest_json_file', action='store', required=True + ) + parser.add_argument('--far-name', dest='far_name', action='store', required=False) + parser.add_argument('--api-level', dest='api_level', action='store', required=False) + + args = parser.parse_args() + + assert os.path.exists(args.pm_bin) + assert os.path.exists(args.package_dir) + pkg_dir = args.package_dir + + if not os.path.exists(os.path.join(pkg_dir, 'meta', 'package')): + CreateMetaPackage(pkg_dir, args.far_name) + + output_dir = os.path.abspath(pkg_dir + '_out') + if not os.path.exists(output_dir): + os.makedirs(output_dir) + + manifest_file = None + if args.manifest_file is not None: + assert os.path.exists(args.manifest_file) + manifest_file = args.manifest_file + else: + manifest_file = GenerateManifest(args.package_dir) + + pm_command_base = [ + args.pm_bin, + '-o', + output_dir, + '-n', + args.far_name, + '-m', + manifest_file, + ] + + # Build and then archive the package + # Use check_output so if anything goes wrong we get the output. 
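+  # On failure, the except block below dumps the manifest and meta/contents so
+  # the problem can be diagnosed from the build logs.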
+ try: + + build_command = ['build', '--output-package-manifest', args.manifest_json_file] + + if args.api_level is not None: + build_command = ['--api-level', args.api_level] + build_command + + archive_command = [ + 'archive', '--output=' + os.path.join(os.path.dirname(output_dir), args.far_name + "-0") + ] + + pm_commands = [build_command, archive_command] + + for pm_command in pm_commands: + subprocess.check_output(pm_command_base + pm_command) + except subprocess.CalledProcessError as e: + print('==================== Manifest contents =========================================') + with open(manifest_file, 'r') as manifest: + sys.stdout.write(manifest.read()) + print('==================== End manifest contents =====================================') + meta_contents_path = os.path.join(output_dir, 'meta', 'contents') + if os.path.exists(meta_contents_path): + print('==================== meta/contents =============================================') + with open(meta_contents_path, 'r') as meta_contents: + sys.stdout.write(meta_contents.read()) + print('==================== End meta/contents =========================================') + raise + + return 0 + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/tools/fuchsia/gen_repo.py b/tools/fuchsia/gen_repo.py new file mode 100755 index 0000000000000..b795254b8df06 --- /dev/null +++ b/tools/fuchsia/gen_repo.py @@ -0,0 +1,52 @@ +#!/usr/bin/env python3 +# +# Copyright 2013 The Flutter Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +""" Generate a Fuchsia repo capable of serving Fuchsia archives over the +network. +""" +import argparse +import collections +import json +import os +import subprocess +import sys + + +def main(): + parser = argparse.ArgumentParser() + + parser.add_argument('--pm-bin', dest='pm_bin', action='store', required=True) + parser.add_argument('--repo-dir', dest='repo_dir', action='store', required=True) + parser.add_argument('--archive', dest='archives', action='append', required=True) + + args = parser.parse_args() + + assert os.path.exists(args.pm_bin) + + if not os.path.exists(args.repo_dir): + pm_newrepo_command = [args.pm_bin, 'newrepo', '-repo', args.repo_dir] + subprocess.check_call(pm_newrepo_command) + + pm_publish_command = [ + args.pm_bin, + 'publish', + '-C', # Remove all previous registrations. + '-a', # Publish archives from an archive (mode). + '-repo', + args.repo_dir + ] + + for archive in args.archives: + pm_publish_command.append('-f') + pm_publish_command.append(archive) + + subprocess.check_call(pm_publish_command) + + return 0 + + +if __name__ == '__main__': + sys.exit(main())